// (page-save metadata) Files — saved 2026-04-05 00:43:23 +05:30 — 256 lines, 7.3 KiB, JavaScript
/**
* ThirdEye Meet Recorder — Content Script
* Injected into meet.google.com pages.
* Uses Web Speech API (Chrome built-in) for live transcription.
* Buffers transcript and POSTs chunks to ThirdEye backend every CHUNK_INTERVAL_MS.
*/
// Tuning constants.
const CHUNK_INTERVAL_MS = 30000; // Send a chunk every 30 seconds
const MAX_BUFFER_CHARS = 8000; // ~2000 tokens — safe for LLM processing
// Mutable session state — reset in startRecording(), read by the message listener.
let recognition = null; // Active SpeechRecognition instance, or null when idle
let isRecording = false; // True between startRecording() and stopRecording()
let transcriptBuffer = ""; // Finalized transcript text accumulated since the last flush
let meetingId = null; // Meet code parsed from the URL (or timestamp fallback)
let backendUrl = "http://localhost:8000"; // Overridden from config/chrome.storage on start
let ingestSecret = "thirdeye_meet_secret_change_me"; // Sent as X-ThirdEye-Secret; overridden on start
let groupId = "meet_sessions"; // Backend grouping key for ingested chunks
let chunkTimer = null; // setInterval handle for the periodic flush, or null
let chunkCount = 0; // Index of the next chunk to send for this meeting
// --- Helpers ---
/**
 * Derive an identifier for the current meeting.
 * Meet URLs look like meet.google.com/abc-defg-hij; when no code is present
 * in the path, fall back to a timestamp-based ID.
 * @returns {string} meeting code or `meet_<epoch-ms>` fallback
 */
function getMeetingId() {
  const codePattern = /\/([a-z]{3}-[a-z]{4}-[a-z]{3})/i;
  const found = codePattern.exec(window.location.pathname);
  return found ? found[1] : `meet_${Date.now()}`;
}
/**
 * Best-effort lookup of the local user's display name from the Meet DOM.
 * Prefers Meet's own `data-self-name` attribute, then the avatar image's
 * alt text; returns "Unknown" when neither is available.
 * @returns {string}
 */
function getParticipantName() {
  const selfEl = document.querySelector('[data-self-name]');
  if (selfEl) {
    return selfEl.getAttribute('data-self-name');
  }
  const avatar = document.querySelector('img[data-iml]');
  if (avatar && avatar.alt) {
    return avatar.alt;
  }
  return "Unknown";
}
// --- Transport ---
/**
 * POST one transcript chunk to the ThirdEye backend.
 * Fix: the chunk index is now consumed only on a confirmed delivery. The
 * original did `chunk_index: chunkCount++`, which burned an index even when
 * the request failed — producing gaps, and giving a retried chunk a
 * different index than its failed attempt (defeating backend dedupe).
 * @param {string} text - buffered transcript text; near-empty text is skipped
 * @param {boolean} [isFinal=false] - marks the last chunk of a meeting
 */
async function sendChunkToBackend(text, isFinal = false) {
  if (!text || text.trim().length < 10) return; // Don't send near-empty chunks
  const payload = {
    meeting_id: meetingId,
    group_id: groupId,
    chunk_index: chunkCount, // stamped, but only advanced after a successful send
    text: text.trim(),
    speaker: getParticipantName(),
    timestamp: new Date().toISOString(),
    is_final: isFinal,
  };
  try {
    const res = await fetch(`${backendUrl}/api/meet/ingest`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-ThirdEye-Secret": ingestSecret,
      },
      body: JSON.stringify(payload),
    });
    if (!res.ok) {
      // Server-side rejection (auth/validation): drop the chunk rather than
      // re-buffer — retrying a permanently-rejected payload would loop forever.
      console.warn(`[ThirdEye] Backend rejected chunk: ${res.status}`);
    } else {
      chunkCount++; // delivery confirmed — advance so the next chunk gets a fresh index
      console.log(`[ThirdEye] Chunk ${payload.chunk_index} sent (${text.length} chars)`);
      chrome.runtime.sendMessage({
        type: "CHUNK_SENT",
        chunkIndex: payload.chunk_index,
        charCount: text.length,
        meetingId: meetingId,
      });
    }
  } catch (err) {
    console.warn(`[ThirdEye] Failed to send chunk: ${err.message}`);
    // Network failure: prepend the text back onto the buffer (the caller
    // cleared it before this function hit its first await) so the next flush
    // resends it, in chronological order, under the SAME chunk index.
    transcriptBuffer = text + "\n" + transcriptBuffer;
  }
}
// --- Periodic flush ---
/**
 * Hand the accumulated transcript off to the backend and reset the buffer.
 * No-op when the buffer holds only whitespace.
 */
function flushBuffer() {
  const pending = transcriptBuffer;
  if (pending.trim().length === 0) return;
  // sendChunkToBackend re-prepends `pending` on network failure, so clearing
  // here never loses text.
  sendChunkToBackend(pending);
  transcriptBuffer = "";
}
/** Start (or restart) the periodic flush so only one timer ever runs. */
function startChunkTimer() {
  stopChunkTimer();
  chunkTimer = setInterval(flushBuffer, CHUNK_INTERVAL_MS);
}
/** Cancel the periodic flush. Idempotent — safe when no timer is active. */
function stopChunkTimer() {
  if (!chunkTimer) return;
  clearInterval(chunkTimer);
  chunkTimer = null;
}
// --- Web Speech API ---
/**
 * Build and configure a SpeechRecognition instance for continuous capture.
 * Fix: the original restarted recognition from BOTH `onerror` (after 1 s) and
 * `onend` (after 250 ms). Since `onend` always fires after an error terminates
 * the session, both timers could fire and the second `rec.start()` on an
 * already-started recognizer throws InvalidStateError. `onend` is now the
 * single restart path, and `start()` is guarded.
 * @returns {SpeechRecognition|null} configured instance, or null if the API
 *   is unavailable in this browser.
 */
function initSpeechRecognition() {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  if (!SpeechRecognition) {
    console.error("[ThirdEye] Web Speech API not available in this browser.");
    chrome.runtime.sendMessage({ type: "ERROR", message: "Web Speech API not supported." });
    return null;
  }
  const rec = new SpeechRecognition();
  rec.continuous = true; // Don't stop after first pause
  rec.interimResults = true; // Get partial results while speaking
  rec.lang = "en-US"; // Change if needed
  rec.maxAlternatives = 1;
  rec.onstart = () => {
    console.log("[ThirdEye] Speech recognition started.");
    chrome.runtime.sendMessage({ type: "STATUS", status: "recording", meetingId });
  };
  rec.onresult = (event) => {
    let newText = "";
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const result = event.results[i];
      // Only accumulate FINAL results to avoid duplicates from interim results.
      if (result.isFinal) {
        newText += result[0].transcript + " ";
      }
    }
    if (newText.trim()) {
      transcriptBuffer += newText;
      // Guard: if buffer is getting huge, flush early.
      if (transcriptBuffer.length > MAX_BUFFER_CHARS) {
        flushBuffer();
      }
    }
  };
  rec.onerror = (event) => {
    console.warn(`[ThirdEye] Speech recognition error: ${event.error}`);
    if (event.error === "not-allowed") {
      chrome.runtime.sendMessage({
        type: "ERROR",
        message: "Microphone permission denied. Allow mic access in Chrome settings.",
      });
    }
    // No restart here: `onend` fires after every error and owns the restart.
    // NOTE(review): a persistent "not-allowed" error will still loop via the
    // onend restart — consider clearing isRecording for fatal errors.
  };
  rec.onend = () => {
    console.log("[ThirdEye] Speech recognition ended.");
    if (!isRecording) return;
    // Auto-restart: Chrome's Web Speech API stops after ~60s of silence.
    setTimeout(() => {
      if (!isRecording) return;
      try {
        rec.start();
      } catch (err) {
        // InvalidStateError if a restart raced us — harmless to skip.
        console.warn(`[ThirdEye] Could not restart recognition: ${err.message}`);
      }
    }, 250);
  };
  return rec;
}
// --- Public controls (called from popup via chrome.tabs.sendMessage) ---
/**
 * Begin a recording session: resolve config, reset per-meeting state, start
 * speech recognition and the flush timer, then notify the backend.
 * No-op if already recording. Called from the popup via chrome.tabs.sendMessage.
 * @param {{backendUrl?: string, ingestSecret?: string, groupId?: string}} [config]
 */
async function startRecording(config = {}) {
  if (isRecording) return;

  // Settings precedence: explicit config > synced storage > hard defaults.
  const stored = await chrome.storage.sync.get(["backendUrl", "ingestSecret", "groupId"]);
  backendUrl = config.backendUrl || stored.backendUrl || "http://localhost:8000";
  ingestSecret = config.ingestSecret || stored.ingestSecret || "thirdeye_meet_secret_change_me";
  groupId = config.groupId || stored.groupId || "meet_sessions";

  // Fresh per-meeting state.
  meetingId = getMeetingId();
  chunkCount = 0;
  transcriptBuffer = "";
  isRecording = true;

  recognition = initSpeechRecognition();
  if (recognition === null) {
    isRecording = false;
    return;
  }
  recognition.start();
  startChunkTimer();

  // Best-effort notification that a new meeting has started; failure is non-fatal.
  const startPayload = {
    meeting_id: meetingId,
    group_id: groupId,
    started_at: new Date().toISOString(),
    speaker: getParticipantName(),
  };
  try {
    await fetch(`${backendUrl}/api/meet/start`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-ThirdEye-Secret": ingestSecret,
      },
      body: JSON.stringify(startPayload),
    });
  } catch (err) {
    console.warn("[ThirdEye] Could not notify backend of meeting start:", err.message);
  }
}
/**
 * End the recording session: stop recognition and the flush timer, send the
 * remaining buffered text as a final chunk, and notify the popup.
 * No-op if not currently recording.
 */
async function stopRecording() {
  if (!isRecording) return;
  isRecording = false; // flipped first so onend handlers skip their auto-restart
  stopChunkTimer();

  const rec = recognition;
  recognition = null;
  if (rec) {
    rec.stop();
  }

  // Flush whatever is left, marked as the meeting's final chunk, then reset.
  await sendChunkToBackend(transcriptBuffer, true);
  transcriptBuffer = "";

  chrome.runtime.sendMessage({ type: "STATUS", status: "stopped", meetingId });
  console.log("[ThirdEye] Recording stopped.");
}
// --- Message listener (from popup) ---
/**
 * Message handler for popup commands (START_RECORDING / STOP_RECORDING /
 * GET_STATUS).
 * Fix: the async branches had `.then(...)` with no `.catch`, so a rejection
 * from startRecording/stopRecording meant sendResponse was never called and
 * the popup's callback hung forever. Errors now produce `{ ok:false, error }`.
 */
chrome.runtime.onMessage.addListener((msg, sender, sendResponse) => {
  if (msg.type === "START_RECORDING") {
    startRecording(msg.config || {})
      .then(() => sendResponse({ ok: true }))
      .catch((err) => sendResponse({ ok: false, error: err.message }));
    return true; // keep the message channel open for the async response
  }
  if (msg.type === "STOP_RECORDING") {
    stopRecording()
      .then(() => sendResponse({ ok: true }))
      .catch((err) => sendResponse({ ok: false, error: err.message }));
    return true; // async response
  }
  if (msg.type === "GET_STATUS") {
    // Synchronous reply — no need to keep the channel open.
    sendResponse({
      isRecording,
      meetingId,
      bufferLength: transcriptBuffer.length,
      chunkCount,
    });
    return false;
  }
});
console.log("[ThirdEye] Content script loaded on", window.location.href);