fix(communications): restore voice dictation in message composer
This commit is contained in:
@@ -2295,6 +2295,7 @@ onMounted(() => {
|
||||
|
||||
onBeforeUnmount(() => {
|
||||
stopCrmRealtime();
|
||||
stopCommRecording(true);
|
||||
if (pilotRecording.value) {
|
||||
stopPilotRecording("fill");
|
||||
}
|
||||
@@ -3459,6 +3460,8 @@ const commPinnedOnly = ref(false);
|
||||
// --- Message-composer state ---------------------------------------------
// Draft text for the outgoing message; voice transcripts are appended here
// by startCommRecording's onstop handler.
const commDraft = ref("");
// True while a send request is in flight (disables composer controls).
const commSending = ref(false);
// True while the microphone is actively capturing (owned by
// startCommRecording / stopCommRecording — do not flip it elsewhere).
const commRecording = ref(false);
// True while a recorded clip is out for speech-to-text.
const commTranscribing = ref(false);
// Last microphone/transcription error shown in the composer; "" when clear.
const commMicError = ref("");
// Active composer tab; "message" is the plain chat input.
const commComposerMode = ref<"message" | "planned" | "logged" | "document">("message");
// Visibility of the composer's quick-actions dropdown.
const commQuickMenuOpen = ref(false);
|
||||
const commPinContextMenu = ref<{
|
||||
@@ -3493,6 +3496,11 @@ const eventCloseError = ref<Record<string, string>>({});
|
||||
// Per-event "recording in progress" flags for archive voice notes, keyed by event id.
const eventArchiveRecordingById = ref<Record<string, boolean>>({});
// Per-event "transcription in flight" flags, keyed by event id.
const eventArchiveTranscribingById = ref<Record<string, boolean>>({});
// Per-event mic/transcription error messages, keyed by event id.
const eventArchiveMicErrorById = ref<Record<string, string>>({});

// --- Composer recorder plumbing (non-reactive; managed by
// startCommRecording / stopCommRecording) --------------------------------
// Active MediaRecorder for the composer mic, or null when idle.
let commMediaRecorder: MediaRecorder | null = null;
// Microphone MediaStream backing the recorder; its tracks are stopped on teardown.
let commRecorderStream: MediaStream | null = null;
// MIME type actually negotiated by the recorder; used when assembling the Blob.
let commRecorderMimeType = "audio/webm";
// Audio chunks accumulated via ondataavailable until the recorder stops.
let commRecordingChunks: Blob[] = [];
// When true, the next onstop discards the take instead of transcribing it.
let commDiscardOnStop = false;

// Parallel plumbing for the event-archive recorder.
let eventArchiveMediaRecorder: MediaRecorder | null = null;
let eventArchiveRecorderStream: MediaStream | null = null;
let eventArchiveRecorderMimeType = "audio/webm";
|
||||
@@ -3500,6 +3508,7 @@ let eventArchiveChunks: Blob[] = [];
|
||||
let eventArchiveTargetEventId = "";
|
||||
|
||||
watch(selectedCommThreadId, () => {
|
||||
stopCommRecording(true);
|
||||
stopEventArchiveRecording();
|
||||
destroyAllCommCallWaves();
|
||||
callTranscriptOpen.value = {};
|
||||
@@ -3508,6 +3517,7 @@ watch(selectedCommThreadId, () => {
|
||||
callTranscriptError.value = {};
|
||||
commPinnedOnly.value = false;
|
||||
commDraft.value = "";
|
||||
commMicError.value = "";
|
||||
commComposerMode.value = "message";
|
||||
commQuickMenuOpen.value = false;
|
||||
commPinContextMenu.value = { open: false, x: 0, y: 0, entry: null };
|
||||
@@ -4593,8 +4603,99 @@ async function sendCommMessage() {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Start capturing microphone audio for the message composer.
 *
 * On success this flips `commRecording` on and wires a MediaRecorder whose
 * `onstop` handler releases the mic, transcribes the take via
 * `transcribeAudioBlob`, and appends the text to `commDraft`.
 * All failures are surfaced through `commMicError`; nothing is thrown.
 */
async function startCommRecording() {
  // Re-entrancy guard: ignore while already capturing or transcribing.
  if (commRecording.value || commTranscribing.value) return;
  commMicError.value = "";
  if (!pilotMicSupported.value) {
    commMicError.value = "Recording is not supported in this browser";
    return;
  }

  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    // Prefer Opus-in-WebM where supported; otherwise let the browser pick
    // its default container.
    const preferredMime = "audio/webm;codecs=opus";
    const recorder = MediaRecorder.isTypeSupported(preferredMime)
      ? new MediaRecorder(stream, { mimeType: preferredMime })
      : new MediaRecorder(stream);

    // Stash recorder state in module-level vars so stopCommRecording and
    // the watch/unmount hooks can tear it down from outside this closure.
    commRecorderStream = stream;
    commRecorderMimeType = recorder.mimeType || "audio/webm";
    commMediaRecorder = recorder;
    commRecordingChunks = [];
    commDiscardOnStop = false;
    commRecording.value = true;

    recorder.ondataavailable = (event: BlobEvent) => {
      if (event.data?.size) commRecordingChunks.push(event.data);
    };

    recorder.onstop = async () => {
      // Snapshot-and-reset the discard flag so a later take starts clean.
      const discard = commDiscardOnStop;
      commDiscardOnStop = false;
      commRecording.value = false;
      commMediaRecorder = null;

      // Release the microphone immediately, before the (slow) transcription.
      if (commRecorderStream) {
        commRecorderStream.getTracks().forEach((track) => track.stop());
        commRecorderStream = null;
      }

      const audioBlob = new Blob(commRecordingChunks, { type: commRecorderMimeType });
      commRecordingChunks = [];
      // Nothing to transcribe when the take was discarded or silent.
      if (discard || audioBlob.size === 0) return;

      commTranscribing.value = true;
      try {
        const text = await transcribeAudioBlob(audioBlob);
        if (!text) {
          commMicError.value = "Could not recognize speech";
          return;
        }
        // Append the transcript after whatever the user already typed.
        const previous = String(commDraft.value ?? "").trim();
        commDraft.value = previous ? `${previous} ${text}` : text;
        commMicError.value = "";
      } catch (error: any) {
        commMicError.value = String(error?.data?.message ?? error?.message ?? "Voice transcription failed");
      } finally {
        commTranscribing.value = false;
      }
    };

    recorder.start();
  } catch {
    // getUserMedia rejection (permission denied, no device) or recorder
    // construction failure: reset state and release any acquired stream.
    commRecording.value = false;
    commMicError.value = "No microphone access";
    if (commRecorderStream) {
      commRecorderStream.getTracks().forEach((track) => track.stop());
      commRecorderStream = null;
    }
    commMediaRecorder = null;
  }
}
|
||||
|
||||
function stopCommRecording(discard = false) {
|
||||
if (!commMediaRecorder || commMediaRecorder.state === "inactive") {
|
||||
commRecording.value = false;
|
||||
commDiscardOnStop = false;
|
||||
commRecordingChunks = [];
|
||||
if (commRecorderStream) {
|
||||
commRecorderStream.getTracks().forEach((track) => track.stop());
|
||||
commRecorderStream = null;
|
||||
}
|
||||
commMediaRecorder = null;
|
||||
return;
|
||||
}
|
||||
commDiscardOnStop = discard;
|
||||
commMediaRecorder.stop();
|
||||
}
|
||||
|
||||
function toggleCommRecording() {
|
||||
commRecording.value = !commRecording.value;
|
||||
if (commTranscribing.value) return;
|
||||
if (commRecording.value) {
|
||||
stopCommRecording();
|
||||
return;
|
||||
}
|
||||
void startCommRecording();
|
||||
}
|
||||
|
||||
function handleCommComposerEnter(event: KeyboardEvent) {
|
||||
@@ -5435,6 +5536,9 @@ async function decideFeedCard(card: FeedCard, decision: "accepted" | "rejected")
|
||||
<p v-if="commEventError && commComposerMode !== 'message'" class="comm-event-error text-xs text-error">
|
||||
{{ commEventError }}
|
||||
</p>
|
||||
<p v-if="commMicError" class="comm-mic-error text-xs text-error">
|
||||
{{ commMicError }}
|
||||
</p>
|
||||
|
||||
<div v-if="commComposerMode === 'message'" class="comm-input-channel dropdown dropdown-top not-prose">
|
||||
<button
|
||||
@@ -5472,9 +5576,9 @@ async function decideFeedCard(card: FeedCard, decision: "accepted" | "rejected")
|
||||
</button>
|
||||
<button
|
||||
class="btn btn-xs btn-circle border border-base-300 bg-base-100 text-base-content/80 hover:bg-base-200"
|
||||
:class="commRecording ? 'comm-mic-active' : ''"
|
||||
:disabled="commSending || commEventSaving"
|
||||
title="Voice input"
|
||||
:class="commRecording || commTranscribing ? 'comm-mic-active' : ''"
|
||||
:disabled="commSending || commEventSaving || commTranscribing"
|
||||
:title="commRecording ? 'Stop and insert transcript' : commTranscribing ? 'Transcribing...' : 'Voice input'"
|
||||
@click="toggleCommRecording"
|
||||
>
|
||||
<svg viewBox="0 0 24 24" class="h-3.5 w-3.5 fill-current">
|
||||
@@ -5484,7 +5588,7 @@ async function decideFeedCard(card: FeedCard, decision: "accepted" | "rejected")
|
||||
|
||||
<button
|
||||
class="btn btn-sm btn-circle border-0 bg-[#5865f2] text-white hover:bg-[#4752c4]"
|
||||
:disabled="commSending || commEventSaving || !commDraft.trim() || (commComposerMode === 'message' && !commSendChannel)"
|
||||
:disabled="commSending || commEventSaving || commRecording || commTranscribing || !commDraft.trim() || (commComposerMode === 'message' && !commSendChannel)"
|
||||
:title="
|
||||
commComposerMode === 'message'
|
||||
? `Send via ${commSendChannel}`
|
||||
@@ -5630,6 +5734,14 @@ async function decideFeedCard(card: FeedCard, decision: "accepted" | "rejected")
|
||||
top: 8px;
|
||||
}
|
||||
|
||||
/* Mic / transcription error text, pinned to the composer's top-left corner
   (the composer is presumably the positioning context — confirm). Width is
   capped so long messages wrap instead of spanning the whole row. */
.comm-mic-error {
  position: absolute;
  left: 12px;
  top: 8px;
  max-width: 65%;
  line-height: 1.2;
}
|
||||
|
||||
.comm-input-textarea::placeholder {
|
||||
color: color-mix(in oklab, var(--color-base-content) 45%, transparent);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user