WIP - realtime advice from OpenAI

This commit is contained in:
Francisco Gaona
2026-01-04 04:55:08 +01:00
parent b8f839cfe4
commit 86fa7a9564

View File

@@ -436,12 +436,30 @@ export class VoiceService {
const { callSid, tenantId, userId } = params;
try {
// Get OpenAI config
// Get OpenAI config - tenantId might be a domain, so look it up
const centralPrisma = getCentralPrisma();
const tenant = await centralPrisma.tenant.findUnique({
where: { id: tenantId },
select: { integrationsConfig: true },
});
// Try to find tenant by domain first (if tenantId is like "tenant1")
let tenant;
if (!tenantId.match(/^[0-9a-f]{8}-[0-9a-f]{4}-/i)) {
// Looks like a domain, not a UUID
const domainRecord = await centralPrisma.domain.findUnique({
where: { domain: tenantId },
include: { tenant: { select: { id: true, integrationsConfig: true } } },
});
tenant = domainRecord?.tenant;
} else {
// It's a UUID
tenant = await centralPrisma.tenant.findUnique({
where: { id: tenantId },
select: { id: true, integrationsConfig: true },
});
}
if (!tenant) {
this.logger.warn(`Tenant not found for identifier: ${tenantId}`);
return;
}
const config = this.getIntegrationConfig(tenant?.integrationsConfig as any);
@@ -451,7 +469,8 @@ export class VoiceService {
}
// Connect to OpenAI Realtime API
const ws = new WebSocket('wss://api.openai.com/v1/realtime', {
const model = config.openai.model || 'gpt-4o-realtime-preview-2024-10-01';
const ws = new WebSocket(`wss://api.openai.com/v1/realtime?model=${model}`, {
headers: {
'Authorization': `Bearer ${config.openai.apiKey}`,
'OpenAI-Beta': 'realtime=v1',
@@ -461,6 +480,9 @@ export class VoiceService {
ws.on('open', () => {
this.logger.log(`OpenAI Realtime connected for call ${callSid}`);
// Add to connections map only after it's open
this.openaiConnections.set(callSid, ws);
// Initialize session
ws.send(JSON.stringify({
type: 'session.update',
@@ -477,19 +499,21 @@ export class VoiceService {
});
ws.on('message', (data: Buffer) => {
this.handleOpenAIMessage(callSid, tenantId, userId, JSON.parse(data.toString()));
// Pass the tenant UUID (tenant.id) instead of the domain string
this.handleOpenAIMessage(callSid, tenant.id, userId, JSON.parse(data.toString()));
});
ws.on('error', (error) => {
this.logger.error(`OpenAI WebSocket error for call ${callSid}`, error);
});
ws.on('close', () => {
this.logger.log(`OpenAI Realtime disconnected for call ${callSid}`);
this.logger.error(`OpenAI WebSocket error for call ${callSid}:`, error);
this.openaiConnections.delete(callSid);
});
this.openaiConnections.set(callSid, ws);
ws.on('close', (code, reason) => {
this.logger.log(`OpenAI Realtime disconnected for call ${callSid} - Code: ${code}, Reason: ${reason.toString()}`);
this.openaiConnections.delete(callSid);
});
// Don't add to connections here - wait for 'open' event
} catch (error) {
this.logger.error('Failed to initialize OpenAI Realtime', error);
}
@@ -563,11 +587,14 @@ export class VoiceService {
message: any,
) {
try {
// Log all message types for debugging
this.logger.debug(`OpenAI message type: ${message.type} for call ${callSid}`);
switch (message.type) {
case 'conversation.item.created':
if (message.item.type === 'message' && message.item.role === 'assistant') {
// AI response generated
this.logger.log(`AI response for call ${callSid}`);
this.logger.log(`AI response for call ${callSid}: ${JSON.stringify(message.item.content)}`);
}
break;
@@ -595,6 +622,7 @@ export class VoiceService {
// Real-time transcript chunk
const deltaState = this.callStates.get(callSid);
if (deltaState?.userId && message.delta) {
this.logger.log(`📝 Transcript chunk: "${message.delta}"`);
// Emit to frontend via gateway
if (this.voiceGateway) {
await this.voiceGateway.notifyAiTranscript(deltaState.userId, {
@@ -603,13 +631,13 @@ export class VoiceService {
isFinal: false,
});
}
this.logger.debug(`Transcript delta for call ${callSid}: ${message.delta}`);
}
break;
case 'response.audio_transcript.done':
// Final transcript
const transcript = message.transcript;
this.logger.log(`✅ Final transcript for call ${callSid}: "${transcript}"`);
await this.updateCallTranscript(callSid, tenantId, transcript);
break;
@@ -618,8 +646,21 @@ export class VoiceService {
await this.handleToolCall(callSid, tenantId, userId, message);
break;
case 'session.created':
this.logger.log(`OpenAI session created for call ${callSid}`);
break;
case 'session.updated':
this.logger.log(`OpenAI session updated for call ${callSid}`);
break;
case 'error':
this.logger.error(`OpenAI error for call ${callSid}: ${JSON.stringify(message.error)}`);
break;
default:
// Handle other message types
// Log other message types for debugging
this.logger.debug(`Unhandled OpenAI message type: ${message.type}`);
break;
}
} catch (error) {