Compare commits
7 Commits
3086f78d34
...
aiprocessb
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
de65aa4025 | ||
|
|
ded413b99b | ||
|
|
20fc90a3fb | ||
|
|
51c82d3d95 | ||
|
|
a4577ddcf3 | ||
|
|
5f3fcef1ec | ||
|
|
16907aadf8 |
8
.env.api
8
.env.api
@@ -5,6 +5,14 @@ DATABASE_URL="mysql://platform:platform@db:3306/platform"
|
||||
CENTRAL_DATABASE_URL="mysql://root:asjdnfqTash37faggT@db:3306/central_platform"
|
||||
REDIS_URL="redis://redis:6379"
|
||||
|
||||
# Meilisearch (optional)
|
||||
MEILI_HOST="http://meilisearch:7700"
|
||||
MEILI_API_KEY="dev-meili-master-key"
|
||||
MEILI_INDEX_PREFIX="tenant_"
|
||||
|
||||
# JWT, multi-tenant hints, etc.
|
||||
JWT_SECRET="devsecret"
|
||||
TENANCY_STRATEGY="single-db"
|
||||
|
||||
|
||||
CENTRAL_SUBDOMAINS="central,admin"
|
||||
|
||||
2
.env.web
2
.env.web
@@ -2,4 +2,4 @@ NUXT_PORT=3001
|
||||
NUXT_HOST=0.0.0.0
|
||||
|
||||
# Point Nuxt to the API container (not localhost)
|
||||
NUXT_PUBLIC_API_BASE_URL=http://jupiter.routebox.co:3000
|
||||
NUXT_PUBLIC_API_BASE_URL=https://tenant1.routebox.co
|
||||
|
||||
324
AI_PROCESS_BUILDER_README.md
Normal file
324
AI_PROCESS_BUILDER_README.md
Normal file
@@ -0,0 +1,324 @@
|
||||
# AI Process Builder + Chat Orchestrator
|
||||
|
||||
A complete implementation of tenant-scoped AI process automation where admins design LangGraph-compiled workflows via React Flow UI, and end-users execute them through a Deep Agent chat orchestrator with deterministic, audited execution.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
### Backend Components
|
||||
|
||||
#### 1. **Deep Agent Orchestrator** ([deep-agent.orchestrator.ts](backend/src/ai-processes/deep-agent.orchestrator.ts))
|
||||
- Uses LangChain/OpenAI to intelligently select processes
|
||||
- Extracts structured inputs from natural language
|
||||
- Generates friendly confirmation messages
|
||||
- Four-step workflow: discover → select → extract → execute
|
||||
|
||||
#### 2. **Graph Compiler** ([ai-processes.compiler.ts](backend/src/ai-processes/ai-processes.compiler.ts))
|
||||
- Validates ReactFlow JSON graphs (Start/End nodes, reachability, cycles)
|
||||
- Compiles to LangGraph-compatible state machines
|
||||
- Validates tool allowlist and JSON schemas (Ajv)
|
||||
- Persists compiled artifact for versioned execution
|
||||
|
||||
#### 3. **Runtime Executor** ([ai-processes.runner.ts](backend/src/ai-processes/ai-processes.runner.ts))
|
||||
- Executes compiled graphs deterministically
|
||||
- Implements 4 node types: LLMDecisionNode, ToolNode, HumanInputNode, End
|
||||
- Handles conditional edges via jsonlogic
|
||||
- Emits real-time events for streaming updates
|
||||
|
||||
#### 4. **Tool Registry** ([tools/tool-registry.ts](backend/src/ai-processes/tools/tool-registry.ts))
|
||||
- Tenant-scoped tool allowlist (database-backed via AiToolConfig)
|
||||
- Demo tools wrapping ObjectService (findAccount, createAccount, etc.)
|
||||
- Context injection (tenantId, userId, knex) for secure execution
|
||||
|
||||
#### 5. **Orchestrator Service** ([ai-processes.orchestrator.service.ts](backend/src/ai-processes/ai-processes.orchestrator.service.ts))
|
||||
- Integrates Deep Agent for process selection
|
||||
- Falls back to standard AI assistant when no processes configured
|
||||
- Manages chat sessions and message history
|
||||
- Streams execution events via SSE
|
||||
|
||||
### Frontend Components
|
||||
|
||||
#### 1. **AIChatBar** ([components/AIChatBar.vue](frontend/components/AIChatBar.vue))
|
||||
- Updated to call `/ai-processes/chat/messages` endpoint
|
||||
- SSE event stream consumer for real-time updates
|
||||
- Displays process selection, node execution, tool calls
|
||||
- Handles NEED_INPUT events for human-in-the-loop
|
||||
|
||||
#### 2. **Process Management UI** ([pages/ai-processes/](frontend/pages/ai-processes/))
|
||||
- List view: displays all processes with versions
|
||||
- Editor view: React Flow integration via iframe + postMessage
|
||||
- Test runner for quick validation
|
||||
|
||||
#### 3. **React Flow Editor** ([ai-processes-editor/src/App.tsx](frontend/ai-processes-editor/src/App.tsx))
|
||||
- Node palette: Start, LLMDecisionNode, ToolNode, HumanInputNode, End
|
||||
- Visual graph designer with drag-drop
|
||||
- Auto-saves to parent window via postMessage
|
||||
- Loads existing graphs for editing
|
||||
|
||||
### Data Models (Objection.js)
|
||||
|
||||
```typescript
|
||||
AiProcess
|
||||
├── id, tenantId, name, description, latestVersion
|
||||
└── relations: versions[], runs[]
|
||||
|
||||
AiProcessVersion
|
||||
├── id, tenantId, processId, version
|
||||
├── graphJson (ReactFlow definition)
|
||||
└── compiledJson (LangGraph artifact)
|
||||
|
||||
AiProcessRun
|
||||
├── id, tenantId, processId, version, status
|
||||
├── inputJson, outputJson, errorJson, stateJson
|
||||
└── currentNodeId (for resume)
|
||||
|
||||
AiChatSession
|
||||
├── id, tenantId, userId
|
||||
└── relations: messages[]
|
||||
|
||||
AiChatMessage
|
||||
├── id, sessionId, role, content
|
||||
└── timestamps
|
||||
|
||||
AiAuditEvent
|
||||
├── id, tenantId, runId, eventType
|
||||
└── payloadJson (full event data)
|
||||
|
||||
AiToolConfig
|
||||
├── id, tenantId, toolName, enabled
|
||||
└── configJson (tool-specific settings)
|
||||
```
|
||||
|
||||
## Demo Process: Register New Pet
|
||||
|
||||
A complete workflow demonstrating conditional logic and tool orchestration:
|
||||
|
||||
1. **Extract Info** (LLMDecisionNode)
|
||||
- Parses user message for pet + owner details
|
||||
- Outputs structured JSON with validation
|
||||
|
||||
2. **Find/Create Account** (Conditional)
|
||||
- Searches for existing account by name/email
|
||||
- Creates new account if not found
|
||||
- Merges results into state
|
||||
|
||||
3. **Find/Create Contact** (Conditional)
|
||||
- Searches for existing contact under account
|
||||
- Creates new contact if not found
|
||||
|
||||
4. **Create Pet** (ToolNode)
|
||||
- Inserts pet record linked to contact
|
||||
- Returns pet ID
|
||||
|
||||
### Seed the Demo Process
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run migrate:tenant -- <tenant-slug>
|
||||
npm run seed:demo-process -- <tenant-slug>
|
||||
```
|
||||
|
||||
### Test the Demo Process
|
||||
|
||||
1. Navigate to `/ai-processes` in your tenant subdomain
|
||||
2. Open "Register New Pet" process
|
||||
3. Click "Test Run" or use the chat bar:
|
||||
|
||||
```
|
||||
User: "Register a dog named Max, breed Golden Retriever, age 3,
|
||||
owned by John Smith, email john@example.com"
|
||||
|
||||
Agent: 🔄 Selected process: Register New Pet
|
||||
I'll register Max (Golden Retriever, 3 years old) for John Smith.
|
||||
|
||||
⚙️ Executing step: Extract Info
|
||||
✓ Extracted pet details
|
||||
|
||||
🔧 Using tool: findAccount
|
||||
ℹ️ Account not found, creating new account
|
||||
|
||||
🔧 Using tool: createAccount
|
||||
✓ Created account for John Smith
|
||||
|
||||
🔧 Using tool: findContact
|
||||
ℹ️ Contact not found, creating new contact
|
||||
|
||||
🔧 Using tool: createContact
|
||||
✓ Created contact: John Smith
|
||||
|
||||
🔧 Using tool: createPet
|
||||
✓ Created pet: Max (ID: pet_1234567890)
|
||||
|
||||
✅ Process completed successfully!
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Process Management (Admin)
|
||||
|
||||
```typescript
|
||||
GET /tenants/:tenantId/ai-processes
|
||||
POST /tenants/:tenantId/ai-processes
|
||||
GET /tenants/:tenantId/ai-processes/:id
|
||||
POST /tenants/:tenantId/ai-processes/:id/versions
|
||||
GET /tenants/:tenantId/ai-processes/:id/versions
|
||||
|
||||
POST /tenants/:tenantId/ai-processes/:id/runs
|
||||
POST /tenants/:tenantId/ai-processes/runs/:runId/resume
|
||||
```
|
||||
|
||||
### Chat Orchestrator (End User)
|
||||
|
||||
```typescript
|
||||
POST /tenants/:tenantId/ai-processes/chat/messages
|
||||
SSE /tenants/:tenantId/ai-processes/stream?sessionId=xxx
|
||||
```
|
||||
|
||||
## Event Stream Types
|
||||
|
||||
```typescript
|
||||
type StreamEvent =
|
||||
| { type: 'agent_started' }
|
||||
| { type: 'processes_listed', data: { count: number } }
|
||||
| { type: 'process_selected', processId: string, version: number }
|
||||
| { type: 'agent_message', data: { message: string } }
|
||||
| { type: 'node_started', nodeId: string }
|
||||
| { type: 'node_completed', nodeId: string }
|
||||
| { type: 'tool_called', toolName: string, nodeId: string }
|
||||
| { type: 'llm_decision', nodeId: string, data: any }
|
||||
| { type: 'need_input', data: { prompt: string, schema: JSONSchema } }
|
||||
| { type: 'final', data: { output: any } }
|
||||
| { type: 'error', data: { error: string } }
|
||||
```
|
||||
|
||||
## Security & Guardrails
|
||||
|
||||
### 1. **Tenancy Isolation**
|
||||
- All queries filtered by `tenantId` (enforced in Objection models)
|
||||
- Tool context includes tenant scope
|
||||
- Database-per-tenant architecture (inherited from platform)
|
||||
|
||||
### 2. **Tool Allowlist**
|
||||
- Two-level validation:
|
||||
- Tenant-level: `AiToolConfig` table (enabled tools per tenant)
|
||||
- Compile-time: validates toolName exists in registry
|
||||
- Runtime check before tool execution
|
||||
|
||||
### 3. **Schema Validation**
|
||||
- LLMDecisionNode output validated against JSON Schema (Ajv)
|
||||
- HumanInputNode input validated before resume
|
||||
- Graph structure validated at compile time
|
||||
|
||||
### 4. **Audit Trail**
|
||||
- Every node execution logged to `ai_audit_events`
|
||||
- Includes: tool calls, LLM decisions, state mutations, errors
|
||||
- Queryable for compliance dashboards
|
||||
|
||||
### 5. **Versioning**
|
||||
- Immutable process versions (create-only)
|
||||
- Runs reference specific version number
|
||||
- Graph definition + compiled artifact stored together
|
||||
|
||||
## Running the System
|
||||
|
||||
### 1. **Run Migrations**
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run migrate:tenant -- tenant1
|
||||
```
|
||||
|
||||
### 2. **Seed Demo Data**
|
||||
|
||||
```bash
|
||||
npm run seed:demo-process -- tenant1
|
||||
```
|
||||
|
||||
### 3. **Start Backend**
|
||||
|
||||
```bash
|
||||
npm run start:dev
|
||||
```
|
||||
|
||||
### 4. **Build Editor (if needed)**
|
||||
|
||||
```bash
|
||||
cd frontend/ai-processes-editor
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
### 5. **Start Frontend**
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### 6. **Access UI**
|
||||
|
||||
- Admin UI: `http://tenant1.localhost:3001/ai-processes`
|
||||
- Chat UI: Available in bottom drawer on any page (⌘K to toggle)
|
||||
|
||||
## Extension Points
|
||||
|
||||
### Adding New Node Types
|
||||
|
||||
1. Define type in [ai-processes.types.ts](backend/src/ai-processes/ai-processes.types.ts)
|
||||
2. Add schema validation in [ai-processes.schemas.ts](backend/src/ai-processes/ai-processes.schemas.ts)
|
||||
3. Implement executor in [ai-processes.runner.ts](backend/src/ai-processes/ai-processes.runner.ts)
|
||||
4. Add UI component in React Flow editor
|
||||
|
||||
### Adding New Tools
|
||||
|
||||
1. Implement handler in [tools/demo-tools.ts](backend/src/ai-processes/tools/demo-tools.ts)
|
||||
2. Register in `demoTools` export
|
||||
3. Add to tenant allowlist via UI or seed script
|
||||
4. Document input/output schema
|
||||
|
||||
### Custom LLM Decision Logic
|
||||
|
||||
Override `llmDecision` callback in [ai-processes.service.ts](backend/src/ai-processes/ai-processes.service.ts):
|
||||
|
||||
```typescript
|
||||
llmDecision: async (node, state) => {
|
||||
const prompt = renderTemplate(node.data.promptTemplate, state);
|
||||
const response = await callOpenAI(prompt, node.data.model);
|
||||
return validateAgainstSchema(response, node.data.outputSchema);
|
||||
}
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Process not appearing in chat
|
||||
|
||||
- Check: `npm run seed:demo-process` completed successfully
|
||||
- Verify: Process exists in database (`select * from ai_processes`)
|
||||
- Check: Tools enabled (`select * from ai_tool_configs`)
|
||||
|
||||
### Graph validation errors
|
||||
|
||||
- Ensure exactly one Start node
|
||||
- Ensure at least one End node
|
||||
- Check all edges reference valid node IDs
|
||||
- Verify tool names match registered tools
|
||||
|
||||
### SSE stream not working
|
||||
|
||||
- Check CORS settings for subdomain routing
|
||||
- Verify `sessionId` returned from initial message
|
||||
- Check browser console for connection errors
|
||||
- Fallback: use polling endpoint (TODO: implement)
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Enhanced Input Extraction**: Use Deep Agent to extract required fields per process
|
||||
2. **Visual Schema Builder**: UI for JSON Schema creation (drag-drop fields)
|
||||
3. **Conditional Edge Builder**: Visual jsonlogic editor
|
||||
4. **Process Analytics**: Dashboard showing run success rates, avg duration
|
||||
5. **Human-in-Loop UI**: Dynamic form renderer for HumanInputNode
|
||||
6. **Process Marketplace**: Share processes across tenants (with permissions)
|
||||
7. **Python Microservice**: Optional Python runtime for native LangGraph support
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
83
DEBUG_INCOMING_CALL.md
Normal file
83
DEBUG_INCOMING_CALL.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Debugging Incoming Call Issue
|
||||
|
||||
## Current Problem
|
||||
- Hear "Connecting to your call" message (TwiML is executing)
|
||||
- No ring on mobile after "Connecting" message
|
||||
- Click Accept button does nothing
|
||||
- Call never connects
|
||||
|
||||
## Root Cause Hypothesis
|
||||
The Twilio Device SDK is likely **NOT receiving the incoming call event** from Twilio's Signaling Server. This could be because:
|
||||
|
||||
1. **Identity Mismatch**: The Device's identity (from JWT token) doesn't match the `<Client>ID</Client>` in TwiML
|
||||
2. **Device Not Registered**: Device registration isn't completing before the call arrives
|
||||
3. **Twilio Signaling Issue**: Device isn't connected to Twilio Signaling Server
|
||||
|
||||
## How to Debug
|
||||
|
||||
### Step 1: Check Device Identity in Console
|
||||
When you open the softphone dialog, **open Browser DevTools Console (F12)**
|
||||
|
||||
You should see logs like:
|
||||
```
|
||||
Token received, creating Device...
|
||||
Token identity: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
Token grants: {voice: {...}}
|
||||
Registering Twilio Device...
|
||||
✓ Twilio Device registered - ready to receive calls
|
||||
Device identity: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
Device state: ready
|
||||
```
|
||||
|
||||
**Note the Device identity value** - e.g., "e6d45fa3-a108-4085-81e5-a8e05e85e6fb"
|
||||
|
||||
### Step 2: Check Backend Logs
|
||||
When you make an inbound call, look for backend logs showing:
|
||||
|
||||
```
|
||||
╔════════════════════════════════════════╗
|
||||
║ === INBOUND CALL RECEIVED ===
|
||||
╚════════════════════════════════════════╝
|
||||
...
|
||||
Client IDs to dial: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
First Client ID format check: "e6d45fa3-a108-4085-81e5-a8e05e85e6fb" (length: 36)
|
||||
```
|
||||
|
||||
### Step 3: Compare Identities
|
||||
The Device identity from frontend console MUST MATCH the Client ID from backend logs.
|
||||
|
||||
**If they match**: The issue is with Twilio Signaling or Device SDK configuration
|
||||
**If they don't match**: We found the bug - identity mismatch
|
||||
|
||||
### Step 4: Monitor Incoming Event
|
||||
When you make the inbound call, keep watching the browser console for:
|
||||
|
||||
```
|
||||
🔔 Twilio Device INCOMING event received: {...}
|
||||
```
|
||||
|
||||
**If this appears**: The Device SDK IS receiving the call, so the Accept button issue is frontend
|
||||
**If this doesn't appear**: The Device SDK is NOT receiving the call, so it's an identity/registration issue
|
||||
|
||||
## What Changed
|
||||
- Frontend now relies on **Twilio Device SDK `incoming` event** (not Socket.IO) for showing incoming call
|
||||
- Added comprehensive logging to Device initialization
|
||||
- Added logging to Accept button handler
|
||||
- Backend logs Device ID format for comparison
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Make an inbound call
|
||||
2. Check browser console for the 5 logs above
|
||||
3. Check backend logs for Client ID
|
||||
4. Look for "🔔 Twilio Device INCOMING event" in browser console
|
||||
5. Try clicking Accept and watch console for "📞 Accepting call" logs
|
||||
6. Report back with:
|
||||
- Device identity from console
|
||||
- Client ID from backend logs
|
||||
- Whether "🔔 Twilio Device INCOMING event" appears
|
||||
- Whether any accept logs appear
|
||||
|
||||
## Important Files
|
||||
- Backend: `/backend/src/voice/voice.controller.ts` (lines 205-210 show Client ID logging)
|
||||
- Frontend: `/frontend/composables/useSoftphone.ts` (Device initialization and incoming handler)
|
||||
173
SOFTPHONE_AI_ASSISTANT.md
Normal file
173
SOFTPHONE_AI_ASSISTANT.md
Normal file
@@ -0,0 +1,173 @@
|
||||
# Softphone AI Assistant - Complete Implementation
|
||||
|
||||
## 🎉 Features Implemented
|
||||
|
||||
### ✅ Real-time AI Call Assistant
|
||||
- **OpenAI Realtime API Integration** - Listens to live calls and provides suggestions
|
||||
- **Audio Streaming** - Twilio Media Streams fork audio to backend for AI processing
|
||||
- **Real-time Transcription** - Speech-to-text during calls
|
||||
- **Smart Suggestions** - AI analyzes conversation and advises the agent
|
||||
|
||||
## 🔧 Architecture
|
||||
|
||||
### Backend Flow
|
||||
```
|
||||
Inbound Call → TwiML (<Start><Stream> + <Dial>)
|
||||
→ Media Stream WebSocket → OpenAI Realtime API
|
||||
→ AI Processing → Socket.IO → Frontend
|
||||
```
|
||||
|
||||
### Key Components
|
||||
|
||||
1. **TwiML Structure** (`voice.controller.ts:226-234`)
|
||||
- `<Start><Stream>` - Forks audio for AI processing
|
||||
- `<Dial><Client>` - Connects call to agent's softphone
|
||||
|
||||
2. **OpenAI Integration** (`voice.service.ts:431-519`)
|
||||
- WebSocket connection to `wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01`
|
||||
- Session config with custom instructions for agent assistance
|
||||
- Handles transcripts and generates suggestions
|
||||
|
||||
3. **AI Message Handler** (`voice.service.ts:609-707`)
|
||||
- Processes OpenAI events (transcripts, suggestions, audio)
|
||||
- Routes suggestions to frontend via Socket.IO
|
||||
- Saves transcripts to database
|
||||
|
||||
4. **Voice Gateway** (`voice.gateway.ts:272-289`)
|
||||
- `notifyAiTranscript()` - Real-time transcript chunks
|
||||
- `notifyAiSuggestion()` - AI suggestions to agent
|
||||
|
||||
### Frontend Components
|
||||
|
||||
1. **Softphone Dialog** (`SoftphoneDialog.vue:104-135`)
|
||||
- AI Assistant section with badge showing suggestion count
|
||||
- Color-coded suggestions (blue=response, green=action, purple=insight)
|
||||
- Animated highlight for newest suggestion
|
||||
|
||||
2. **Softphone Composable** (`useSoftphone.ts:515-535`)
|
||||
- Socket.IO event handlers for `ai:suggestion` and `ai:transcript`
|
||||
- Maintains history of last 10 suggestions
|
||||
- Maintains history of last 50 transcript items
|
||||
|
||||
## 📋 AI Prompt Configuration
|
||||
|
||||
The AI is instructed to:
|
||||
- **Listen, not talk** - It advises the agent, not the caller
|
||||
- **Provide concise suggestions** - 1-2 sentences max
|
||||
- **Use formatted output**:
|
||||
- `💡 Suggestion: [advice]`
|
||||
- `⚠️ Alert: [important notice]`
|
||||
- `📋 Action: [CRM action]`
|
||||
|
||||
## 🎨 UI Features
|
||||
|
||||
### Suggestion Types
|
||||
- **Response** (Blue) - Suggested replies or approaches
|
||||
- **Action** (Green) - Recommended CRM actions
|
||||
- **Insight** (Purple) - Important alerts or observations
|
||||
|
||||
### Visual Feedback
|
||||
- Badge showing number of suggestions
|
||||
- Newest suggestion pulses for attention
|
||||
- Auto-scrolling suggestion list
|
||||
- Timestamp on each suggestion
|
||||
|
||||
## 🔍 How to Monitor
|
||||
|
||||
### 1. Backend Logs
|
||||
```bash
|
||||
# Watch for AI events
|
||||
docker logs -f neo-backend-1 | grep -E "AI|OpenAI|transcript|suggestion"
|
||||
```
|
||||
|
||||
Key log markers:
|
||||
- `📝 Transcript chunk:` - Real-time speech detection
|
||||
- `✅ Final transcript:` - Complete transcript saved
|
||||
- `💡 AI Suggestion:` - AI-generated advice
|
||||
|
||||
### 2. Database
|
||||
```sql
|
||||
-- View call transcripts
|
||||
SELECT call_sid, ai_transcript, created_at
|
||||
FROM calls
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 5;
|
||||
```
|
||||
|
||||
### 3. Frontend Console
|
||||
- Open browser DevTools Console
|
||||
- Watch for: "AI suggestion:", "AI transcript:"
|
||||
|
||||
## 🚀 Testing
|
||||
|
||||
1. **Make a test call** to your Twilio number
|
||||
2. **Accept the call** in the softphone dialog
|
||||
3. **Talk during the call** - Say something like "I need to schedule a follow-up"
|
||||
4. **Watch the UI** - AI suggestions appear in real-time
|
||||
5. **Check logs** - See transcription and suggestion generation
|
||||
|
||||
## 📊 Current Status
|
||||
|
||||
✅ **Working**:
|
||||
- Inbound calls ring softphone
|
||||
- Media stream forks audio to backend
|
||||
- OpenAI processes audio (1300+ packets/call)
|
||||
- AI generates suggestions
|
||||
- Suggestions appear in frontend
|
||||
- Transcripts saved to database
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Required Environment Variables
|
||||
```env
|
||||
# OpenAI API Key (set in tenant integrations config)
|
||||
OPENAI_API_KEY=sk-...
|
||||
|
||||
# Optional overrides
|
||||
OPENAI_MODEL=gpt-4o-realtime-preview-2024-10-01
|
||||
OPENAI_VOICE=alloy
|
||||
```
|
||||
|
||||
### Tenant Configuration
|
||||
Set in Settings > Integrations:
|
||||
- OpenAI API Key
|
||||
- Model (optional)
|
||||
- Voice (optional)
|
||||
|
||||
## 🎯 Next Steps (Optional Enhancements)
|
||||
|
||||
1. **CRM Tool Execution** - Implement actual tool calls (search contacts, create tasks)
|
||||
2. **Audio Response** - Send OpenAI audio back to caller (two-way AI interaction)
|
||||
3. **Sentiment Analysis** - Track call sentiment in real-time
|
||||
4. **Call Summary** - Generate post-call summary automatically
|
||||
5. **Custom Prompts** - Allow agents to customize AI instructions per call type
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### No suggestions appearing?
|
||||
1. Check OpenAI API key is configured
|
||||
2. Verify WebSocket connection logs show "OpenAI Realtime connected"
|
||||
3. Check frontend Socket.IO connection is established
|
||||
4. Verify user ID matches between backend and frontend
|
||||
|
||||
### Transcripts not saving?
|
||||
1. Check tenant database connection
|
||||
2. Verify `calls` table has `ai_transcript` column
|
||||
3. Check logs for "Failed to update transcript" errors
|
||||
|
||||
### OpenAI connection fails?
|
||||
1. Verify API key is valid
|
||||
2. Check model name is correct
|
||||
3. Review WebSocket close codes in logs
|
||||
|
||||
## 📝 Files Modified
|
||||
|
||||
**Backend:**
|
||||
- `/backend/src/voice/voice.service.ts` - OpenAI integration & AI message handling
|
||||
- `/backend/src/voice/voice.controller.ts` - TwiML generation with stream fork
|
||||
- `/backend/src/voice/voice.gateway.ts` - Socket.IO event emission
|
||||
- `/backend/src/main.ts` - Media stream WebSocket handler
|
||||
|
||||
**Frontend:**
|
||||
- `/frontend/components/SoftphoneDialog.vue` - AI suggestions UI
|
||||
- `/frontend/composables/useSoftphone.ts` - Socket.IO event handlers
|
||||
@@ -18,3 +18,6 @@ JWT_EXPIRES_IN="7d"
|
||||
# Application
|
||||
NODE_ENV="development"
|
||||
PORT="3000"
|
||||
|
||||
# Central Admin Subdomains (comma-separated list of subdomains that access the central database)
|
||||
CENTRAL_SUBDOMAINS="central,admin"
|
||||
|
||||
115
backend/insert-demo-process.sql
Normal file
115
backend/insert-demo-process.sql
Normal file
@@ -0,0 +1,115 @@
|
||||
-- Insert demo AI process directly
-- Seeds the "Register New Pet" demo workflow: one ai_processes row, one
-- ai_process_versions row holding the compiled graph artifact, and the
-- tool allowlist entries the graph's ToolNodes require.
SET @process_id = '2d883482-4df0-44d7-b6cf-8541b482afe4';
SET @version_id = '437b1e72-405e-4862-a8bc-f368e554b482';
SET @user_id = 'system';

-- Insert process
INSERT INTO ai_processes (id, name, created_by)
VALUES (@process_id, 'Register New Pet', @user_id);

-- Insert process version with compiled graph
-- NOTE(review): graph_json (the ReactFlow definition) is seeded empty ('{}');
-- only the compiled artifact is provided, so this version cannot be re-edited
-- visually until a real graph_json is saved — confirm this is intended.
INSERT INTO ai_process_versions (id, process_id, version, graph_json, compiled_json, created_by)
VALUES (
  @version_id,
  @process_id,
  1,
  '{}',
  JSON_OBJECT(
    'id', 'register_new_pet',
    'name', 'Register New Pet',
    'description', 'Complete pet registration workflow',
    'allowCycles', false,
    'startNodeId', 'start',
    'endNodeIds', JSON_ARRAY('end'),
    'maxIterations', 50,
    -- Graph nodes: Start -> LLM extraction -> find/create account ->
    -- find/create contact -> create pet -> End.
    'nodes', JSON_ARRAY(
      JSON_OBJECT('id', 'start', 'type', 'Start', 'data', JSON_OBJECT('label', 'Start')),
      -- LLM node: parses the user's free-text message into structured fields,
      -- validated against the outputSchema below.
      JSON_OBJECT('id', 'extract_info', 'type', 'LLMDecisionNode', 'data', JSON_OBJECT(
        'label', 'Extract Info',
        'promptTemplate', 'Extract: petName, species, ownerFirstName, ownerLastName, ownerEmail, accountName from: {{state.message}}',
        'inputKeys', JSON_ARRAY('message'),
        'outputSchema', JSON_OBJECT(
          'type', 'object',
          'properties', JSON_OBJECT(
            'petName', JSON_OBJECT('type', 'string'),
            'species', JSON_OBJECT('type', 'string'),
            'ownerFirstName', JSON_OBJECT('type', 'string'),
            'ownerLastName', JSON_OBJECT('type', 'string'),
            'ownerEmail', JSON_OBJECT('type', 'string'),
            'accountName', JSON_OBJECT('type', 'string')
          ),
          'required', JSON_ARRAY('petName', 'species', 'ownerFirstName', 'ownerLastName')
        )
      )),
      -- Tool nodes: argsTemplate values are {{state.*}} templates resolved at
      -- runtime; outputMapping copies tool-result keys into graph state.
      JSON_OBJECT('id', 'find_account', 'type', 'ToolNode', 'data', JSON_OBJECT(
        'label', 'Find Account',
        'toolName', 'findAccount',
        'argsTemplate', JSON_OBJECT('name', '{{state.accountName}}', 'email', '{{state.ownerEmail}}'),
        'outputMapping', JSON_OBJECT('found', 'accountFound', 'accountId', 'accountId')
      )),
      JSON_OBJECT('id', 'create_account', 'type', 'ToolNode', 'data', JSON_OBJECT(
        'label', 'Create Account',
        'toolName', 'createAccount',
        'argsTemplate', JSON_OBJECT('name', '{{state.accountName}}', 'email', '{{state.ownerEmail}}'),
        'outputMapping', JSON_OBJECT('accountId', 'accountId')
      )),
      JSON_OBJECT('id', 'find_contact', 'type', 'ToolNode', 'data', JSON_OBJECT(
        'label', 'Find Contact',
        'toolName', 'findContact',
        'argsTemplate', JSON_OBJECT(
          'firstName', '{{state.ownerFirstName}}',
          'lastName', '{{state.ownerLastName}}',
          'email', '{{state.ownerEmail}}',
          'accountId', '{{state.accountId}}'
        ),
        'outputMapping', JSON_OBJECT('found', 'contactFound', 'contactId', 'contactId')
      )),
      JSON_OBJECT('id', 'create_contact', 'type', 'ToolNode', 'data', JSON_OBJECT(
        'label', 'Create Contact',
        'toolName', 'createContact',
        'argsTemplate', JSON_OBJECT(
          'firstName', '{{state.ownerFirstName}}',
          'lastName', '{{state.ownerLastName}}',
          'email', '{{state.ownerEmail}}',
          'accountId', '{{state.accountId}}'
        ),
        'outputMapping', JSON_OBJECT('contactId', 'contactId')
      )),
      JSON_OBJECT('id', 'create_pet', 'type', 'ToolNode', 'data', JSON_OBJECT(
        'label', 'Create Pet',
        'toolName', 'createPet',
        'argsTemplate', JSON_OBJECT(
          'name', '{{state.petName}}',
          'species', '{{state.species}}',
          'ownerId', '{{state.contactId}}'
        ),
        'outputMapping', JSON_OBJECT('petId', 'petId')
      )),
      JSON_OBJECT('id', 'end', 'type', 'End', 'data', JSON_OBJECT('label', 'End'))
    ),
    -- Edges: conditional branches (e3/e4, e6/e7) are jsonlogic expressions
    -- testing the accountFound / contactFound state flags.
    'edges', JSON_ARRAY(
      JSON_OBJECT('id', 'e1', 'source', 'start', 'target', 'extract_info'),
      JSON_OBJECT('id', 'e2', 'source', 'extract_info', 'target', 'find_account'),
      JSON_OBJECT('id', 'e3', 'source', 'find_account', 'target', 'find_contact', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'accountFound'), true))),
      JSON_OBJECT('id', 'e4', 'source', 'find_account', 'target', 'create_account', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'accountFound'), false))),
      JSON_OBJECT('id', 'e5', 'source', 'create_account', 'target', 'find_contact'),
      JSON_OBJECT('id', 'e6', 'source', 'find_contact', 'target', 'create_pet', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'contactFound'), true))),
      JSON_OBJECT('id', 'e7', 'source', 'find_contact', 'target', 'create_contact', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'contactFound'), false))),
      JSON_OBJECT('id', 'e8', 'source', 'create_contact', 'target', 'create_pet'),
      JSON_OBJECT('id', 'e9', 'source', 'create_pet', 'target', 'end')
    )
  ),
  @user_id
);

-- Insert tool allowlist
-- Enables every tool the graph references; idempotent via the duplicate-key
-- clause (re-running only flips enabled back to true).
INSERT INTO ai_tool_configs (id, tool_name, enabled)
VALUES
  (UUID(), 'findAccount', true),
  (UUID(), 'createAccount', true),
  (UUID(), 'findContact', true),
  (UUID(), 'createContact', true),
  (UUID(), 'createPet', true)
ON DUPLICATE KEY UPDATE enabled = true;

SELECT 'Demo process inserted successfully!' as result;
|
||||
@@ -0,0 +1,29 @@
|
||||
// Migration: create the `custom_migrations` table, which records tenant-scoped
// ad-hoc schema changes (raw SQL snippets) together with their execution status.
exports.up = (knex) =>
  knex.schema.createTable('custom_migrations', (table) => {
    // Surrogate key generated server-side via MySQL's UUID().
    table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
    table.uuid('tenantId').notNullable();
    table.string('name', 255).notNullable();
    table.text('description');
    // Category of schema change this entry represents.
    table.enum('type', [
      'create_table',
      'add_column',
      'alter_column',
      'add_index',
      'drop_table',
      'custom',
    ]).notNullable();
    // The raw SQL to run for this migration.
    table.text('sql').notNullable();
    // Lifecycle state; rows start out pending until executed.
    table.enum('status', ['pending', 'executed', 'failed']).defaultTo('pending');
    table.timestamp('executedAt').nullable();
    table.text('error').nullable();
    // Adds created_at / updated_at (knex's snake_case defaults) with NOW() defaults.
    table.timestamps(true, true);

    table.index(['tenantId']);
    table.index(['status']);
    // Indexes the created_at column produced by timestamps() above.
    table.index(['created_at']);
  });

// Rollback: drop the table if it exists.
exports.down = (knex) => knex.schema.dropTableIfExists('custom_migrations');
|
||||
@@ -0,0 +1,103 @@
|
||||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
// Add orgWideDefault to object_definitions
|
||||
.alterTable('object_definitions', (table) => {
|
||||
table
|
||||
.enum('orgWideDefault', ['private', 'public_read', 'public_read_write'])
|
||||
.defaultTo('private')
|
||||
.notNullable();
|
||||
})
|
||||
// Create role_object_permissions table
|
||||
.createTable('role_object_permissions', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('roleId').notNullable();
|
||||
table.uuid('objectDefinitionId').notNullable();
|
||||
table.boolean('canCreate').defaultTo(false);
|
||||
table.boolean('canRead').defaultTo(false);
|
||||
table.boolean('canEdit').defaultTo(false);
|
||||
table.boolean('canDelete').defaultTo(false);
|
||||
table.boolean('canViewAll').defaultTo(false);
|
||||
table.boolean('canModifyAll').defaultTo(false);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('roleId')
|
||||
.references('id')
|
||||
.inTable('roles')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('objectDefinitionId')
|
||||
.references('id')
|
||||
.inTable('object_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table.unique(['roleId', 'objectDefinitionId']);
|
||||
table.index(['roleId']);
|
||||
table.index(['objectDefinitionId']);
|
||||
})
|
||||
// Create role_field_permissions table
|
||||
.createTable('role_field_permissions', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('roleId').notNullable();
|
||||
table.uuid('fieldDefinitionId').notNullable();
|
||||
table.boolean('canRead').defaultTo(true);
|
||||
table.boolean('canEdit').defaultTo(true);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('roleId')
|
||||
.references('id')
|
||||
.inTable('roles')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('fieldDefinitionId')
|
||||
.references('id')
|
||||
.inTable('field_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table.unique(['roleId', 'fieldDefinitionId']);
|
||||
table.index(['roleId']);
|
||||
table.index(['fieldDefinitionId']);
|
||||
})
|
||||
// Create record_shares table for sharing specific records
|
||||
.createTable('record_shares', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('objectDefinitionId').notNullable();
|
||||
table.uuid('recordId').notNullable();
|
||||
table.uuid('granteeUserId').notNullable();
|
||||
table.uuid('grantedByUserId').notNullable();
|
||||
table.json('accessLevel').notNullable(); // { canRead, canEdit, canDelete }
|
||||
table.timestamp('expiresAt').nullable();
|
||||
table.timestamp('revokedAt').nullable();
|
||||
table.timestamp('createdAt').defaultTo(knex.fn.now());
|
||||
table.timestamp('updatedAt').defaultTo(knex.fn.now());
|
||||
|
||||
table
|
||||
.foreign('objectDefinitionId')
|
||||
.references('id')
|
||||
.inTable('object_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('granteeUserId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('grantedByUserId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
table.index(['objectDefinitionId', 'recordId']);
|
||||
table.index(['granteeUserId']);
|
||||
table.index(['expiresAt']);
|
||||
table.index(['revokedAt']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.dropTableIfExists('record_shares')
|
||||
.dropTableIfExists('role_field_permissions')
|
||||
.dropTableIfExists('role_object_permissions')
|
||||
.alterTable('object_definitions', (table) => {
|
||||
table.dropColumn('orgWideDefault');
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* @param { import("knex").Knex } knex
|
||||
* @returns { Promise<void> }
|
||||
*/
|
||||
exports.up = async function (knex) {
|
||||
// Create calls table for tracking voice calls
|
||||
await knex.schema.createTable('calls', (table) => {
|
||||
table.string('id', 36).primary();
|
||||
table.string('call_sid', 100).unique().notNullable().comment('Twilio call SID');
|
||||
table.enum('direction', ['inbound', 'outbound']).notNullable();
|
||||
table.string('from_number', 20).notNullable();
|
||||
table.string('to_number', 20).notNullable();
|
||||
table.enum('status', [
|
||||
'queued',
|
||||
'ringing',
|
||||
'in-progress',
|
||||
'completed',
|
||||
'busy',
|
||||
'failed',
|
||||
'no-answer',
|
||||
'canceled'
|
||||
]).notNullable().defaultTo('queued');
|
||||
table.integer('duration_seconds').unsigned().nullable();
|
||||
table.string('recording_url', 500).nullable();
|
||||
table.text('ai_transcript').nullable().comment('Full transcript from OpenAI');
|
||||
table.text('ai_summary').nullable().comment('AI-generated summary');
|
||||
table.json('ai_insights').nullable().comment('Structured insights from AI');
|
||||
table.string('user_id', 36).notNullable().comment('User who handled the call');
|
||||
table.timestamp('started_at').nullable();
|
||||
table.timestamp('ended_at').nullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
|
||||
// Indexes
|
||||
table.index('call_sid');
|
||||
table.index('user_id');
|
||||
table.index('status');
|
||||
table.index('direction');
|
||||
table.index(['created_at', 'user_id']);
|
||||
|
||||
// Foreign key to users table
|
||||
table.foreign('user_id').references('id').inTable('users').onDelete('CASCADE');
|
||||
});
|
||||
|
||||
console.log('✅ Created calls table');
|
||||
};
|
||||
|
||||
/**
|
||||
* @param { import("knex").Knex } knex
|
||||
* @returns { Promise<void> }
|
||||
*/
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('calls');
|
||||
console.log('✅ Dropped calls table');
|
||||
};
|
||||
@@ -0,0 +1,207 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('contacts', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.string('firstName', 100).notNullable();
|
||||
table.string('lastName', 100).notNullable();
|
||||
table.uuid('accountId').notNullable();
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('accountId')
|
||||
.references('id')
|
||||
.inTable('accounts')
|
||||
.onDelete('CASCADE');
|
||||
table.index(['accountId']);
|
||||
table.index(['lastName', 'firstName']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('contact_details', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.string('relatedObjectType', 100).notNullable();
|
||||
table.uuid('relatedObjectId').notNullable();
|
||||
table.string('detailType', 50).notNullable();
|
||||
table.string('label', 100);
|
||||
table.text('value').notNullable();
|
||||
table.boolean('isPrimary').defaultTo(false);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table.index(['relatedObjectType', 'relatedObjectId']);
|
||||
table.index(['detailType']);
|
||||
});
|
||||
|
||||
const [contactObjectId] = await knex('object_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
apiName: 'Contact',
|
||||
label: 'Contact',
|
||||
pluralLabel: 'Contacts',
|
||||
description: 'Standard Contact object',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
const contactObjectDefId =
|
||||
contactObjectId ||
|
||||
(await knex('object_definitions').where('apiName', 'Contact').first()).id;
|
||||
|
||||
await knex('field_definitions').insert([
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'firstName',
|
||||
label: 'First Name',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 1,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'lastName',
|
||||
label: 'Last Name',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 2,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'accountId',
|
||||
label: 'Account',
|
||||
type: 'Reference',
|
||||
referenceObject: 'Account',
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 3,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
]);
|
||||
|
||||
const [contactDetailObjectId] = await knex('object_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
apiName: 'ContactDetail',
|
||||
label: 'Contact Detail',
|
||||
pluralLabel: 'Contact Details',
|
||||
description: 'Polymorphic contact detail object',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
const contactDetailObjectDefId =
|
||||
contactDetailObjectId ||
|
||||
(await knex('object_definitions').where('apiName', 'ContactDetail').first())
|
||||
.id;
|
||||
|
||||
const contactDetailRelationObjects = ['Account', 'Contact']
|
||||
|
||||
await knex('field_definitions').insert([
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'relatedObjectType',
|
||||
label: 'Related Object Type',
|
||||
type: 'PICKLIST',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 1,
|
||||
ui_metadata: JSON.stringify({
|
||||
options: contactDetailRelationObjects.map((value) => ({ label: value, value })),
|
||||
}),
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'relatedObjectId',
|
||||
label: 'Related Object ID',
|
||||
type: 'LOOKUP',
|
||||
length: 36,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 2,
|
||||
ui_metadata: JSON.stringify({
|
||||
relationObjects: contactDetailRelationObjects,
|
||||
relationTypeField: 'relatedObjectType',
|
||||
relationDisplayField: 'name',
|
||||
}),
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'detailType',
|
||||
label: 'Detail Type',
|
||||
type: 'String',
|
||||
length: 50,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 3,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'label',
|
||||
label: 'Label',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 4,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'value',
|
||||
label: 'Value',
|
||||
type: 'Text',
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 5,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'isPrimary',
|
||||
label: 'Primary',
|
||||
type: 'Boolean',
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 6,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
]);
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('contact_details');
|
||||
await knex.schema.dropTableIfExists('contacts');
|
||||
};
|
||||
@@ -0,0 +1,101 @@
|
||||
exports.up = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
const relationObjects = ['Account', 'Contact'];
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectType',
|
||||
})
|
||||
.update({
|
||||
type: 'PICKLIST',
|
||||
length: 100,
|
||||
isSystem: false,
|
||||
ui_metadata: JSON.stringify({
|
||||
options: relationObjects.map((value) => ({ label: value, value })),
|
||||
}),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectId',
|
||||
})
|
||||
.update({
|
||||
type: 'LOOKUP',
|
||||
length: 36,
|
||||
isSystem: false,
|
||||
ui_metadata: JSON.stringify({
|
||||
relationObjects,
|
||||
relationTypeField: 'relatedObjectType',
|
||||
relationDisplayField: 'name',
|
||||
}),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.whereIn('apiName', [
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.andWhere({ objectDefinitionId: contactDetailObject.id })
|
||||
.update({
|
||||
isSystem: false,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectType',
|
||||
})
|
||||
.update({
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isSystem: true,
|
||||
ui_metadata: null,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectId',
|
||||
})
|
||||
.update({
|
||||
type: 'String',
|
||||
length: 36,
|
||||
isSystem: true,
|
||||
ui_metadata: null,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.whereIn('apiName', [
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.andWhere({ objectDefinitionId: contactDetailObject.id })
|
||||
.update({
|
||||
isSystem: true,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,45 @@
|
||||
exports.up = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({ objectDefinitionId: contactDetailObject.id })
|
||||
.whereIn('apiName', [
|
||||
'relatedObjectType',
|
||||
'relatedObjectId',
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.update({
|
||||
isSystem: false,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({ objectDefinitionId: contactDetailObject.id })
|
||||
.whereIn('apiName', [
|
||||
'relatedObjectType',
|
||||
'relatedObjectId',
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.update({
|
||||
isSystem: true,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,62 @@
|
||||
exports.up = async function (knex) {
|
||||
// Add ownerId column to contacts
|
||||
await knex.schema.alterTable('contacts', (table) => {
|
||||
table.uuid('ownerId');
|
||||
table
|
||||
.foreign('ownerId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('SET NULL');
|
||||
table.index(['ownerId']);
|
||||
});
|
||||
|
||||
// Add ownerId field definition metadata for Contact object
|
||||
const contactObject = await knex('object_definitions')
|
||||
.where('apiName', 'Contact')
|
||||
.first();
|
||||
|
||||
if (contactObject) {
|
||||
const existingField = await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
})
|
||||
.first();
|
||||
|
||||
if (!existingField) {
|
||||
await knex('field_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
label: 'Owner',
|
||||
type: 'Reference',
|
||||
referenceObject: 'User',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 4,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactObject = await knex('object_definitions')
|
||||
.where('apiName', 'Contact')
|
||||
.first();
|
||||
|
||||
if (contactObject) {
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
})
|
||||
.delete();
|
||||
}
|
||||
|
||||
await knex.schema.alterTable('contacts', (table) => {
|
||||
table.dropForeign(['ownerId']);
|
||||
table.dropColumn('ownerId');
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,72 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('ai_processes', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('name').notNullable();
|
||||
table.text('description');
|
||||
table.integer('latest_version').notNullable().defaultTo(1);
|
||||
table.string('created_by').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_process_versions', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('process_id').notNullable();
|
||||
table.integer('version').notNullable();
|
||||
table.json('graph_json').notNullable();
|
||||
table.json('compiled_json').notNullable();
|
||||
table.string('created_by').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.unique(['process_id', 'version']);
|
||||
table.index(['process_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_process_runs', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('process_id').notNullable();
|
||||
table.integer('version').notNullable();
|
||||
table.string('status').notNullable();
|
||||
table.json('input_json').notNullable();
|
||||
table.json('output_json');
|
||||
table.json('error_json');
|
||||
table.json('state_json');
|
||||
table.string('current_node_id');
|
||||
table.timestamp('started_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('ended_at');
|
||||
table.index(['process_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_chat_sessions', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('user_id').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['user_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_chat_messages', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('session_id').notNullable();
|
||||
table.string('role').notNullable();
|
||||
table.text('content').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['session_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_audit_events', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('run_id').notNullable();
|
||||
table.string('event_type').notNullable();
|
||||
table.json('payload_json').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['run_id']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('ai_audit_events');
|
||||
await knex.schema.dropTableIfExists('ai_chat_messages');
|
||||
await knex.schema.dropTableIfExists('ai_chat_sessions');
|
||||
await knex.schema.dropTableIfExists('ai_process_runs');
|
||||
await knex.schema.dropTableIfExists('ai_process_versions');
|
||||
await knex.schema.dropTableIfExists('ai_processes');
|
||||
};
|
||||
@@ -0,0 +1,14 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('ai_tool_configs', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('tool_name').notNullable().unique();
|
||||
table.boolean('enabled').notNullable().defaultTo(true);
|
||||
table.json('config_json');
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('ai_tool_configs');
|
||||
};
|
||||
1375
backend/package-lock.json
generated
1375
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -23,9 +23,15 @@
|
||||
"migrate:rollback": "knex migrate:rollback --knexfile=knexfile.js",
|
||||
"migrate:status": "ts-node -r tsconfig-paths/register scripts/check-migration-status.ts",
|
||||
"migrate:tenant": "ts-node -r tsconfig-paths/register scripts/migrate-tenant.ts",
|
||||
"migrate:all-tenants": "ts-node -r tsconfig-paths/register scripts/migrate-all-tenants.ts"
|
||||
"migrate:all-tenants": "ts-node -r tsconfig-paths/register scripts/migrate-all-tenants.ts",
|
||||
"seed:demo-process": "ts-node -r tsconfig-paths/register scripts/seed-demo-process.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@casl/ability": "^6.7.5",
|
||||
"@fastify/websocket": "^10.0.1",
|
||||
"@langchain/core": "^1.1.12",
|
||||
"@langchain/langgraph": "^1.0.15",
|
||||
"@langchain/openai": "^1.2.1",
|
||||
"@nestjs/bullmq": "^10.1.0",
|
||||
"@nestjs/common": "^10.3.0",
|
||||
"@nestjs/config": "^3.1.1",
|
||||
@@ -33,19 +39,31 @@
|
||||
"@nestjs/jwt": "^10.2.0",
|
||||
"@nestjs/passport": "^10.0.3",
|
||||
"@nestjs/platform-fastify": "^10.3.0",
|
||||
"@nestjs/platform-socket.io": "^10.4.20",
|
||||
"@nestjs/serve-static": "^4.0.2",
|
||||
"@nestjs/websockets": "^10.4.20",
|
||||
"@prisma/client": "^5.8.0",
|
||||
"@types/json-logic-js": "^2.0.8",
|
||||
"ajv": "^8.17.1",
|
||||
"ajv-formats": "^3.0.1",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.1.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.1",
|
||||
"ioredis": "^5.3.2",
|
||||
"json-logic-js": "^2.0.5",
|
||||
"knex": "^3.1.0",
|
||||
"langchain": "^1.2.7",
|
||||
"mysql2": "^3.15.3",
|
||||
"objection": "^3.1.5",
|
||||
"openai": "^6.15.0",
|
||||
"passport": "^0.7.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"reflect-metadata": "^0.2.1",
|
||||
"rxjs": "^7.8.1"
|
||||
"rxjs": "^7.8.1",
|
||||
"socket.io": "^4.8.3",
|
||||
"twilio": "^5.11.1",
|
||||
"ws": "^8.18.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/cli": "^10.3.0",
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE `tenants` ADD COLUMN `integrationsConfig` JSON NULL;
|
||||
@@ -24,17 +24,18 @@ model User {
|
||||
}
|
||||
|
||||
model Tenant {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
slug String @unique // Used for identification
|
||||
dbHost String // Database host
|
||||
dbPort Int @default(3306)
|
||||
dbName String // Database name
|
||||
dbUsername String // Database username
|
||||
dbPassword String // Encrypted database password
|
||||
status String @default("active") // active, suspended, deleted
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
slug String @unique // Used for identification
|
||||
dbHost String // Database host
|
||||
dbPort Int @default(3306)
|
||||
dbName String // Database name
|
||||
dbUsername String // Database username
|
||||
dbPassword String // Encrypted database password
|
||||
integrationsConfig Json? // Encrypted JSON config for external services (Twilio, OpenAI, etc.)
|
||||
status String @default("active") // active, suspended, deleted
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
domains Domain[]
|
||||
|
||||
|
||||
@@ -125,6 +125,7 @@ model FieldDefinition {
|
||||
isSystem Boolean @default(false)
|
||||
isCustom Boolean @default(true)
|
||||
displayOrder Int @default(0)
|
||||
uiMetadata Json? @map("ui_metadata")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
|
||||
@@ -144,12 +145,137 @@ model Account {
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
owner User @relation(fields: [ownerId], references: [id])
|
||||
owner User @relation(fields: [ownerId], references: [id])
|
||||
contacts Contact[]
|
||||
|
||||
@@index([ownerId])
|
||||
@@map("accounts")
|
||||
}
|
||||
|
||||
model Contact {
|
||||
id String @id @default(uuid())
|
||||
firstName String
|
||||
lastName String
|
||||
accountId String
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
account Account @relation(fields: [accountId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([accountId])
|
||||
@@map("contacts")
|
||||
}
|
||||
|
||||
model ContactDetail {
|
||||
id String @id @default(uuid())
|
||||
relatedObjectType String
|
||||
relatedObjectId String
|
||||
detailType String
|
||||
label String?
|
||||
value String
|
||||
isPrimary Boolean @default(false)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([relatedObjectType, relatedObjectId])
|
||||
@@map("contact_details")
|
||||
}
|
||||
|
||||
// AI Process Builder + Chat Orchestrator
|
||||
model AiProcess {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
name String
|
||||
description String? @db.Text
|
||||
latestVersion Int @default(1) @map("latest_version")
|
||||
createdBy String @map("created_by")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
|
||||
versions AiProcessVersion[]
|
||||
runs AiProcessRun[]
|
||||
|
||||
@@index([tenantId])
|
||||
@@map("ai_processes")
|
||||
}
|
||||
|
||||
model AiProcessVersion {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
processId String @map("process_id")
|
||||
version Int
|
||||
graphJson Json @map("graph_json")
|
||||
compiledJson Json @map("compiled_json")
|
||||
createdBy String @map("created_by")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
process AiProcess @relation(fields: [processId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([processId, version])
|
||||
@@index([tenantId])
|
||||
@@map("ai_process_versions")
|
||||
}
|
||||
|
||||
model AiProcessRun {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
processId String @map("process_id")
|
||||
version Int
|
||||
status String
|
||||
inputJson Json @map("input_json")
|
||||
outputJson Json? @map("output_json")
|
||||
errorJson Json? @map("error_json")
|
||||
stateJson Json? @map("state_json")
|
||||
currentNodeId String? @map("current_node_id")
|
||||
startedAt DateTime @default(now()) @map("started_at")
|
||||
endedAt DateTime? @map("ended_at")
|
||||
|
||||
process AiProcess @relation(fields: [processId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([processId])
|
||||
@@map("ai_process_runs")
|
||||
}
|
||||
|
||||
model AiChatSession {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
userId String @map("user_id")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
messages AiChatMessage[]
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([userId])
|
||||
@@map("ai_chat_sessions")
|
||||
}
|
||||
|
||||
model AiChatMessage {
|
||||
id String @id @default(uuid())
|
||||
sessionId String @map("session_id")
|
||||
role String
|
||||
content String @db.Text
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
session AiChatSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([sessionId])
|
||||
@@map("ai_chat_messages")
|
||||
}
|
||||
|
||||
model AiAuditEvent {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
runId String @map("run_id")
|
||||
eventType String @map("event_type")
|
||||
payloadJson Json @map("payload_json")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([runId])
|
||||
@@map("ai_audit_events")
|
||||
}
|
||||
|
||||
// Application Builder
|
||||
model App {
|
||||
id String @id @default(uuid())
|
||||
|
||||
@@ -1,8 +1,53 @@
|
||||
# Tenant Migration Scripts
|
||||
# Tenant Migration & Admin Scripts
|
||||
|
||||
This directory contains scripts for managing database migrations across all tenants in the multi-tenant platform.
|
||||
This directory contains scripts for managing database migrations across all tenants and creating admin users in the multi-tenant platform.
|
||||
|
||||
## Available Scripts
|
||||
## Admin User Management
|
||||
|
||||
### Create Central Admin User
|
||||
|
||||
```bash
|
||||
npm run create-central-admin
|
||||
```
|
||||
|
||||
Creates an administrator user in the **central database**. Central admins can:
|
||||
- Manage tenants (create, update, delete)
|
||||
- Access platform-wide administration features
|
||||
- View all tenant information
|
||||
- Manage tenant provisioning
|
||||
|
||||
**Interactive Mode:**
|
||||
```bash
|
||||
npm run create-central-admin
|
||||
# You will be prompted for:
|
||||
# - Email
|
||||
# - Password
|
||||
# - First Name (optional)
|
||||
# - Last Name (optional)
|
||||
# - Role (admin or superadmin)
|
||||
```
|
||||
|
||||
**Non-Interactive Mode (using environment variables):**
|
||||
```bash
|
||||
EMAIL=admin@example.com PASSWORD=securepass123 FIRST_NAME=John LAST_NAME=Doe ROLE=superadmin npm run create-central-admin
|
||||
```
|
||||
|
||||
**Logging In as Central Admin:**
|
||||
1. Access the application using a central subdomain (e.g., `central.yourdomain.com` or `admin.yourdomain.com`)
|
||||
2. Enter your central admin credentials
|
||||
3. You'll be authenticated against the central database (not a tenant database)
|
||||
|
||||
**Note:** The system automatically detects if you're logging in from a central subdomain based on the `CENTRAL_SUBDOMAINS` environment variable (defaults to `central,admin`). No special UI or configuration is needed on the frontend.
|
||||
|
||||
### Create Tenant User
|
||||
|
||||
For creating users within a specific tenant database, use:
|
||||
```bash
|
||||
npm run create-tenant-user <tenant-slug>
|
||||
# (Note: This script may need to be created or already exists)
|
||||
```
|
||||
|
||||
## Migration Scripts
|
||||
|
||||
### 1. Create a New Migration
|
||||
|
||||
|
||||
@@ -43,8 +43,9 @@ function decryptPassword(encryptedPassword: string): string {
|
||||
function createTenantKnexConnection(tenant: any): Knex {
|
||||
const decryptedPassword = decryptPassword(tenant.dbPassword);
|
||||
|
||||
// Replace 'db' hostname with 'localhost' when running outside Docker
|
||||
const dbHost = tenant.dbHost === 'db' ? 'localhost' : tenant.dbHost;
|
||||
// Use Docker hostname 'db' when running inside container
|
||||
// The dbHost will be 'db' for Docker connections or 'localhost' for local development
|
||||
const dbHost = tenant.dbHost;
|
||||
|
||||
return knex({
|
||||
client: 'mysql2',
|
||||
@@ -82,7 +83,7 @@ async function migrateTenant(tenant: any): Promise<void> {
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`❌ ${tenant.name}: Migration failed:`, error.message);
|
||||
console.error(`❌ ${tenant.name}: Migration failed:`, error);
|
||||
throw error;
|
||||
} finally {
|
||||
await tenantKnex.destroy();
|
||||
|
||||
181
backend/scripts/seed-default-roles.ts
Normal file
181
backend/scripts/seed-default-roles.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import { Knex } from 'knex';
|
||||
import * as knexLib from 'knex';
|
||||
|
||||
/**
|
||||
* Create a Knex connection for tenant database
|
||||
*/
|
||||
function createKnexConnection(database: string): Knex {
|
||||
return knexLib.default({
|
||||
client: 'mysql2',
|
||||
connection: {
|
||||
host: process.env.DB_HOST || 'db',
|
||||
port: parseInt(process.env.DB_PORT || '3306'),
|
||||
user: 'root',
|
||||
password: 'asjdnfqTash37faggT',
|
||||
database: database,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * A seed-role definition: the role's identity plus per-object permission
 * flags keyed by object API name. The seed data below only uses the '*'
 * wildcard key, which seedRolesForTenant applies to every object
 * definition found in the tenant database.
 */
interface RoleWithPermissions {
  // Unique role name; used as the idempotency key when seeding.
  name: string;
  // Human-readable summary of what the role allows.
  description: string;
  objectPermissions: {
    [objectApiName: string]: {
      canCreate: boolean;
      canRead: boolean;
      canEdit: boolean;
      canDelete: boolean;
      // Presumably grants read across all records regardless of ownership
      // (see role descriptions referencing OWD) — confirm against the
      // permission-checking code.
      canViewAll: boolean;
      // Presumably grants write across all records regardless of ownership.
      canModifyAll: boolean;
    };
  };
}
|
||||
|
||||
/**
 * The three baseline roles seeded into every tenant. Each uses the '*'
 * wildcard, which seedRolesForTenant expands into one permission row per
 * object definition present in the tenant database.
 */
const DEFAULT_ROLES: RoleWithPermissions[] = [
  {
    // Full access, including view/modify across all records.
    name: 'System Administrator',
    description: 'Full access to all objects and records. Can view and modify all data.',
    objectPermissions: {
      '*': {
        canCreate: true,
        canRead: true,
        canEdit: true,
        canDelete: true,
        canViewAll: true,
        canModifyAll: true,
      },
    },
  },
  {
    // Full CRUD on own records; no cross-record view/modify.
    name: 'Standard User',
    description: 'Can create, read, edit, and delete own records. Respects OWD settings.',
    objectPermissions: {
      '*': {
        canCreate: true,
        canRead: true,
        canEdit: true,
        canDelete: true,
        canViewAll: false,
        canModifyAll: false,
      },
    },
  },
  {
    // Read-only access; no write permissions at all.
    name: 'Read Only',
    description: 'Can only read records based on OWD settings. No create, edit, or delete.',
    objectPermissions: {
      '*': {
        canCreate: false,
        canRead: true,
        canEdit: false,
        canDelete: false,
        canViewAll: false,
        canModifyAll: false,
      },
    },
  },
];
|
||||
|
||||
async function seedRolesForTenant(knex: Knex, tenantName: string) {
|
||||
console.log(`\n🌱 Seeding roles for tenant: ${tenantName}`);
|
||||
|
||||
// Get all object definitions
|
||||
const objectDefinitions = await knex('object_definitions').select('id', 'apiName');
|
||||
|
||||
for (const roleData of DEFAULT_ROLES) {
|
||||
// Check if role already exists
|
||||
const existingRole = await knex('roles')
|
||||
.where({ name: roleData.name })
|
||||
.first();
|
||||
|
||||
let roleId: string;
|
||||
|
||||
if (existingRole) {
|
||||
console.log(` ℹ️ Role "${roleData.name}" already exists, skipping...`);
|
||||
roleId = existingRole.id;
|
||||
} else {
|
||||
// Create role
|
||||
await knex('roles').insert({
|
||||
name: roleData.name,
|
||||
guardName: 'api',
|
||||
description: roleData.description,
|
||||
});
|
||||
|
||||
// Get the inserted role
|
||||
const newRole = await knex('roles')
|
||||
.where({ name: roleData.name })
|
||||
.first();
|
||||
|
||||
roleId = newRole.id;
|
||||
console.log(` ✅ Created role: ${roleData.name}`);
|
||||
}
|
||||
|
||||
// Create object permissions for all objects
|
||||
const wildcardPermissions = roleData.objectPermissions['*'];
|
||||
|
||||
for (const objectDef of objectDefinitions) {
|
||||
// Check if permission already exists
|
||||
const existingPermission = await knex('role_object_permissions')
|
||||
.where({
|
||||
roleId: roleId,
|
||||
objectDefinitionId: objectDef.id,
|
||||
})
|
||||
.first();
|
||||
|
||||
if (!existingPermission) {
|
||||
await knex('role_object_permissions').insert({
|
||||
roleId: roleId,
|
||||
objectDefinitionId: objectDef.id,
|
||||
canCreate: wildcardPermissions.canCreate,
|
||||
canRead: wildcardPermissions.canRead,
|
||||
canEdit: wildcardPermissions.canEdit,
|
||||
canDelete: wildcardPermissions.canDelete,
|
||||
canViewAll: wildcardPermissions.canViewAll,
|
||||
canModifyAll: wildcardPermissions.canModifyAll,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
console.log(` 📋 Set permissions for ${objectDefinitions.length} objects`);
|
||||
}
|
||||
}
|
||||
|
||||
async function seedAllTenants() {
|
||||
console.log('🚀 Starting role seeding for all tenants...\n');
|
||||
|
||||
// For now, seed the main tenant database
|
||||
const databases = ['tenant_tenant1'];
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
for (const database of databases) {
|
||||
try {
|
||||
const knex = createKnexConnection(database);
|
||||
await seedRolesForTenant(knex, database);
|
||||
await knex.destroy();
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error(`❌ ${database}: Seeding failed:`, error.message);
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n============================================================');
|
||||
console.log('📊 Seeding Summary');
|
||||
console.log('============================================================');
|
||||
console.log(`✅ Successful: ${successCount}`);
|
||||
console.log(`❌ Failed: ${errorCount}`);
|
||||
|
||||
if (errorCount === 0) {
|
||||
console.log('\n🎉 All tenant roles seeded successfully!');
|
||||
}
|
||||
}
|
||||
|
||||
seedAllTenants()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error('Unhandled error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
332
backend/scripts/seed-demo-process.ts
Normal file
332
backend/scripts/seed-demo-process.ts
Normal file
@@ -0,0 +1,332 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { AiProcess, AiProcessVersion, AiToolConfig } from '../src/models/ai-process.model';
|
||||
|
||||
// Bootstrap NestJS to get proper services
|
||||
// Bootstrap NestJS to get proper services
/**
 * Bootstrap a NestJS application context (no HTTP listener) and resolve the
 * given tenant's database connection.
 *
 * Uses dynamic imports so this script can be run standalone without pulling
 * the whole app graph in at module-load time.
 *
 * @param tenantSlugOrId Tenant slug or id, resolved via TenantDatabaseService.
 * @returns The resolved tenantId, a tenant-scoped Knex instance, and the
 *          Nest app context — the caller must close the app when done.
 */
async function getTenantContext(tenantSlugOrId: string) {
  const { NestFactory } = await import('@nestjs/core');
  const { AppModule } = await import('../src/app.module');
  const { TenantDatabaseService } = await import('../src/tenant/tenant-database.service');

  // Create app context (without listening); logging disabled to keep seed
  // output readable.
  const app = await NestFactory.createApplicationContext(AppModule, {
    logger: false,
  });

  const tenantDbService = app.get(TenantDatabaseService);

  // Resolve tenant ID
  const tenantId = await tenantDbService.resolveTenantId(tenantSlugOrId);

  // Get proper Knex connection
  const knex = await tenantDbService.getTenantKnexById(tenantId);

  return { tenantId, knex, app };
}
|
||||
|
||||
/**
|
||||
* Seed script for demo AI Process: Register New Pet
|
||||
*
|
||||
* This process demonstrates:
|
||||
* - Conditional logic (find or create account/contact)
|
||||
* - Tool usage (findAccount, createAccount, findContact, createContact, createPet)
|
||||
* - Sequential execution
|
||||
* - LLM decision nodes with structured JSON output
|
||||
*
|
||||
* Usage:
|
||||
* npm run seed:demo-process -- <tenant-slug-or-id>
|
||||
*/
|
||||
|
||||
/**
 * Demo process graph: linear pet-registration flow with two conditional
 * branches (create account / create contact only when lookup fails).
 * Node `position` values are React Flow canvas coordinates only.
 */
const demoProcessGraph = {
  id: 'register_new_pet',
  name: 'Register New Pet',
  description: 'Complete pet registration workflow with account and contact resolution',
  allowCycles: false,
  nodes: [
    {
      id: 'start',
      type: 'Start',
      position: { x: 250, y: 50 },
      data: { label: 'Start' },
    },
    {
      // LLM node: parses the free-text user message into structured fields
      // constrained by outputSchema (temperature 0 for determinism).
      id: 'extract_info',
      type: 'LLMDecisionNode',
      position: { x: 250, y: 150 },
      data: {
        label: 'Extract Pet Info',
        promptTemplate: `Extract pet registration information from the user message.

User message: {{state.message}}

Extract:
- Pet name (required)
- Pet species (required, e.g., "dog", "cat", "bird")
- Pet breed (optional)
- Pet age (optional, as number)
- Owner first name (required)
- Owner last name (required)
- Owner email (optional)
- Owner phone (optional)
- Account/Company name (optional, defaults to owner's full name)

Return JSON with these exact fields.`,
        inputKeys: ['message'],
        outputSchema: {
          type: 'object',
          properties: {
            petName: { type: 'string' },
            species: { type: 'string' },
            breed: { type: 'string' },
            age: { type: 'number' },
            ownerFirstName: { type: 'string' },
            ownerLastName: { type: 'string' },
            ownerEmail: { type: 'string' },
            ownerPhone: { type: 'string' },
            accountName: { type: 'string' },
          },
          required: ['petName', 'species', 'ownerFirstName', 'ownerLastName'],
        },
        model: { name: 'gpt-4o', temperature: 0 },
      },
    },
    {
      // Look up an existing account; sets accountFound/accountId in state.
      id: 'find_account',
      type: 'ToolNode',
      position: { x: 250, y: 280 },
      data: {
        label: 'Find Account',
        toolName: 'findAccount',
        argsTemplate: {
          name: '{{state.accountName}}',
          email: '{{state.ownerEmail}}',
        },
        outputMapping: {
          found: 'accountFound',
          accountId: 'accountId',
        },
      },
    },
    {
      // Branch target when find_account reports accountFound === false.
      id: 'create_account',
      type: 'ToolNode',
      position: { x: 450, y: 380 },
      data: {
        label: 'Create Account',
        toolName: 'createAccount',
        argsTemplate: {
          name: '{{state.accountName}}',
          email: '{{state.ownerEmail}}',
          phone: '{{state.ownerPhone}}',
        },
        outputMapping: {
          accountId: 'accountId',
        },
      },
    },
    {
      // Look up an existing contact under the resolved account.
      id: 'find_contact',
      type: 'ToolNode',
      position: { x: 250, y: 480 },
      data: {
        label: 'Find Contact',
        toolName: 'findContact',
        argsTemplate: {
          firstName: '{{state.ownerFirstName}}',
          lastName: '{{state.ownerLastName}}',
          email: '{{state.ownerEmail}}',
          accountId: '{{state.accountId}}',
        },
        outputMapping: {
          found: 'contactFound',
          contactId: 'contactId',
        },
      },
    },
    {
      // Branch target when find_contact reports contactFound === false.
      id: 'create_contact',
      type: 'ToolNode',
      position: { x: 450, y: 580 },
      data: {
        label: 'Create Contact',
        toolName: 'createContact',
        argsTemplate: {
          firstName: '{{state.ownerFirstName}}',
          lastName: '{{state.ownerLastName}}',
          email: '{{state.ownerEmail}}',
          phone: '{{state.ownerPhone}}',
          accountId: '{{state.accountId}}',
        },
        outputMapping: {
          contactId: 'contactId',
        },
      },
    },
    {
      // Final step: create the pet record linked to the resolved contact.
      id: 'create_pet',
      type: 'ToolNode',
      position: { x: 250, y: 680 },
      data: {
        label: 'Create Pet Record',
        toolName: 'createPet',
        argsTemplate: {
          name: '{{state.petName}}',
          species: '{{state.species}}',
          breed: '{{state.breed}}',
          age: '{{state.age}}',
          ownerId: '{{state.contactId}}',
        },
        outputMapping: {
          petId: 'petId',
        },
      },
    },
    {
      id: 'end',
      type: 'End',
      position: { x: 250, y: 780 },
      data: { label: 'End' },
    },
  ],
  edges: [
    { id: 'e1', source: 'start', target: 'extract_info' },
    { id: 'e2', source: 'extract_info', target: 'find_account' },
    // Conditional edges use json-logic against run state.
    {
      id: 'e3',
      source: 'find_account',
      target: 'find_contact',
      condition: { '==': [{ var: 'accountFound' }, true] },
    },
    {
      id: 'e4',
      source: 'find_account',
      target: 'create_account',
      condition: { '==': [{ var: 'accountFound' }, false] },
    },
    { id: 'e5', source: 'create_account', target: 'find_contact' },
    {
      id: 'e6',
      source: 'find_contact',
      target: 'create_pet',
      condition: { '==': [{ var: 'contactFound' }, true] },
    },
    {
      id: 'e7',
      source: 'find_contact',
      target: 'create_contact',
      condition: { '==': [{ var: 'contactFound' }, false] },
    },
    { id: 'e8', source: 'create_contact', target: 'create_pet' },
    { id: 'e9', source: 'create_pet', target: 'end' },
  ],
};
|
||||
|
||||
// Tool names referenced by the demo graph's ToolNodes; each is enabled in
// ai_tool_configs for the tenant during seeding (if not already enabled).
const demoTools = [
  'findAccount',
  'createAccount',
  'findContact',
  'createContact',
  'createPet',
];
|
||||
|
||||
async function seedDemoProcess(tenantSlugOrId: string) {
|
||||
let app;
|
||||
try {
|
||||
console.log(`\n🌱 Seeding demo AI process for tenant: ${tenantSlugOrId}\n`);
|
||||
|
||||
const context = await getTenantContext(tenantSlugOrId);
|
||||
const { tenantId, knex, app: nestApp } = context;
|
||||
app = nestApp;
|
||||
|
||||
console.log(`✓ Resolved tenant ID: ${tenantId}`);
|
||||
console.log(`✓ Connected to tenant database`);
|
||||
|
||||
// Check if process already exists
|
||||
const existing = await AiProcess.query(knex)
|
||||
.where('name', demoProcessGraph.name)
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
console.log(`⚠ Process "${demoProcessGraph.name}" already exists (ID: ${existing.id})`);
|
||||
console.log(` To create a new version, update via the UI.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create process in transaction
|
||||
await knex.transaction(async (trx) => {
|
||||
const processId = randomUUID();
|
||||
const userId = 'system'; // System user for seed data
|
||||
|
||||
// Create process
|
||||
await AiProcess.query(trx).insert({
|
||||
id: processId,
|
||||
name: demoProcessGraph.name,
|
||||
description: demoProcessGraph.description,
|
||||
latestVersion: 1,
|
||||
createdBy: userId,
|
||||
});
|
||||
console.log(`✓ Created process: ${demoProcessGraph.name} (${processId})`);
|
||||
|
||||
// Create initial version
|
||||
// Note: In production, this would call the compiler service
|
||||
// For seed, we're storing a simplified version
|
||||
await AiProcessVersion.query(trx).insert({
|
||||
id: randomUUID(),
|
||||
processId,
|
||||
version: 1,
|
||||
graphJson: demoProcessGraph,
|
||||
compiledJson: {
|
||||
graphId: demoProcessGraph.id,
|
||||
version: 1,
|
||||
nodes: demoProcessGraph.nodes,
|
||||
edges: demoProcessGraph.edges,
|
||||
startNodeId: 'start',
|
||||
endNodeIds: ['end'],
|
||||
adjacency: {},
|
||||
},
|
||||
createdBy: userId,
|
||||
});
|
||||
console.log(`✓ Created process version 1`);
|
||||
|
||||
// Enable demo tools for tenant
|
||||
for (const toolName of demoTools) {
|
||||
const existingTool = await AiToolConfig.query(trx)
|
||||
.where('tool_name', toolName)
|
||||
.first();
|
||||
|
||||
if (!existingTool) {
|
||||
await AiToolConfig.query(trx).insert({
|
||||
id: randomUUID(),
|
||||
toolName,
|
||||
enabled: true,
|
||||
});
|
||||
console.log(`✓ Enabled tool: ${toolName}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`\n✅ Demo process seeded successfully!\n`);
|
||||
console.log(`Next steps:`);
|
||||
console.log(` 1. Navigate to /ai-processes in your frontend`);
|
||||
console.log(` 2. Open the "${demoProcessGraph.name}" process`);
|
||||
console.log(` 3. Test it by sending a message like:`);
|
||||
console.log(` "Register a dog named Max, owned by John Smith (john@email.com)"`);
|
||||
console.log();
|
||||
|
||||
if (app) await app.close();
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
console.error('❌ Seed failed:', error);
|
||||
if (app) await app.close();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Get tenant from command line args
// (argv[0] = node, argv[1] = script path, argv[2] = tenant slug or id)
const tenantSlugOrId = process.argv[2];

if (!tenantSlugOrId) {
  // Bail out with usage help when the required argument is missing.
  console.error('Usage: npm run seed:demo-process -- <tenant-slug-or-id>');
  process.exit(1);
}

// seedDemoProcess handles its own cleanup and process.exit on all paths.
seedDemoProcess(tenantSlugOrId);
|
||||
41
backend/src/ai-assistant/ai-assistant.controller.ts
Normal file
41
backend/src/ai-assistant/ai-assistant.controller.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { Body, Controller, Post, UseGuards } from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { AiAssistantService } from './ai-assistant.service';
|
||||
import { AiChatRequestDto } from './dto/ai-chat.dto';
|
||||
import { AiSearchRequestDto } from './dto/ai-search.dto';
|
||||
|
||||
/**
 * HTTP endpoints for the AI assistant. All routes are JWT-protected; the
 * tenant is resolved by the @TenantId() decorator and the acting user by
 * @CurrentUser() (only `user.userId` is read here).
 */
@Controller('ai')
@UseGuards(JwtAuthGuard)
export class AiAssistantController {
  constructor(private readonly aiAssistantService: AiAssistantService) {}

  /**
   * POST /ai/chat — forward one chat turn (message + optional history and
   * UI context) to AiAssistantService.handleChat.
   */
  @Post('chat')
  async chat(
    @TenantId() tenantId: string,
    @CurrentUser() user: any,
    @Body() payload: AiChatRequestDto,
  ) {
    return this.aiAssistantService.handleChat(
      tenantId,
      user.userId,
      payload.message,
      payload.history,
      payload.context,
    );
  }

  /**
   * POST /ai/search — record search scoped to the tenant; the whole payload
   * (objectApiName, query, paging) is passed through to searchRecords.
   */
  @Post('search')
  async search(
    @TenantId() tenantId: string,
    @CurrentUser() user: any,
    @Body() payload: AiSearchRequestDto,
  ) {
    return this.aiAssistantService.searchRecords(
      tenantId,
      user.userId,
      payload,
    );
  }
}
|
||||
15
backend/src/ai-assistant/ai-assistant.module.ts
Normal file
15
backend/src/ai-assistant/ai-assistant.module.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { AiAssistantController } from './ai-assistant.controller';
|
||||
import { AiAssistantService } from './ai-assistant.service';
|
||||
import { ObjectModule } from '../object/object.module';
|
||||
import { PageLayoutModule } from '../page-layout/page-layout.module';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { MeilisearchModule } from '../search/meilisearch.module';
|
||||
|
||||
/**
 * Wires the AI assistant feature: the controller plus its service, pulling
 * in object/page-layout/tenant/search modules that the service depends on.
 * The service is exported for reuse by other feature modules.
 */
@Module({
  imports: [ObjectModule, PageLayoutModule, TenantModule, MeilisearchModule],
  controllers: [AiAssistantController],
  providers: [AiAssistantService],
  exports: [AiAssistantService],
})
export class AiAssistantModule {}
|
||||
1236
backend/src/ai-assistant/ai-assistant.service.ts
Normal file
1236
backend/src/ai-assistant/ai-assistant.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
32
backend/src/ai-assistant/ai-assistant.types.ts
Normal file
32
backend/src/ai-assistant/ai-assistant.types.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
/** One turn of assistant conversation history. */
export interface AiChatMessage {
  role: 'user' | 'assistant';
  text: string;
}

/** UI context accompanying a chat request (what the user is looking at). */
export interface AiChatContext {
  objectApiName?: string; // API name of the object currently in view
  view?: string;          // current view identifier
  recordId?: string;      // record open in the UI, if any
  route?: string;         // frontend route the request came from
}

/**
 * Assistant response envelope. `action` tells the frontend what to do next:
 * a created record, a request for more fields, or a clarification prompt.
 */
export interface AiAssistantReply {
  reply: string;
  action?: 'create_record' | 'collect_fields' | 'clarify';
  missingFields?: string[]; // fields still needed when action = 'collect_fields'
  record?: any;             // created/affected record when action = 'create_record'
}

/**
 * Mutable state threaded through the assistant pipeline for one request.
 * Optional fields are filled in progressively as the request is processed.
 */
export interface AiAssistantState {
  message: string;
  history?: AiChatMessage[];
  context: AiChatContext;
  objectDefinition?: any;              // resolved from context.objectApiName — shape not visible here
  pageLayout?: any;                    // resolved page layout — shape not visible here
  extractedFields?: Record<string, any>;
  requiredFields?: string[];
  missingFields?: string[];
  action?: AiAssistantReply['action'];
  record?: any;
  reply?: string;
}
|
||||
36
backend/src/ai-assistant/dto/ai-chat.dto.ts
Normal file
36
backend/src/ai-assistant/dto/ai-chat.dto.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { Type } from 'class-transformer';
|
||||
import { IsNotEmpty, IsObject, IsOptional, IsString, ValidateNested } from 'class-validator';
|
||||
import { AiChatMessageDto } from './ai-chat.message.dto';
|
||||
|
||||
/**
 * Validated UI context for a chat request; all fields optional strings.
 * Mirrors the AiChatContext interface in ai-assistant.types.ts.
 */
export class AiChatContextDto {
  // API name of the object currently in view
  @IsOptional()
  @IsString()
  objectApiName?: string;

  // Current view identifier
  @IsOptional()
  @IsString()
  view?: string;

  // Record open in the UI, if any
  @IsOptional()
  @IsString()
  recordId?: string;

  // Frontend route the request came from
  @IsOptional()
  @IsString()
  route?: string;
}
|
||||
|
||||
export class AiChatRequestDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
message: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsObject()
|
||||
context?: AiChatContextDto;
|
||||
|
||||
@IsOptional()
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => AiChatMessageDto)
|
||||
history?: AiChatMessageDto[];
|
||||
}
|
||||
10
backend/src/ai-assistant/dto/ai-chat.message.dto.ts
Normal file
10
backend/src/ai-assistant/dto/ai-chat.message.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsIn, IsNotEmpty, IsString } from 'class-validator';
|
||||
|
||||
/** One validated history turn: a non-empty text from 'user' or 'assistant'. */
export class AiChatMessageDto {
  @IsIn(['user', 'assistant'])
  role: 'user' | 'assistant';

  @IsString()
  @IsNotEmpty()
  text: string;
}
|
||||
22
backend/src/ai-assistant/dto/ai-search.dto.ts
Normal file
22
backend/src/ai-assistant/dto/ai-search.dto.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { Type } from 'class-transformer';
|
||||
import { IsNotEmpty, IsOptional, IsString, IsNumber } from 'class-validator';
|
||||
|
||||
/**
 * Request body for the AI record-search endpoint. `page`/`pageSize` are
 * coerced from strings via @Type(() => Number) so query-style payloads work.
 */
export class AiSearchRequestDto {
  // API name of the object to search within
  @IsString()
  @IsNotEmpty()
  objectApiName: string;

  // Free-text search query
  @IsString()
  @IsNotEmpty()
  query: string;

  @IsOptional()
  @Type(() => Number)
  @IsNumber()
  page?: number;

  @IsOptional()
  @Type(() => Number)
  @IsNumber()
  pageSize?: number;
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { compileProcessGraph, GraphValidationError } from '../ai-processes.compiler';
|
||||
import { demoRegisterNewPetProcess } from '../demo-process';
|
||||
|
||||
describe('ai-processes compiler', () => {
  // Removing the Start node from the demo graph must fail validation with
  // the compiler's dedicated error type.
  it('throws when missing start node', () => {
    const badGraph = {
      ...demoRegisterNewPetProcess,
      nodes: demoRegisterNewPetProcess.nodes.filter((n) => n.type !== 'Start'),
    };

    expect(() =>
      compileProcessGraph(badGraph, { tenantId: 'default', version: 1 }),
    ).toThrow(GraphValidationError);
  });

  // Happy path: the bundled demo graph compiles and exposes its terminals.
  it('compiles the demo process graph', () => {
    const compiled = compileProcessGraph(demoRegisterNewPetProcess, {
      tenantId: 'default',
      version: 1,
    });

    expect(compiled.startNodeId).toBe('start');
    expect(compiled.endNodeIds).toContain('end');
  });
});
|
||||
@@ -0,0 +1,40 @@
|
||||
import { compileProcessGraph } from '../ai-processes.compiler';
|
||||
import { demoRegisterNewPetProcess } from '../demo-process';
|
||||
import { runCompiledGraph } from '../ai-processes.runner';
|
||||
import { ToolRegistry } from '../tools/tool-registry';
|
||||
|
||||
describe('ai-processes runner', () => {
  // Drives the demo graph with a stubbed llmDecision and expects execution
  // to pause at the human-input node ('need_pet') in 'waiting' status.
  it('runs the demo process until human input is required', async () => {
    const compiled = compileProcessGraph(demoRegisterNewPetProcess, {
      tenantId: 'default',
      version: 1,
    });

    const result = await runCompiledGraph({
      compiledGraph: compiled,
      input: {
        accountName: 'Acme Inc',
        firstName: 'Jamie',
        lastName: 'Doe',
      },
      toolRegistry: new ToolRegistry(),
      toolContext: { tenantId: 'default', userId: 'user-1' },
      // Deterministic stand-in for the LLM: always chooses 'find' and
      // echoes identifiers back from run state.
      llmDecision: async (node, state) => {
        if (node.id === 'decide_account') {
          return { accountAction: 'find', accountName: state.accountName };
        }
        if (node.id === 'decide_contact') {
          return {
            contactAction: 'find',
            firstName: state.firstName,
            lastName: state.lastName,
          };
        }
        return {};
      },
    });

    expect(result.status).toBe('waiting');
    expect(result.currentNodeId).toBe('need_pet');
  });
});
|
||||
191
backend/src/ai-processes/ai-processes.compiler.ts
Normal file
191
backend/src/ai-processes/ai-processes.compiler.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { apply as applyJsonLogic } from 'json-logic-js';
|
||||
import { createAjv } from './ai-processes.schemas';
|
||||
import {
|
||||
CompiledGraph,
|
||||
ProcessGraphDefinition,
|
||||
ProcessGraphEdge,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
import { ToolRegistry } from './tools/tool-registry';
|
||||
|
||||
/**
 * Thrown for any structural or configuration problem found while validating
 * or compiling a process graph. `name` is set so logs/matchers can identify
 * it without instanceof.
 */
export class GraphValidationError extends Error {
  constructor(message: string) {
    super(message);
    this.name = 'GraphValidationError';
  }
}
|
||||
|
||||
/** Options for compileProcessGraph. */
export interface CompileOptions {
  // Tenant owning the graph (currently passed through to validation).
  tenantId: string;
  // Version number stamped onto the compiled artifact.
  version: number;
}
|
||||
|
||||
export const validateGraphDefinition = (
|
||||
graph: ProcessGraphDefinition,
|
||||
tenantId: string,
|
||||
) => {
|
||||
const ajv = createAjv();
|
||||
const validate = ajv.getSchema<ProcessGraphDefinition>('processGraph');
|
||||
if (!validate) {
|
||||
throw new GraphValidationError('Graph schema is not registered.');
|
||||
}
|
||||
const valid = validate(graph);
|
||||
if (!valid) {
|
||||
throw new GraphValidationError(
|
||||
`Graph schema validation failed: ${ajv.errorsText(validate.errors)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const startNodes = graph.nodes.filter((node) => node.type === 'Start');
|
||||
const endNodes = graph.nodes.filter((node) => node.type === 'End');
|
||||
|
||||
if (startNodes.length !== 1) {
|
||||
throw new GraphValidationError('Graph must contain exactly one Start node.');
|
||||
}
|
||||
if (endNodes.length < 1) {
|
||||
throw new GraphValidationError('Graph must contain at least one End node.');
|
||||
}
|
||||
|
||||
const nodeIds = new Set(graph.nodes.map((node) => node.id));
|
||||
graph.edges.forEach((edge) => {
|
||||
if (!nodeIds.has(edge.source) || !nodeIds.has(edge.target)) {
|
||||
throw new GraphValidationError(`Edge ${edge.id} references unknown nodes.`);
|
||||
}
|
||||
});
|
||||
|
||||
const adjacency = buildAdjacency(graph.edges);
|
||||
const reachable = new Set<string>();
|
||||
const queue = [startNodes[0].id];
|
||||
|
||||
while (queue.length) {
|
||||
const current = queue.shift();
|
||||
if (!current || reachable.has(current)) continue;
|
||||
reachable.add(current);
|
||||
(adjacency[current] || []).forEach((neighbor) => queue.push(neighbor));
|
||||
}
|
||||
|
||||
graph.nodes.forEach((node) => {
|
||||
if (!reachable.has(node.id)) {
|
||||
throw new GraphValidationError(`Node ${node.id} is not reachable.`);
|
||||
}
|
||||
});
|
||||
|
||||
if (!graph.allowCycles && hasCycle(graph.nodes, graph.edges)) {
|
||||
throw new GraphValidationError('Graph contains cycles but allowCycles=false.');
|
||||
}
|
||||
|
||||
const toolRegistry = new ToolRegistry();
|
||||
const allToolNames = toolRegistry.getAllToolNames();
|
||||
|
||||
graph.nodes.forEach((node) => {
|
||||
if (node.type === 'ToolNode') {
|
||||
const toolName = (node.data as { toolName?: string }).toolName;
|
||||
if (!toolName) {
|
||||
throw new GraphValidationError(
|
||||
`ToolNode ${node.id} missing toolName configuration.`,
|
||||
);
|
||||
}
|
||||
// Validate tool exists in registry (allowlist check happens at runtime)
|
||||
if (!allToolNames.includes(toolName)) {
|
||||
throw new GraphValidationError(
|
||||
`Tool ${toolName} is not registered in the tool registry.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (node.type === 'LLMDecisionNode') {
|
||||
const data = node.data as {
|
||||
promptTemplate?: string;
|
||||
inputKeys?: string[];
|
||||
outputSchema?: Record<string, unknown>;
|
||||
model?: { name?: string; temperature?: number };
|
||||
};
|
||||
if (!data.promptTemplate || !data.outputSchema || !data.model?.name) {
|
||||
throw new GraphValidationError(
|
||||
`LLMDecisionNode ${node.id} missing required configuration.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (node.type === 'HumanInputNode') {
|
||||
const data = node.data as {
|
||||
requiredFieldsSchema?: Record<string, unknown>;
|
||||
promptToUser?: string;
|
||||
};
|
||||
if (!data.requiredFieldsSchema || !data.promptToUser) {
|
||||
throw new GraphValidationError(
|
||||
`HumanInputNode ${node.id} missing required configuration.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
graph.edges.forEach((edge) => {
|
||||
if (edge.condition) {
|
||||
try {
|
||||
applyJsonLogic(edge.condition, {});
|
||||
} catch (error) {
|
||||
throw new GraphValidationError(
|
||||
`Edge ${edge.id} has invalid json-logic condition.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const compileProcessGraph = (
|
||||
graph: ProcessGraphDefinition,
|
||||
options: CompileOptions,
|
||||
): CompiledGraph => {
|
||||
validateGraphDefinition(graph, options.tenantId);
|
||||
|
||||
const startNodeId = graph.nodes.find((node) => node.type === 'Start')?.id;
|
||||
if (!startNodeId) {
|
||||
throw new GraphValidationError('Start node missing after validation.');
|
||||
}
|
||||
|
||||
const endNodeIds = graph.nodes
|
||||
.filter((node) => node.type === 'End')
|
||||
.map((node) => node.id);
|
||||
|
||||
return {
|
||||
graphId: graph.id,
|
||||
version: options.version,
|
||||
nodes: graph.nodes,
|
||||
edges: graph.edges,
|
||||
startNodeId,
|
||||
endNodeIds,
|
||||
adjacency: buildAdjacency(graph.edges),
|
||||
allowCycles: graph.allowCycles,
|
||||
maxIterations: graph.maxIterations,
|
||||
};
|
||||
};
|
||||
|
||||
const buildAdjacency = (edges: ProcessGraphEdge[]) => {
|
||||
return edges.reduce<Record<string, string[]>>((acc, edge) => {
|
||||
if (!acc[edge.source]) {
|
||||
acc[edge.source] = [];
|
||||
}
|
||||
acc[edge.source].push(edge.target);
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
||||
|
||||
const hasCycle = (nodes: ProcessGraphNode[], edges: ProcessGraphEdge[]) => {
|
||||
const adjacency = buildAdjacency(edges);
|
||||
const visited = new Set<string>();
|
||||
const stack = new Set<string>();
|
||||
|
||||
const visit = (nodeId: string): boolean => {
|
||||
if (stack.has(nodeId)) return true;
|
||||
if (visited.has(nodeId)) return false;
|
||||
visited.add(nodeId);
|
||||
stack.add(nodeId);
|
||||
const neighbors = adjacency[nodeId] || [];
|
||||
for (const neighbor of neighbors) {
|
||||
if (visit(neighbor)) return true;
|
||||
}
|
||||
stack.delete(nodeId);
|
||||
return false;
|
||||
};
|
||||
|
||||
return nodes.some((node) => visit(node.id));
|
||||
};
|
||||
144
backend/src/ai-processes/ai-processes.controller.ts
Normal file
144
backend/src/ai-processes/ai-processes.controller.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Get,
|
||||
Param,
|
||||
Post,
|
||||
Put,
|
||||
Query,
|
||||
Sse,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiProcessesOrchestratorService } from './ai-processes.orchestrator.service';
|
||||
import { CreateAiProcessDto, UpdateAiProcessDto } from './dto/ai-process.dto';
|
||||
import { CreateAiRunDto, ResumeAiRunDto } from './dto/ai-run.dto';
|
||||
import { CreateChatSessionDto, SendChatMessageDto } from './dto/ai-chat.dto';
|
||||
|
||||
@Controller('tenants/:tenantId')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class AiProcessesController {
|
||||
constructor(
|
||||
private readonly processesService: AiProcessesService,
|
||||
private readonly streamService: AiProcessesStreamService,
|
||||
private readonly orchestratorService: AiProcessesOrchestratorService,
|
||||
) {}
|
||||
|
||||
@Get('ai-processes')
|
||||
async listProcesses(@TenantId() tenantId: string) {
|
||||
return this.processesService.listProcesses(tenantId);
|
||||
}
|
||||
|
||||
@Post('ai-processes')
|
||||
async createProcess(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: CreateAiProcessDto,
|
||||
) {
|
||||
return this.processesService.createProcess(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload.name,
|
||||
payload.description,
|
||||
payload.graph,
|
||||
);
|
||||
}
|
||||
|
||||
@Put('ai-processes/:processId')
|
||||
async updateProcess(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('processId') processId: string,
|
||||
@Body() payload: UpdateAiProcessDto,
|
||||
) {
|
||||
return this.processesService.createProcessVersion(
|
||||
tenantId,
|
||||
user.userId,
|
||||
processId,
|
||||
payload.graph,
|
||||
);
|
||||
}
|
||||
|
||||
@Get('ai-processes/:processId/versions')
|
||||
async listVersions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('processId') processId: string,
|
||||
) {
|
||||
return this.processesService.listProcessVersions(tenantId, processId);
|
||||
}
|
||||
|
||||
@Post('ai-processes/:processId/runs')
|
||||
async createRun(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('processId') processId: string,
|
||||
@Body() payload: CreateAiRunDto,
|
||||
) {
|
||||
return this.processesService.createRun(
|
||||
tenantId,
|
||||
user.userId,
|
||||
processId,
|
||||
payload.input,
|
||||
payload.sessionId,
|
||||
payload.sessionId
|
||||
? (event) => this.streamService.emit(payload.sessionId as string, event)
|
||||
: undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('ai-runs/:runId/resume')
|
||||
async resumeRun(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('runId') runId: string,
|
||||
@Body() payload: ResumeAiRunDto,
|
||||
) {
|
||||
return this.processesService.resumeRun(
|
||||
tenantId,
|
||||
user.userId,
|
||||
runId,
|
||||
payload.input,
|
||||
payload.sessionId,
|
||||
payload.sessionId
|
||||
? (event) => this.streamService.emit(payload.sessionId as string, event)
|
||||
: undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('ai-chat/sessions')
|
||||
async createSession(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() _payload: CreateChatSessionDto,
|
||||
) {
|
||||
return this.orchestratorService.createSession(tenantId, user.userId);
|
||||
}
|
||||
|
||||
@Post('ai-chat/messages')
|
||||
@Post('ai-processes/chat/messages')
|
||||
async sendChatMessage(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: SendChatMessageDto,
|
||||
) {
|
||||
return this.orchestratorService.sendMessage(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload.message,
|
||||
payload.sessionId,
|
||||
payload.processId,
|
||||
payload.history,
|
||||
payload.context,
|
||||
);
|
||||
}
|
||||
|
||||
@Sse('ai-chat/stream')
|
||||
@Sse('ai-processes/stream')
|
||||
streamChat(@Query('sessionId') sessionId: string) {
|
||||
return this.streamService.getStream(sessionId);
|
||||
}
|
||||
}
|
||||
19
backend/src/ai-processes/ai-processes.module.ts
Normal file
19
backend/src/ai-processes/ai-processes.module.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { AiAssistantModule } from '../ai-assistant/ai-assistant.module';
|
||||
import { AiProcessesController } from './ai-processes.controller';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiProcessesOrchestratorService } from './ai-processes.orchestrator.service';
|
||||
|
||||
@Module({
|
||||
imports: [TenantModule, AiAssistantModule],
|
||||
controllers: [AiProcessesController],
|
||||
providers: [
|
||||
AiProcessesService,
|
||||
AiProcessesStreamService,
|
||||
AiProcessesOrchestratorService,
|
||||
],
|
||||
exports: [AiProcessesService],
|
||||
})
|
||||
export class AiProcessesModule {}
|
||||
212
backend/src/ai-processes/ai-processes.orchestrator.service.ts
Normal file
212
backend/src/ai-processes/ai-processes.orchestrator.service.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiAssistantService } from '../ai-assistant/ai-assistant.service';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { AiChatMessage, AiChatSession } from '../models/ai-chat.model';
|
||||
import { DeepAgentOrchestrator } from './deep-agent.orchestrator';
|
||||
|
||||
/**
 * Chat-facing orchestrator: receives a user message, persists it to the
 * tenant's chat tables, lets the Deep Agent pick a configured AI process,
 * runs it, and mirrors progress onto the session's SSE stream. Falls back
 * to the plain AI assistant when the tenant has no processes configured.
 */
@Injectable()
export class AiProcessesOrchestratorService {
  constructor(
    private readonly processesService: AiProcessesService,
    private readonly streamService: AiProcessesStreamService,
    private readonly tenantDbService: TenantDatabaseService,
    private readonly aiAssistantService: AiAssistantService,
  ) {}

  // Resolve the canonical tenant id and obtain that tenant's Knex handle.
  private async getTenantContext(tenantId: string) {
    const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
    const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
    return { knex, tenantId: resolvedTenantId };
  }

  // Insert a new chat session row on the tenant connection.
  // NOTE(review): `tenantId` is accepted but never stored — presumably the
  // tenant is implied by the per-tenant database; confirm and drop the
  // parameter if so.
  private async createSessionWithContext(
    knex: Knex,
    tenantId: string,
    userId: string,
  ) {
    return AiChatSession.query(knex).insert({
      userId,
    });
  }

  /** Create a fresh chat session for the given tenant/user. */
  async createSession(tenantId: string, userId: string) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    return this.createSessionWithContext(knex, resolvedTenantId, userId);
  }

  /**
   * Handle one inbound chat message end-to-end.
   *
   * Flow: persist user message → emit 'agent_started' → list processes →
   * either (a) no processes: delegate to the generic AI assistant, or
   * (b) let the Deep Agent select a process, possibly ask a clarifying
   * question, then start a run whose events stream to the session.
   *
   * NOTE(review): `processId` is accepted but unused — an explicit process
   * choice from the client is currently ignored; confirm whether it should
   * bypass Deep Agent selection.
   *
   * @throws when the referenced session does not exist, when OpenAI
   *   credentials are missing, or when the selected process vanished.
   */
  async sendMessage(
    tenantId: string,
    userId: string,
    message: string,
    sessionId?: string,
    processId?: string,
    history?: { role: string; text: string }[],
    context?: Record<string, unknown>,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);

    // Reuse the caller's session or lazily create one.
    const session = sessionId
      ? await AiChatSession.query(knex).findById(sessionId)
      : await this.createSessionWithContext(knex, resolvedTenantId, userId);

    if (!session) {
      throw new Error('Chat session not found.');
    }

    // Persist the inbound user message before doing any work.
    await AiChatMessage.query(knex).insert({
      sessionId: session.id,
      role: 'user',
      content: message,
    });

    this.streamService.emit(session.id, { type: 'agent_started' });

    const processes = await this.processesService.listProcesses(resolvedTenantId);
    this.streamService.emit(session.id, {
      type: 'processes_listed',
      data: { count: processes.length },
    });

    // If no processes configured, fallback to standard AI assistant
    if (!processes.length) {
      const response = await this.aiAssistantService.handleChat(
        resolvedTenantId,
        userId,
        message,
        (history ?? []) as any,
        context ?? {},
      );
      this.streamService.emit(session.id, {
        type: 'final',
        data: { reply: response.reply, action: response.action },
      });

      await AiChatMessage.query(knex).insert({
        sessionId: session.id,
        role: 'assistant',
        content: response.reply,
      });

      return {
        sessionId: session.id,
        reply: response.reply,
        action: response.action,
        record: response.record,
      };
    }

    // Get OpenAI credentials from tenant integrations
    const credentials = await this.aiAssistantService.getOpenAiConfig(resolvedTenantId);
    if (!credentials?.apiKey) {
      throw new Error('OpenAI credentials not configured for this tenant');
    }

    // Create Deep Agent with tenant's credentials
    const deepAgent = new DeepAgentOrchestrator(credentials.apiKey, credentials.model);

    // Use Deep Agent to select the best process
    const processInfos = processes.map((p) => ({
      id: p.id,
      name: p.name,
      description: p.description || undefined,
    }));

    const selection = await deepAgent.selectProcess(
      message,
      processInfos,
      history as any,
    );

    // If we need more information or no match, respond with question
    if (selection.action === 'need_more_info' || selection.action === 'no_match') {
      const reply = selection.question || selection.reasoning ||
        'I\'m not sure which process to use. Could you provide more details?';

      this.streamService.emit(session.id, {
        type: 'final',
        data: { reply, needsMoreInfo: true },
      });

      await AiChatMessage.query(knex).insert({
        sessionId: session.id,
        role: 'assistant',
        content: reply,
      });

      return { sessionId: session.id, reply, needsMoreInfo: true };
    }

    // Process selected - find it and execute
    const selectedProcess = processes.find((p) => p.id === selection.processId);
    if (!selectedProcess) {
      throw new Error('Selected process not found.');
    }

    this.streamService.emit(session.id, {
      type: 'process_selected',
      processId: selectedProcess.id,
      version: selectedProcess.latestVersion,
      data: { processName: selectedProcess.name, reasoning: selection.reasoning },
    });

    // Extract inputs from the message
    // For now, we'll use a simple approach - just pass the message as input
    // In a more sophisticated implementation, we'd use the deep agent to extract structured inputs
    const startMessage = await deepAgent.generateStartMessage(
      selectedProcess.name,
      { message },
    );

    this.streamService.emit(session.id, {
      type: 'agent_message',
      data: { message: startMessage },
    });

    await AiChatMessage.query(knex).insert({
      sessionId: session.id,
      role: 'assistant',
      content: startMessage,
    });

    // Run the process; the callback fans run events out to the SSE stream.
    const { run, result } = await this.processesService.createRun(
      resolvedTenantId,
      userId,
      selectedProcess.id,
      { message, context: context || {} },
      session.id,
      (payload) => this.streamService.emit(session.id, payload),
    );

    // Emit final event
    this.streamService.emit(session.id, {
      type: 'final',
      data: {
        runId: run.id,
        status: result.status,
        output: result.output,
        message: result.status === 'completed'
          ? '✅ Workflow completed successfully!'
          : result.status === 'error'
          ? `❌ Workflow failed: ${result.error?.message || 'Unknown error'}`
          : '⏸️ Workflow paused',
      },
    });

    // Persist the same status summary as the assistant's chat reply.
    await AiChatMessage.query(knex).insert({
      sessionId: session.id,
      role: 'assistant',
      content: result.status === 'completed'
        ? '✅ Workflow completed successfully!'
        : result.status === 'error'
        ? `❌ Workflow failed: ${result.error?.message || 'Unknown error'}`
        : '⏸️ Workflow paused',
    });

    return { sessionId: session.id, runId: run.id, status: result.status };
  }
}
|
||||
222
backend/src/ai-processes/ai-processes.runner.ts
Normal file
222
backend/src/ai-processes/ai-processes.runner.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { apply as applyJsonLogic } from 'json-logic-js';
|
||||
import Ajv from 'ajv';
|
||||
import { ToolRegistry, ToolContext } from './tools/tool-registry';
|
||||
import {
|
||||
AiProcessEventPayload,
|
||||
CompiledGraph,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
|
||||
/**
 * Inputs for one execution (or resumption) of a compiled process graph.
 */
export interface RunOptions {
  /** The compiled graph to execute. */
  compiledGraph: CompiledGraph;
  /** Initial run input; copied into the mutable run state. */
  input: Record<string, unknown>;
  /** Registry used to look up tools referenced by ToolNodes. */
  toolRegistry: ToolRegistry;
  /** Tenant/user/db context passed to every tool invocation. */
  toolContext: ToolContext;
  /** Optional sink for progress events (node start/complete, tool calls, …). */
  onEvent?: (event: AiProcessEventPayload) => void;
  /** Callback that produces an LLMDecisionNode's structured output. */
  llmDecision: (
    node: ProcessGraphNode,
    state: Record<string, unknown>,
  ) => Promise<Record<string, unknown>>;
}
|
||||
|
||||
/**
 * Outcome of a (possibly partial) graph execution.
 * 'waiting' means the run paused at a HumanInputNode and can be resumed
 * from `currentNodeId`.
 */
export interface RunResult {
  status: 'running' | 'waiting' | 'completed' | 'error';
  /** Final (or paused) mutable state of the run. */
  state: Record<string, unknown>;
  /** Node to resume from when status is 'waiting'. */
  currentNodeId?: string;
  /** Present on completion; currently the full state. */
  output?: Record<string, unknown>;
  /** Present on error; at minimum a `message` field. */
  error?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Deterministically execute a compiled process graph.
 *
 * Walks from `startNodeId` (or the graph's start node), dispatching on node
 * type: LLMDecisionNode output is schema-validated and merged into state,
 * ToolNode invokes a registered tool with template-resolved args and maps
 * its result back into state, HumanInputNode suspends the run ('waiting'),
 * and reaching an end node completes it. Edge selection uses json-logic
 * conditions. Cycle / iteration limits guard against runaway graphs.
 *
 * @param options      graph, input, tool registry/context, event sink, LLM callback
 * @param startNodeId  resume point for a previously suspended run
 * @returns terminal RunResult ('completed', 'waiting', or 'error')
 */
export const runCompiledGraph = async (
  options: RunOptions,
  startNodeId?: string,
): Promise<RunResult> => {
  const {
    compiledGraph,
    input,
    toolRegistry,
    toolContext,
    onEvent,
    llmDecision,
  } = options;

  // Mutable run state; seeded from the input and updated by every node.
  const state: Record<string, unknown> = { ...input };
  let currentNodeId = startNodeId ?? compiledGraph.startNodeId;
  let iterations = 0;
  const maxIterations = compiledGraph.maxIterations ?? 50;

  // Events are optional; swallow them when no sink was provided.
  const emit = (payload: AiProcessEventPayload) => {
    if (onEvent) {
      onEvent(payload);
    }
  };

  while (currentNodeId) {
    // Reaching any declared end node terminates the run successfully.
    if (
      compiledGraph.nodes.length > 0 &&
      compiledGraph.endNodeIds.includes(currentNodeId)
    ) {
      emit({ type: 'node_started', nodeId: currentNodeId });
      emit({ type: 'node_completed', nodeId: currentNodeId });
      emit({ type: 'final', data: { output: state } });
      return { status: 'completed', state, output: state };
    }

    const node = compiledGraph.nodes.find((item) => item.id === currentNodeId);
    if (!node) {
      return {
        status: 'error',
        state,
        error: { message: `Node ${currentNodeId} not found.` },
      };
    }

    emit({ type: 'node_started', nodeId: node.id });

    // LLM decision: ask the callback, validate against the node's schema,
    // then merge the structured output into state.
    if (node.type === 'LLMDecisionNode') {
      const output = await llmDecision(node, state);
      validateNodeOutput(node, output);
      Object.assign(state, output);
    }

    // Tool call: resolve {{state.*}} arg templates, invoke the tool, then
    // copy selected result fields back into state via outputMapping.
    if (node.type === 'ToolNode') {
      const toolName = (node.data as { toolName: string }).toolName;
      emit({ type: 'tool_called', nodeId: node.id, toolName });
      const tool = toolRegistry.getTool(toolName);
      const argsTemplate = (node.data as { argsTemplate: Record<string, unknown> })
        .argsTemplate;
      const resolvedArgs = resolveTemplate(argsTemplate, state);

      // Debug logging
      // NOTE(review): leftover console.log diagnostics — route through a
      // proper logger or remove before production.
      console.log(`[ToolNode ${node.id}] Tool: ${toolName}`);
      console.log(`[ToolNode ${node.id}] State keys:`, Object.keys(state));
      console.log(`[ToolNode ${node.id}] ArgsTemplate:`, JSON.stringify(argsTemplate));
      console.log(`[ToolNode ${node.id}] ResolvedArgs:`, JSON.stringify(resolvedArgs));

      // The full state is passed alongside the resolved args so tools can
      // read anything not explicitly templated.
      const toolResult = await tool(toolContext, {
        ...resolvedArgs,
        state,
      });

      console.log(`[ToolNode ${node.id}] ToolResult:`, JSON.stringify(toolResult));

      const outputMapping = (node.data as { outputMapping: Record<string, string> })
        .outputMapping;
      Object.entries(outputMapping).forEach(([key, path]) => {
        console.log(`[ToolNode ${node.id}] Mapping: toolResult['${key}'] = ${toolResult[key]} -> state['${path}']`);
        state[path] = toolResult[key];
      });
    }

    // Human input: suspend the run; the caller resumes it later with the
    // collected fields merged into state.
    if (node.type === 'HumanInputNode') {
      const data = node.data as {
        requiredFieldsSchema: Record<string, unknown>;
        promptToUser: string;
      };
      emit({
        type: 'need_input',
        nodeId: node.id,
        data: {
          requiredFieldsSchema: data.requiredFieldsSchema,
          promptToUser: data.promptToUser,
        },
      });
      return { status: 'waiting', state, currentNodeId: node.id };
    }

    emit({ type: 'node_completed', nodeId: node.id });

    const nextTargets = compiledGraph.edges.filter(
      (edge) => edge.source === node.id,
    );

    // A non-end node with no outgoing edge is a malformed graph.
    if (nextTargets.length === 0) {
      return {
        status: 'error',
        state,
        error: { message: `No outgoing edges for node ${node.id}.` },
      };
    }

    const selectedEdge = selectEdge(nextTargets, state);
    if (!selectedEdge) {
      return {
        status: 'error',
        state,
        error: { message: `No edge conditions matched for node ${node.id}.` },
      };
    }

    currentNodeId = selectedEdge.target;
    iterations += 1;

    // Acyclic graphs cannot legitimately take more steps than they have
    // nodes; treat exceeding that as a cycle.
    if (!compiledGraph.allowCycles && iterations > compiledGraph.nodes.length) {
      return {
        status: 'error',
        state,
        error: { message: 'Cycle detected during execution.' },
      };
    }

    if (compiledGraph.allowCycles && iterations > maxIterations) {
      return {
        status: 'error',
        state,
        error: { message: 'Max iterations exceeded.' },
      };
    }
  }

  // Only reachable if currentNodeId ever becomes falsy (empty start id).
  return { status: 'completed', state, output: state };
};
|
||||
|
||||
const resolveTemplate = (
|
||||
template: Record<string, unknown>,
|
||||
state: Record<string, unknown>,
|
||||
) => {
|
||||
return Object.entries(template).reduce<Record<string, unknown>>(
|
||||
(acc, [key, value]) => {
|
||||
if (typeof value === 'string' && value.startsWith('{{state.')) {
|
||||
const path = value.replace('{{state.', '').replace('}}', '');
|
||||
acc[key] = state[path];
|
||||
} else {
|
||||
acc[key] = value;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
};
|
||||
|
||||
/**
 * Pick the outgoing edge to follow from the current node.
 *
 * With exactly one outgoing edge it is taken unconditionally.
 * NOTE(review): this shortcut skips evaluating a `condition` on a single
 * edge — confirm that is intended. With several edges, the first whose
 * json-logic condition evaluates truthy against the state wins; edges
 * without a condition always match. Returns undefined when nothing matches
 * (the caller reports that as a run error).
 */
const selectEdge = (
  edges: { condition?: Record<string, unknown>; target: string }[],
  state: Record<string, unknown>,
) => {
  if (edges.length === 1) return edges[0];

  return edges.find((edge) => {
    if (!edge.condition) return true;
    try {
      return Boolean(applyJsonLogic(edge.condition, state));
    } catch (error) {
      // A malformed json-logic expression is treated as non-matching
      // rather than aborting the run.
      return false;
    }
  });
};
|
||||
|
||||
/**
 * Validate an LLM decision node's output against the node's declared
 * JSON schema (`data.outputSchema`). No-op when the node declares none;
 * throws with joined Ajv error details when the output does not conform.
 *
 * NOTE(review): a fresh Ajv instance compiles the schema on every call —
 * cache compiled validators per node if this becomes hot.
 */
const validateNodeOutput = (
  node: ProcessGraphNode,
  output: Record<string, unknown>,
) => {
  const schema = (node.data as { outputSchema?: Record<string, unknown> })
    .outputSchema;
  if (!schema) return;
  const ajv = new Ajv({ allErrors: true, strict: false });
  const validate = ajv.compile(schema);
  if (!validate(output)) {
    const errors = validate.errors?.map(e => `${e.instancePath} ${e.message}`).join(', ');
    throw new Error(
      `LLM output invalid for node ${node.id}. Errors: ${errors}. Output: ${JSON.stringify(output)}`
    );
  }
};
|
||||
79
backend/src/ai-processes/ai-processes.schemas.ts
Normal file
79
backend/src/ai-processes/ai-processes.schemas.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import Ajv, { JSONSchemaType } from 'ajv';
|
||||
import addFormats from 'ajv-formats';
|
||||
import {
|
||||
AiNodeType,
|
||||
ProcessGraphDefinition,
|
||||
ProcessGraphEdge,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
|
||||
const nodeTypes: AiNodeType[] = [
|
||||
'Start',
|
||||
'LLMDecisionNode',
|
||||
'ToolNode',
|
||||
'HumanInputNode',
|
||||
'End',
|
||||
];
|
||||
|
||||
/**
 * JSON Schema for a ProcessGraphDefinition as submitted by the builder UI.
 * Used by createAjv() (registered under the key 'processGraph') to validate
 * graphs before compilation. Node `data` is intentionally left an open
 * object — per-node-type payloads are checked elsewhere.
 */
export const graphSchema: any = {
  type: 'object',
  required: ['id', 'name', 'nodes', 'edges'],
  additionalProperties: false,
  properties: {
    id: { type: 'string' },
    name: { type: 'string' },
    description: { type: 'string', nullable: true },
    allowCycles: { type: 'boolean', nullable: true },
    maxIterations: { type: 'number', nullable: true },
    nodes: {
      type: 'array',
      items: { $ref: '#/definitions/processGraphNode' },
      // At least one node (a Start) is required for a runnable graph.
      minItems: 1,
    },
    edges: {
      type: 'array',
      items: { $ref: '#/definitions/processGraphEdge' },
      minItems: 0,
    },
  },
  definitions: {
    processGraphEdge: {
      type: 'object',
      required: ['id', 'source', 'target'],
      additionalProperties: false,
      properties: {
        id: { type: 'string' },
        source: { type: 'string' },
        target: { type: 'string' },
        // Optional json-logic expression gating this edge.
        condition: { type: 'object', nullable: true },
      },
    },
    processGraphNode: {
      type: 'object',
      required: ['id', 'type', 'data'],
      additionalProperties: false,
      properties: {
        id: { type: 'string' },
        type: { type: 'string', enum: nodeTypes },
        // React Flow canvas position; optional, presentation-only.
        position: {
          type: 'object',
          nullable: true,
          required: ['x', 'y'],
          additionalProperties: false,
          properties: {
            x: { type: 'number' },
            y: { type: 'number' },
          },
        },
        data: { type: 'object' },
      },
    },
  },
};
|
||||
|
||||
export const createAjv = () => {
|
||||
const ajv = new Ajv({ allErrors: true, strict: false });
|
||||
addFormats(ajv);
|
||||
ajv.addSchema(graphSchema, 'processGraph');
|
||||
return ajv;
|
||||
};
|
||||
319
backend/src/ai-processes/ai-processes.service.ts
Normal file
319
backend/src/ai-processes/ai-processes.service.ts
Normal file
@@ -0,0 +1,319 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { Knex } from 'knex';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import {
|
||||
AiAuditEvent,
|
||||
AiProcess,
|
||||
AiProcessRun,
|
||||
AiProcessVersion,
|
||||
} from '../models/ai-process.model';
|
||||
import { compileProcessGraph } from './ai-processes.compiler';
|
||||
import { runCompiledGraph } from './ai-processes.runner';
|
||||
import {
|
||||
AiProcessEventPayload,
|
||||
CompiledGraph,
|
||||
ProcessGraphDefinition,
|
||||
} from './ai-processes.types';
|
||||
import { ToolRegistry } from './tools/tool-registry';
|
||||
import { demoTools } from './tools/demo-tools';
|
||||
|
||||
/**
 * Core persistence + execution service for AI processes: CRUD over
 * processes/versions (each save compiles the graph), run creation and
 * resumption via the graph runner, and per-event audit logging. All
 * queries go through the tenant-resolved Knex connection.
 */
@Injectable()
export class AiProcessesService {
  constructor(private readonly tenantDbService: TenantDatabaseService) {}

  // Resolve the canonical tenant id and obtain that tenant's Knex handle.
  private async getTenantContext(tenantId: string) {
    const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
    const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
    return { knex, tenantId: resolvedTenantId };
  }

  /** All processes (with versions) on the tenant connection, newest first. */
  async listProcesses(tenantId: string) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    return AiProcess.query(knex)
      .withGraphFetched('versions')
      .orderBy('created_at', 'desc');
  }

  /** One process (with versions) by id, or undefined. */
  async getProcess(tenantId: string, processId: string) {
    const { knex } = await this.getTenantContext(tenantId);
    return AiProcess.query(knex)
      .findById(processId)
      .withGraphFetched('versions');
  }

  /**
   * Create a process and its version 1 atomically. The graph is compiled
   * up-front so an invalid graph fails before anything is written.
   */
  async createProcess(
    tenantId: string,
    userId: string,
    name: string,
    description: string | undefined,
    graph: ProcessGraphDefinition,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const compiled = compileProcessGraph(graph, {
      tenantId: resolvedTenantId,
      version: 1,
    });

    return knex.transaction(async (trx) => {
      const processId = randomUUID();

      await AiProcess.query(trx).insert({
        id: processId,
        name,
        description,
        latestVersion: 1,
        createdBy: userId,
      });

      // Raw insert: version rows store both the authored and compiled graphs.
      await trx('ai_process_versions').insert({
        id: randomUUID(),
        process_id: processId,
        version: 1,
        graph_json: JSON.stringify(graph),
        compiled_json: JSON.stringify(compiled),
        created_by: userId,
        created_at: new Date(),
      });

      return AiProcess.query(trx)
        .findById(processId)
        .withGraphFetched('versions');
    });
  }

  /**
   * Append a new immutable version of an existing process and bump the
   * process's latestVersion pointer, in one transaction.
   * @throws when the process does not exist.
   */
  async createProcessVersion(
    tenantId: string,
    userId: string,
    processId: string,
    graph: ProcessGraphDefinition,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);

    const process = await AiProcess.query(knex).findById(processId);
    if (!process) {
      throw new Error('Process not found.');
    }

    const nextVersion = process.latestVersion + 1;
    // Compile outside the transaction: invalid graphs fail fast.
    const compiled = compileProcessGraph(graph, {
      tenantId: resolvedTenantId,
      version: nextVersion,
    });

    return knex.transaction(async (trx) => {
      await AiProcess.query(trx)
        .findById(processId)
        .patch({ latestVersion: nextVersion });

      const versionId = randomUUID();
      await trx('ai_process_versions').insert({
        id: versionId,
        process_id: processId,
        version: nextVersion,
        graph_json: JSON.stringify(graph),
        compiled_json: JSON.stringify(compiled),
        created_by: userId,
        created_at: new Date(),
      });

      return AiProcessVersion.query(trx).findById(versionId);
    });
  }

  /** All versions of a process, newest first. */
  async listProcessVersions(tenantId: string, processId: string) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    return AiProcessVersion.query(knex)
      .where({ process_id: processId })
      .orderBy('version', 'desc');
  }

  /**
   * Create and immediately execute a run of the process's latest version.
   * Every runner event is forwarded to `emitEvent` (if given) and written
   * to the audit table (fire-and-forget). The run row is updated with the
   * final status/state before returning.
   * @throws when the process or its latest version row is missing.
   */
  async createRun(
    tenantId: string,
    userId: string,
    processId: string,
    input: Record<string, unknown>,
    sessionId: string | undefined,
    emitEvent?: (payload: AiProcessEventPayload) => void,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const process = await AiProcess.query(knex).findById(processId);
    if (!process) {
      throw new Error('Process not found.');
    }

    const versionRecord = await AiProcessVersion.query(knex).findOne({
      process_id: processId,
      version: process.latestVersion,
    });

    if (!versionRecord) {
      throw new Error('Process version not found.');
    }

    const runId = randomUUID();
    await AiProcessRun.query(knex).insert({
      id: runId,
      processId,
      version: versionRecord.version,
      status: 'running',
      inputJson: input,
      stateJson: input,
      currentNodeId: null,
    });

    const run = await AiProcessRun.query(knex).findById(runId);
    if (!run) {
      throw new Error('Run not created.');
    }

    const compiled = versionRecord.compiledJson as unknown as CompiledGraph;
    const toolRegistry = new ToolRegistry(demoTools);
    await toolRegistry.loadTenantAllowlist(resolvedTenantId, knex);

    // Forward the event and persist it as an audit row. The insert is
    // deliberately not awaited ('void') so audit I/O never blocks the run.
    const emitAndAudit = (event: AiProcessEventPayload) => {
      emitEvent?.(event);
      void AiAuditEvent.query(knex).insert({
        id: randomUUID(),
        runId,
        eventType: event.type,
        payloadJson: event as any,
      });
    };
    const result = await runCompiledGraph(
      {
        compiledGraph: compiled,
        input,
        toolRegistry,
        toolContext: { tenantId: resolvedTenantId, userId, knex },
        onEvent: (event) => emitAndAudit({ ...event, runId, sessionId }),
        // NOTE(review): LLM decisions are stubbed with mockDecision below —
        // replace with a real LLM call before production.
        llmDecision: async (node, state) =>
          this.mockDecision(node.id, state),
      },
      run.currentNodeId ?? undefined,
    );

    const updatedRun = await this.persistRunResult(runId, result, knex);

    return { run: updatedRun, result };
  }

  /**
   * Resume a suspended run from its saved currentNodeId, merging the new
   * `input` over the persisted state. Same eventing/audit behavior as
   * createRun.
   * @throws when the run or its version row is missing.
   */
  async resumeRun(
    tenantId: string,
    userId: string,
    runId: string,
    input: Record<string, unknown>,
    sessionId: string | undefined,
    emitEvent?: (payload: AiProcessEventPayload) => void,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const run = await AiProcessRun.query(knex).findById(runId);
    if (!run) {
      throw new Error('Run not found.');
    }
    // Resume against the exact version the run started on, not the latest.
    const versionRecord = await AiProcessVersion.query(knex).findOne({
      process_id: run.processId,
      version: run.version,
    });
    if (!versionRecord) {
      throw new Error('Process version not found.');
    }

    const compiled = versionRecord.compiledJson as unknown as CompiledGraph;
    const toolRegistry = new ToolRegistry(demoTools);
    await toolRegistry.loadTenantAllowlist(resolvedTenantId, knex);

    // New input wins over previously persisted state on key collisions.
    const mergedState = { ...(run.stateJson || {}), ...input };
    const emitAndAudit = (event: AiProcessEventPayload) => {
      emitEvent?.(event);
      void AiAuditEvent.query(knex).insert({
        id: randomUUID(),
        runId: run.id,
        eventType: event.type,
        payloadJson: event as any,
      });
    };

    const result = await runCompiledGraph(
      {
        compiledGraph: compiled,
        input: mergedState,
        toolRegistry,
        toolContext: { tenantId: resolvedTenantId, userId, knex },
        onEvent: (event) =>
          emitAndAudit({ ...event, runId: run.id, sessionId }),
        llmDecision: async (node, state) =>
          this.mockDecision(node.id, state),
      },
      run.currentNodeId ?? undefined,
    );

    const updatedRun = await this.persistRunResult(run.id, result, knex);

    return { run: updatedRun, result };
  }

  // Write the runner's outcome back to the run row; endedAt is set only on
  // terminal states (completed / error), left null while waiting.
  private async persistRunResult(runId: string, result: any, knex: Knex) {
    const endedAt =
      result.status === 'completed' || result.status === 'error'
        ? new Date()
        : null;

    return AiProcessRun.query(knex).patchAndFetchById(runId, {
      status: result.status,
      outputJson: result.output,
      errorJson: result.error,
      stateJson: result.state,
      currentNodeId: result.currentNodeId ?? null,
      endedAt,
    });
  }

  // Placeholder LLM-decision implementation keyed on hard-coded node ids
  // from the demo pet-registration graph. Returns {} for unknown nodes.
  private async mockDecision(
    nodeId: string,
    state: Record<string, unknown>,
  ) {
    if (nodeId === 'extract_info') {
      // Extract pet registration info from the message
      const message = (state.message as string) || '';

      // Simple extraction (in production, this would use an LLM)
      const petNameMatch = message.match(/(?:dog|cat|pet)\s+named\s+(\w+)/i);
      const petTypeMatch = message.match(/(dog|cat)/i);
      const ownerNameMatch = message.match(/owned\s+by\s+([\w\s]+?)(?:\s*\(|$)/i);
      const emailMatch = message.match(/\(?([\w\.-]+@[\w\.-]+\.\w+)\)?/i);

      const ownerName = ownerNameMatch?.[1]?.trim() || 'Unknown Owner';
      const nameParts = ownerName.split(/\s+/);
      const firstName = nameParts[0] || 'Unknown';
      const lastName = nameParts.slice(1).join(' ') || 'Owner';

      return {
        petName: petNameMatch?.[1] || 'Unknown Pet',
        species: petTypeMatch?.[1]?.toLowerCase() || 'dog',
        ownerFirstName: firstName,
        ownerLastName: lastName,
        ownerEmail: emailMatch?.[1] || null,
        accountName: `${firstName} ${lastName}`,
      };
    }
    if (nodeId === 'decide_account') {
      const accountName = (state.accountName as string) ?? 'New Account';
      const accountAction = state.accountId ? 'find' : 'create';
      return { accountAction, accountName };
    }
    if (nodeId === 'decide_contact') {
      const firstName = (state.firstName as string) ?? 'Jane';
      const lastName = (state.lastName as string) ?? 'Doe';
      const contactAction = state.contactId ? 'find' : 'create';
      return { contactAction, firstName, lastName };
    }
    return {};
  }
}
|
||||
33
backend/src/ai-processes/ai-processes.stream.service.ts
Normal file
33
backend/src/ai-processes/ai-processes.stream.service.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { MessageEvent } from '@nestjs/common';
|
||||
import { Observable, Subject } from 'rxjs';
|
||||
import { AiProcessEventPayload } from './ai-processes.types';
|
||||
|
||||
@Injectable()
|
||||
export class AiProcessesStreamService {
|
||||
private readonly streams = new Map<string, Subject<MessageEvent>>();
|
||||
|
||||
getStream(sessionId: string): Observable<MessageEvent> {
|
||||
return this.getSubject(sessionId).asObservable();
|
||||
}
|
||||
|
||||
emit(sessionId: string, payload: AiProcessEventPayload) {
|
||||
const subject = this.getSubject(sessionId);
|
||||
subject.next({ type: payload.type, data: payload });
|
||||
}
|
||||
|
||||
close(sessionId: string) {
|
||||
const subject = this.streams.get(sessionId);
|
||||
if (subject) {
|
||||
subject.complete();
|
||||
this.streams.delete(sessionId);
|
||||
}
|
||||
}
|
||||
|
||||
private getSubject(sessionId: string) {
|
||||
if (!this.streams.has(sessionId)) {
|
||||
this.streams.set(sessionId, new Subject<MessageEvent>());
|
||||
}
|
||||
return this.streams.get(sessionId) as Subject<MessageEvent>;
|
||||
}
|
||||
}
|
||||
125
backend/src/ai-processes/ai-processes.types.ts
Normal file
125
backend/src/ai-processes/ai-processes.types.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { JSONSchema7 } from 'json-schema';
|
||||
|
||||
/** The node kinds a process graph may contain. */
export type AiNodeType =
  | 'Start'
  | 'LLMDecisionNode'
  | 'ToolNode'
  | 'HumanInputNode'
  | 'End';

/**
 * Admin-authored process graph as designed in the visual builder.
 * This is the persisted, uncompiled form.
 */
export interface ProcessGraphDefinition {
  id: string;
  name: string;
  description?: string;
  // When true, back-edges are allowed; maxIterations bounds loop execution.
  allowCycles?: boolean;
  maxIterations?: number;
  nodes: ProcessGraphNode[];
  edges: ProcessGraphEdge[];
}

/** One node of the graph; the shape of `data` is determined by `type`. */
export interface ProcessGraphNode {
  id: string;
  type: AiNodeType;
  // Canvas coordinates from the visual builder; not used at execution time.
  position?: { x: number; y: number };
  data:
    | StartNodeData
    | LLMDecisionNodeData
    | ToolNodeData
    | HumanInputNodeData
    | EndNodeData;
}

/** Directed edge; an optional JsonLogic condition gates traversal. */
export interface ProcessGraphEdge {
  id: string;
  source: string;
  target: string;
  condition?: JsonLogicExpression;
}

// A JsonLogic rule object, evaluated against the run state to pick edges.
export type JsonLogicExpression = Record<string, unknown>;

/** Payload for 'Start' nodes — display label only. */
export interface StartNodeData {
  label?: string;
}

/** Payload for 'End' nodes — display label only. */
export interface EndNodeData {
  label?: string;
}

/** Configuration for a node that asks an LLM to produce structured output. */
export interface LLMDecisionNodeData {
  label?: string;
  // Prompt sent to the model; inputKeys name the state values it may read.
  promptTemplate: string;
  inputKeys: string[];
  // JSON Schema the model's JSON output must satisfy.
  outputSchema: JSONSchema7;
  model: {
    name: string;
    temperature: number;
  };
}

/** Configuration for a node that invokes a registered tool. */
export interface ToolNodeData {
  label?: string;
  toolName: string;
  // Tool arguments; values may contain '{{state.*}}' placeholders.
  argsTemplate: Record<string, unknown>;
  // Maps tool result keys to run-state keys.
  outputMapping: Record<string, string>;
}

/** Configuration for a node that pauses the run to collect user input. */
export interface HumanInputNodeData {
  label?: string;
  // JSON Schema describing the fields the user must supply to resume.
  requiredFieldsSchema: JSONSchema7;
  promptToUser: string;
}

/** Validated, executable form of a graph produced by the compiler. */
export interface CompiledGraph {
  graphId: string;
  version: number;
  nodes: ProcessGraphNode[];
  edges: ProcessGraphEdge[];
  startNodeId: string;
  endNodeIds: string[];
  // node id -> ids of its outgoing edge targets.
  adjacency: Record<string, string[]>;
  allowCycles?: boolean;
  maxIterations?: number;
}

/** Lifecycle status of a process run. */
export type AiProcessStatus = 'running' | 'waiting' | 'completed' | 'error';

/** Mutable execution context carried between steps of a run. */
export interface AiProcessRunContext {
  state: Record<string, unknown>;
  currentNodeId?: string;
  // Guards against infinite loops when cycles are allowed.
  iterationCount?: number;
}

/** Event names streamed to the client during orchestration and execution. */
export type AiProcessEventType =
  | 'agent_started'
  | 'processes_listed'
  | 'process_selected'
  | 'agent_message'
  | 'node_started'
  | 'tool_called'
  | 'node_completed'
  | 'need_input'
  | 'final'
  | 'error';

/** Envelope for every streamed event; optional fields depend on `type`. */
export interface AiProcessEventPayload {
  type: AiProcessEventType;
  runId?: string;
  sessionId?: string;
  nodeId?: string;
  toolName?: string;
  processId?: string;
  version?: number;
  data?: Record<string, unknown>;
}

/** Emitted with 'need_input' so the UI can render a form for the user. */
export interface NeedInputPayload {
  runId: string;
  requiredFieldsSchema: JSONSchema7;
  promptToUser: string;
}

/** Identifies the process (and version) chosen for execution. */
export interface ProcessSelection {
  processId: string;
  version: number;
}
|
||||
202
backend/src/ai-processes/deep-agent.orchestrator.ts
Normal file
202
backend/src/ai-processes/deep-agent.orchestrator.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { JsonOutputParser } from '@langchain/core/output_parsers';
|
||||
import { SystemMessage, HumanMessage } from '@langchain/core/messages';
|
||||
|
||||
/** Minimal process metadata the orchestrator uses for selection. */
export interface ProcessInfo {
  id: string;
  name: string;
  description?: string;
}

/** Outcome of process selection (step 1 of the orchestration). */
export interface ProcessSelectionResult {
  action: 'select_process' | 'need_more_info' | 'no_match';
  // Set when action === 'select_process'.
  processId?: string;
  // Clarifying question for the user when action === 'need_more_info'.
  question?: string;
  // Brief LLM explanation of the decision (also used for error reporting).
  reasoning?: string;
}

/** Outcome of input extraction (step 2 of the orchestration). */
export interface InputExtractionResult {
  // True only when every required field was extracted.
  hasAllInputs: boolean;
  extractedInputs: Record<string, unknown>;
  missingFields?: string[];
  // Natural-language follow-up to ask when required fields are missing.
  question?: string;
}
|
||||
|
||||
export class DeepAgentOrchestrator {
|
||||
private model: ChatOpenAI;
|
||||
|
||||
constructor(
|
||||
apiKey: string,
|
||||
modelName: string = 'gpt-4o',
|
||||
temperature: number = 0,
|
||||
) {
|
||||
this.model = new ChatOpenAI({
|
||||
apiKey,
|
||||
modelName,
|
||||
temperature,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 1: Select the best matching process from available processes
|
||||
*/
|
||||
async selectProcess(
|
||||
userMessage: string,
|
||||
availableProcesses: ProcessInfo[],
|
||||
conversationHistory?: { role: string; text: string }[],
|
||||
): Promise<ProcessSelectionResult> {
|
||||
const processList = availableProcesses
|
||||
.map((p) => `- ${p.name} (ID: ${p.id}): ${p.description || 'No description'}`)
|
||||
.join('\n');
|
||||
|
||||
const historyContext =
|
||||
conversationHistory && conversationHistory.length > 0
|
||||
? `\n\nConversation history:\n${conversationHistory
|
||||
.map((msg) => `${msg.role}: ${msg.text}`)
|
||||
.join('\n')}`
|
||||
: '';
|
||||
|
||||
const systemPrompt = `You are an intelligent process orchestrator. Your task is to select the most appropriate business process based on the user's request.
|
||||
|
||||
Available processes:
|
||||
${processList}
|
||||
|
||||
Rules:
|
||||
1. Select exactly ONE process that best matches the user's intent
|
||||
2. If the request is ambiguous or matches multiple processes, ask for clarification
|
||||
3. If no process matches, indicate no match
|
||||
4. Always provide reasoning for your decision
|
||||
|
||||
Respond with JSON:
|
||||
{
|
||||
"action": "select_process" | "need_more_info" | "no_match",
|
||||
"processId": "selected process ID or null",
|
||||
"question": "clarifying question if needed",
|
||||
"reasoning": "brief explanation of decision"
|
||||
}`;
|
||||
|
||||
const userPrompt = `User request: ${userMessage}${historyContext}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
const parser = new JsonOutputParser<ProcessSelectionResult>();
|
||||
const content = response.content as string;
|
||||
const jsonMatch = content.match(/\{[\s\S]*\}/);
|
||||
|
||||
if (jsonMatch) {
|
||||
return await parser.parse(jsonMatch[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
action: 'no_match',
|
||||
reasoning: 'Failed to parse LLM response',
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('Process selection error:', error);
|
||||
return {
|
||||
action: 'no_match',
|
||||
reasoning: `Error: ${error.message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 2: Extract required inputs from user message
|
||||
*/
|
||||
async extractInputs(
|
||||
userMessage: string,
|
||||
requiredFields: { name: string; description: string; required: boolean }[],
|
||||
conversationHistory?: { role: string; text: string }[],
|
||||
context?: Record<string, unknown>,
|
||||
): Promise<InputExtractionResult> {
|
||||
const fieldsList = requiredFields
|
||||
.map((f) => `- ${f.name} (${f.required ? 'required' : 'optional'}): ${f.description}`)
|
||||
.join('\n');
|
||||
|
||||
const historyContext =
|
||||
conversationHistory && conversationHistory.length > 0
|
||||
? `\n\nConversation history:\n${conversationHistory
|
||||
.map((msg) => `${msg.role}: ${msg.text}`)
|
||||
.join('\n')}`
|
||||
: '';
|
||||
|
||||
const contextInfo = context ? `\n\nAvailable context: ${JSON.stringify(context)}` : '';
|
||||
|
||||
const systemPrompt = `You are an input extraction assistant. Extract structured data from the user's message and conversation history.
|
||||
|
||||
Required fields for this process:
|
||||
${fieldsList}${contextInfo}
|
||||
|
||||
Rules:
|
||||
1. Extract as many fields as possible from the message and context
|
||||
2. Only mark hasAllInputs=true if ALL required fields are present
|
||||
3. If required fields are missing, generate a natural question to ask the user
|
||||
4. Use context data when available (e.g., current page context)
|
||||
|
||||
Respond with JSON:
|
||||
{
|
||||
"hasAllInputs": true | false,
|
||||
"extractedInputs": { "field1": "value1", ... },
|
||||
"missingFields": ["field1", "field2"] or undefined,
|
||||
"question": "natural language question" or undefined
|
||||
}`;
|
||||
|
||||
const userPrompt = `User message: ${userMessage}${historyContext}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
const parser = new JsonOutputParser<InputExtractionResult>();
|
||||
const content = response.content as string;
|
||||
const jsonMatch = content.match(/\{[\s\S]*\}/);
|
||||
|
||||
if (jsonMatch) {
|
||||
return await parser.parse(jsonMatch[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
hasAllInputs: false,
|
||||
extractedInputs: {},
|
||||
missingFields: requiredFields.filter((f) => f.required).map((f) => f.name),
|
||||
question: 'I need more information to proceed. Could you provide additional details?',
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('Input extraction error:', error);
|
||||
return {
|
||||
hasAllInputs: false,
|
||||
extractedInputs: {},
|
||||
question: 'I encountered an error processing your request. Please try again.',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 3: Generate a friendly response explaining what will happen
|
||||
*/
|
||||
async generateStartMessage(
|
||||
processName: string,
|
||||
extractedInputs: Record<string, unknown>,
|
||||
): Promise<string> {
|
||||
const systemPrompt = `You are a friendly assistant explaining what process will be executed. Be concise and clear.`;
|
||||
|
||||
const userPrompt = `Generate a brief message (1-2 sentences) confirming that you will execute the "${processName}" process with these inputs: ${JSON.stringify(extractedInputs)}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
return (response.content as string).trim();
|
||||
} catch (error) {
|
||||
return `I'll execute the ${processName} process with your provided information.`;
|
||||
}
|
||||
}
|
||||
}
|
||||
173
backend/src/ai-processes/demo-process.ts
Normal file
173
backend/src/ai-processes/demo-process.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { ProcessGraphDefinition } from './ai-processes.types';
|
||||
|
||||
export const demoRegisterNewPetProcess: ProcessGraphDefinition = {
|
||||
id: 'register_new_pet',
|
||||
name: 'Register New Pet',
|
||||
description: 'Resolve account/contact then create pet.',
|
||||
allowCycles: false,
|
||||
nodes: [
|
||||
{
|
||||
id: 'start',
|
||||
type: 'Start',
|
||||
data: { label: 'Start' },
|
||||
},
|
||||
{
|
||||
id: 'decide_account',
|
||||
type: 'LLMDecisionNode',
|
||||
data: {
|
||||
label: 'Decide Account Action',
|
||||
promptTemplate:
|
||||
'Decide whether to find or create an account. Return JSON {"accountAction":"find|create","accountName":"string"}.',
|
||||
inputKeys: ['accountName'],
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
required: ['accountAction', 'accountName'],
|
||||
properties: {
|
||||
accountAction: { type: 'string', enum: ['find', 'create'] },
|
||||
accountName: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
model: { name: 'gpt-4o-mini', temperature: 0 },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_account',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Find Account',
|
||||
toolName: 'findAccount',
|
||||
argsTemplate: { accountName: '{{state.accountName}}' },
|
||||
outputMapping: { accountId: 'accountId', found: 'accountFound' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_account',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Account',
|
||||
toolName: 'createAccount',
|
||||
argsTemplate: { accountName: '{{state.accountName}}' },
|
||||
outputMapping: { accountId: 'accountId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'decide_contact',
|
||||
type: 'LLMDecisionNode',
|
||||
data: {
|
||||
label: 'Decide Contact Action',
|
||||
promptTemplate:
|
||||
'Decide whether to find or create a contact. Return JSON {"contactAction":"find|create","firstName":"string","lastName":"string"}.',
|
||||
inputKeys: ['firstName', 'lastName'],
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
required: ['contactAction', 'firstName', 'lastName'],
|
||||
properties: {
|
||||
contactAction: { type: 'string', enum: ['find', 'create'] },
|
||||
firstName: { type: 'string' },
|
||||
lastName: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
model: { name: 'gpt-4o-mini', temperature: 0 },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_contact',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Find Contact',
|
||||
toolName: 'findContact',
|
||||
argsTemplate: {
|
||||
accountId: '{{state.accountId}}',
|
||||
firstName: '{{state.firstName}}',
|
||||
lastName: '{{state.lastName}}',
|
||||
},
|
||||
outputMapping: { contactId: 'contactId', found: 'contactFound' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_contact',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Contact',
|
||||
toolName: 'createContact',
|
||||
argsTemplate: {
|
||||
accountId: '{{state.accountId}}',
|
||||
firstName: '{{state.firstName}}',
|
||||
lastName: '{{state.lastName}}',
|
||||
},
|
||||
outputMapping: { contactId: 'contactId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'need_pet',
|
||||
type: 'HumanInputNode',
|
||||
data: {
|
||||
label: 'Collect Pet Info',
|
||||
promptToUser: 'What is the pet name and type?',
|
||||
requiredFieldsSchema: {
|
||||
type: 'object',
|
||||
required: ['petName', 'petType'],
|
||||
properties: {
|
||||
petName: { type: 'string' },
|
||||
petType: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_pet',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Pet',
|
||||
toolName: 'createPet',
|
||||
argsTemplate: {
|
||||
contactId: '{{state.contactId}}',
|
||||
petName: '{{state.petName}}',
|
||||
petType: '{{state.petType}}',
|
||||
},
|
||||
outputMapping: { petId: 'petId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'end',
|
||||
type: 'End',
|
||||
data: { label: 'End' },
|
||||
},
|
||||
],
|
||||
edges: [
|
||||
{ id: 'e_start_account', source: 'start', target: 'decide_account' },
|
||||
{
|
||||
id: 'e_account_find',
|
||||
source: 'decide_account',
|
||||
target: 'find_account',
|
||||
condition: { '==': [{ var: 'accountAction' }, 'find'] },
|
||||
},
|
||||
{
|
||||
id: 'e_account_create',
|
||||
source: 'decide_account',
|
||||
target: 'create_account',
|
||||
condition: { '==': [{ var: 'accountAction' }, 'create'] },
|
||||
},
|
||||
{ id: 'e_account_to_contact', source: 'find_account', target: 'decide_contact' },
|
||||
{ id: 'e_create_account_to_contact', source: 'create_account', target: 'decide_contact' },
|
||||
{
|
||||
id: 'e_contact_find',
|
||||
source: 'decide_contact',
|
||||
target: 'find_contact',
|
||||
condition: { '==': [{ var: 'contactAction' }, 'find'] },
|
||||
},
|
||||
{
|
||||
id: 'e_contact_create',
|
||||
source: 'decide_contact',
|
||||
target: 'create_contact',
|
||||
condition: { '==': [{ var: 'contactAction' }, 'create'] },
|
||||
},
|
||||
{ id: 'e_contact_to_pet', source: 'find_contact', target: 'need_pet' },
|
||||
{ id: 'e_create_contact_to_pet', source: 'create_contact', target: 'need_pet' },
|
||||
{ id: 'e_need_pet_to_create', source: 'need_pet', target: 'create_pet' },
|
||||
{ id: 'e_pet_to_end', source: 'create_pet', target: 'end' },
|
||||
],
|
||||
};
|
||||
28
backend/src/ai-processes/dto/ai-chat.dto.ts
Normal file
28
backend/src/ai-processes/dto/ai-chat.dto.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { IsArray, IsObject, IsOptional, IsString } from 'class-validator';
|
||||
|
||||
/** Request body for creating a new chat session. */
export class CreateChatSessionDto {
  // Free-form seed context (presumably the current page/app context — confirm with caller).
  @IsOptional()
  @IsString()
  context?: string;
}

/** Request body for one user turn sent to the chat orchestrator. */
export class SendChatMessageDto {
  // The user's natural-language message.
  @IsString()
  message!: string;

  // Prior turns; role values are presumably 'user'/'assistant' — confirm with the frontend.
  @IsOptional()
  @IsArray()
  history?: { role: string; text: string }[];

  // Structured context forwarded to the orchestrator (e.g. current page data).
  @IsOptional()
  @IsObject()
  context?: Record<string, unknown>;

  // Existing session to continue; omitted on the first message.
  @IsOptional()
  @IsString()
  sessionId?: string;

  // NOTE(review): presumably pins a specific process, bypassing LLM selection — confirm.
  @IsOptional()
  @IsString()
  processId?: string;
}
|
||||
24
backend/src/ai-processes/dto/ai-process.dto.ts
Normal file
24
backend/src/ai-processes/dto/ai-process.dto.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { IsArray, IsObject, IsOptional, IsString } from 'class-validator';
|
||||
import { ProcessGraphDefinition } from '../ai-processes.types';
|
||||
|
||||
/** Request body for creating a new AI process definition. */
export class CreateAiProcessDto {
  // Display name shown in the builder and to the orchestrator.
  @IsString()
  name!: string;

  @IsOptional()
  @IsString()
  description?: string;

  // Full graph as designed in the builder (nodes + edges).
  @IsObject()
  graph!: ProcessGraphDefinition;
}

/** Request body for replacing an existing process's graph. */
export class UpdateAiProcessDto {
  @IsObject()
  graph!: ProcessGraphDefinition;
}

/** List-response wrapper carrying process summaries. */
export class AiProcessListResponseDto {
  @IsArray()
  items!: Record<string, unknown>[];
}
|
||||
19
backend/src/ai-processes/dto/ai-run.dto.ts
Normal file
19
backend/src/ai-processes/dto/ai-run.dto.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { IsObject, IsOptional, IsString } from 'class-validator';
|
||||
|
||||
/** Request body for starting a process run. */
export class CreateAiRunDto {
  // Initial run state (e.g. the orchestrator's extracted inputs).
  @IsObject()
  input!: Record<string, unknown>;

  // Chat session to stream progress events to, if any.
  @IsOptional()
  @IsString()
  sessionId?: string;
}

/** Request body for resuming a run paused at a HumanInputNode. */
export class ResumeAiRunDto {
  // Values answering the paused node's requiredFieldsSchema.
  @IsObject()
  input!: Record<string, unknown>;

  @IsOptional()
  @IsString()
  sessionId?: string;
}
|
||||
226
backend/src/ai-processes/tools/demo-tools.ts
Normal file
226
backend/src/ai-processes/tools/demo-tools.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import { ToolContext, ToolHandler } from './tool-registry';
|
||||
import { Account } from '../../models/account.model';
|
||||
import { Contact } from '../../models/contact.model';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
 * Demo tools that wrap ObjectService operations
 * These tools provide structured access to CRM entities
 *
 * Each handler receives a ToolContext (ctx.knex is presumably the
 * tenant-scoped connection — confirm) plus args resolved from the graph's
 * argsTemplate, and returns a plain result object merged into run state.
 * Query/insert failures are reported in the result rather than thrown.
 */

/**
 * Fuzzy-finds a single account by name (SQL LIKE, first match).
 * Returns { found, accountId } — never throws on query failure.
 */
export const findAccount: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for findAccount');
  }

  const { name } = args as { name?: string };

  if (!name) {
    // Soft failure: report back to the graph rather than aborting the run.
    return { found: false, accountId: null, message: 'Name required' };
  }

  try {
    // NOTE(review): '%name%' LIKE returns an arbitrary first row when several
    // accounts share a substring — confirm this is acceptable for the demo.
    const query = Account.query(ctx.knex).where('name', 'like', `%${name}%`);

    const account = await query.first();

    if (account) {
      return {
        found: true,
        accountId: account.id,
        account: {
          id: account.id,
          name: account.name,
        },
      };
    }

    return { found: false, accountId: null };
  } catch (error: any) {
    return { found: false, error: error.message };
  }
};

/**
 * Creates an account owned by the calling user.
 * Returns { success, accountId } on success, { success: false, error } on failure.
 */
export const createAccount: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for createAccount');
  }

  // NOTE(review): `email` is accepted here but never inserted below —
  // confirm against the accounts schema whether it should be persisted.
  const { name, email, phone, industry } = args as {
    name: string;
    email?: string;
    phone?: string;
    industry?: string;
  };

  if (!name) {
    throw new Error('Account name is required');
  }

  try {
    const accountId = randomUUID();
    await ctx.knex('accounts').insert({
      id: accountId,
      name,
      phone,
      industry,
      ownerId: ctx.userId,
    });

    return {
      success: true,
      accountId,
      account: {
        id: accountId,
        name,
      },
    };
  } catch (error: any) {
    return { success: false, error: error.message };
  }
};

/**
 * Fuzzy-finds a single contact by first/last name, optionally scoped to an
 * account. Requires at least one of firstName/lastName.
 */
export const findContact: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for findContact');
  }

  const { firstName, lastName, accountId } = args as {
    firstName?: string;
    lastName?: string;
    accountId?: string;
  };

  if (!firstName && !lastName) {
    return {
      found: false,
      contactId: null,
      message: 'First name or last name required',
    };
  }

  try {
    // Filters are combined with AND; only provided values are applied.
    let query = Contact.query(ctx.knex);

    if (firstName) {
      query = query.where('firstName', 'like', `%${firstName}%`);
    }
    if (lastName) {
      query = query.where('lastName', 'like', `%${lastName}%`);
    }
    if (accountId) {
      query = query.where('accountId', accountId);
    }

    const contact = await query.first();

    if (contact) {
      return {
        found: true,
        contactId: contact.id,
        contact: {
          id: contact.id,
          firstName: contact.firstName,
          lastName: contact.lastName,
          accountId: contact.accountId,
        },
      };
    }

    return { found: false, contactId: null };
  } catch (error: any) {
    return { found: false, error: error.message };
  }
};

/**
 * Creates a contact, optionally linked to an account, owned by the caller.
 */
export const createContact: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for createContact');
  }

  // NOTE(review): `email` and `phone` are accepted but not inserted below —
  // confirm against the contacts schema whether they should be persisted.
  const { firstName, lastName, email, phone, accountId } = args as {
    firstName: string;
    lastName: string;
    email?: string;
    phone?: string;
    accountId?: string;
  };

  if (!firstName || !lastName) {
    throw new Error('First name and last name are required');
  }

  try {
    const contactId = randomUUID();
    await ctx.knex('contacts').insert({
      id: contactId,
      firstName,
      lastName,
      accountId,
      ownerId: ctx.userId,
    });

    return {
      success: true,
      contactId,
      contact: {
        id: contactId,
        firstName,
        lastName,
        accountId,
      },
    };
  } catch (error: any) {
    return { success: false, error: error.message };
  }
};

/**
 * Creates a pet owned by a contact; the accountId is derived from that
 * contact's record.
 */
export const createPet: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for createPet');
  }

  // NOTE(review): `species`, `breed` and `age` are accepted but not inserted
  // below, and the insert always targets the 'dogs' table regardless of
  // species — confirm whether this is intentional demo scoping.
  const { name, species, breed, age, ownerId } = args as {
    name: string;
    species: string;
    breed?: string;
    age?: number;
    ownerId: string; // Contact ID
  };

  if (!name || !ownerId) {
    throw new Error('Pet name and owner (contact) are required');
  }

  try {
    const petId = randomUUID();

    // Get the accountId from the contact
    const contact = await ctx.knex('contacts').where('id', ownerId).first();

    // Insert into dogs table
    await ctx.knex('dogs').insert({
      id: petId,
      name,
      ownerId,
      accountId: contact?.accountId,
    });

    return {
      success: true,
      petId,
      pet: { id: petId, name, ownerId, accountId: contact?.accountId },
    };
  } catch (error: any) {
    return { success: false, error: error.message };
  }
};

// Export all demo tools as one handler map for ToolRegistry registration.
export const demoTools = {
  findAccount,
  createAccount,
  findContact,
  createContact,
  createPet,
};
|
||||
89
backend/src/ai-processes/tools/tool-registry.ts
Normal file
89
backend/src/ai-processes/tools/tool-registry.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { Knex } from 'knex';
|
||||
import { AiToolConfig } from '../../models/ai-process.model';
|
||||
|
||||
export interface ToolContext {
|
||||
tenantId: string;
|
||||
userId: string;
|
||||
knex?: Knex;
|
||||
authScopes?: string[];
|
||||
}
|
||||
|
||||
export type ToolHandler = (
|
||||
ctx: ToolContext,
|
||||
args: Record<string, unknown>,
|
||||
) => Promise<Record<string, unknown>>;
|
||||
|
||||
export interface ToolDefinition {
|
||||
name: string;
|
||||
description: string;
|
||||
handler: ToolHandler;
|
||||
inputSchema?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
const defaultTools: Record<string, ToolHandler> = {
|
||||
findAccount: async () => ({ accountId: null, found: false }),
|
||||
createAccount: async (_ctx, args) => ({ accountId: `acc_${Date.now()}`, args }),
|
||||
findContact: async () => ({ contactId: null, found: false }),
|
||||
createContact: async (_ctx, args) => ({ contactId: `con_${Date.now()}`, args }),
|
||||
createPet: async (_ctx, args) => ({ petId: `pet_${Date.now()}`, args }),
|
||||
};
|
||||
|
||||
const tenantAllowlist: Record<string, string[]> = {
|
||||
default: Object.keys(defaultTools),
|
||||
};
|
||||
|
||||
export class ToolRegistry {
|
||||
private tools: Record<string, ToolHandler>;
|
||||
private allowlist: Record<string, string[]>;
|
||||
private dbAllowlistCache: Map<string, Set<string>> = new Map();
|
||||
|
||||
constructor(
|
||||
tools: Record<string, ToolHandler> = defaultTools,
|
||||
allowlist: Record<string, string[]> = tenantAllowlist,
|
||||
) {
|
||||
this.tools = tools;
|
||||
this.allowlist = allowlist;
|
||||
}
|
||||
|
||||
registerTool(name: string, handler: ToolHandler) {
|
||||
this.tools[name] = handler;
|
||||
}
|
||||
|
||||
async loadTenantAllowlist(tenantId: string, knex: Knex) {
|
||||
const configs = await AiToolConfig.query(knex)
|
||||
.where('enabled', true);
|
||||
|
||||
const allowed = new Set(configs.map((c) => c.toolName));
|
||||
this.dbAllowlistCache.set(tenantId, allowed);
|
||||
return allowed;
|
||||
}
|
||||
|
||||
async isToolAllowed(tenantId: string, toolName: string, knex?: Knex) {
|
||||
// Check database cache first
|
||||
if (this.dbAllowlistCache.has(tenantId)) {
|
||||
return this.dbAllowlistCache.get(tenantId)!.has(toolName);
|
||||
}
|
||||
|
||||
// Load from database if knex provided
|
||||
if (knex) {
|
||||
const allowed = await this.loadTenantAllowlist(tenantId, knex);
|
||||
return allowed.has(toolName);
|
||||
}
|
||||
|
||||
// Fallback to static allowlist
|
||||
const allowed = this.allowlist[tenantId] || this.allowlist.default || [];
|
||||
return allowed.includes(toolName);
|
||||
}
|
||||
|
||||
getTool(toolName: string): ToolHandler {
|
||||
const tool = this.tools[toolName];
|
||||
if (!tool) {
|
||||
throw new Error(`Tool ${toolName} is not registered.`);
|
||||
}
|
||||
return tool;
|
||||
}
|
||||
|
||||
getAllToolNames(): string[] {
|
||||
return Object.keys(this.tools);
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,9 @@ import { RbacModule } from './rbac/rbac.module';
|
||||
import { ObjectModule } from './object/object.module';
|
||||
import { AppBuilderModule } from './app-builder/app-builder.module';
|
||||
import { PageLayoutModule } from './page-layout/page-layout.module';
|
||||
import { VoiceModule } from './voice/voice.module';
|
||||
import { AiAssistantModule } from './ai-assistant/ai-assistant.module';
|
||||
import { AiProcessesModule } from './ai-processes/ai-processes.module';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -20,6 +23,9 @@ import { PageLayoutModule } from './page-layout/page-layout.module';
|
||||
ObjectModule,
|
||||
AppBuilderModule,
|
||||
PageLayoutModule,
|
||||
VoiceModule,
|
||||
AiAssistantModule,
|
||||
AiProcessesModule,
|
||||
],
|
||||
})
|
||||
export class AppModule {}
|
||||
|
||||
@@ -5,6 +5,7 @@ import {
|
||||
UnauthorizedException,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
Req,
|
||||
} from '@nestjs/common';
|
||||
import { IsEmail, IsString, MinLength, IsOptional } from 'class-validator';
|
||||
import { AuthService } from './auth.service';
|
||||
@@ -40,17 +41,33 @@ class RegisterDto {
|
||||
export class AuthController {
|
||||
constructor(private authService: AuthService) {}
|
||||
|
||||
private isCentralSubdomain(subdomain: string): boolean {
|
||||
const centralSubdomains = (process.env.CENTRAL_SUBDOMAINS || 'central,admin').split(',');
|
||||
return centralSubdomains.includes(subdomain);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('login')
|
||||
async login(@TenantId() tenantId: string, @Body() loginDto: LoginDto) {
|
||||
if (!tenantId) {
|
||||
throw new UnauthorizedException('Tenant ID is required');
|
||||
async login(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() loginDto: LoginDto,
|
||||
@Req() req: any,
|
||||
) {
|
||||
const subdomain = req.raw?.subdomain;
|
||||
|
||||
|
||||
// If it's a central subdomain, tenantId is not required
|
||||
if (!subdomain || !this.isCentralSubdomain(subdomain)) {
|
||||
if (!tenantId) {
|
||||
throw new UnauthorizedException('Tenant ID is required');
|
||||
}
|
||||
}
|
||||
|
||||
const user = await this.authService.validateUser(
|
||||
tenantId,
|
||||
loginDto.email,
|
||||
loginDto.password,
|
||||
subdomain,
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
@@ -64,9 +81,15 @@ export class AuthController {
|
||||
async register(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() registerDto: RegisterDto,
|
||||
@Req() req: any,
|
||||
) {
|
||||
if (!tenantId) {
|
||||
throw new UnauthorizedException('Tenant ID is required');
|
||||
const subdomain = req.raw?.subdomain;
|
||||
|
||||
// If it's a central subdomain, tenantId is not required
|
||||
if (!subdomain || !this.isCentralSubdomain(subdomain)) {
|
||||
if (!tenantId) {
|
||||
throw new UnauthorizedException('Tenant ID is required');
|
||||
}
|
||||
}
|
||||
|
||||
const user = await this.authService.register(
|
||||
@@ -75,6 +98,7 @@ export class AuthController {
|
||||
registerDto.password,
|
||||
registerDto.firstName,
|
||||
registerDto.lastName,
|
||||
subdomain,
|
||||
);
|
||||
|
||||
return user;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { JwtService } from '@nestjs/jwt';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { getCentralPrisma } from '../prisma/central-prisma.service';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
|
||||
@Injectable()
|
||||
@@ -10,11 +11,24 @@ export class AuthService {
|
||||
private jwtService: JwtService,
|
||||
) {}
|
||||
|
||||
private isCentralSubdomain(subdomain: string): boolean {
|
||||
const centralSubdomains = (process.env.CENTRAL_SUBDOMAINS || 'central,admin').split(',');
|
||||
return centralSubdomains.includes(subdomain);
|
||||
}
|
||||
|
||||
async validateUser(
|
||||
tenantId: string,
|
||||
email: string,
|
||||
password: string,
|
||||
subdomain?: string,
|
||||
): Promise<any> {
|
||||
|
||||
// Check if this is a central subdomain
|
||||
if (subdomain && this.isCentralSubdomain(subdomain)) {
|
||||
return this.validateCentralUser(email, password);
|
||||
}
|
||||
|
||||
// Otherwise, validate as tenant user
|
||||
const tenantDb = await this.tenantDbService.getTenantKnex(tenantId);
|
||||
|
||||
const user = await tenantDb('users')
|
||||
@@ -43,6 +57,31 @@ export class AuthService {
|
||||
return null;
|
||||
}
|
||||
|
||||
private async validateCentralUser(
|
||||
email: string,
|
||||
password: string,
|
||||
): Promise<any> {
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
const user = await centralPrisma.user.findUnique({
|
||||
where: { email },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (await bcrypt.compare(password, user.password)) {
|
||||
const { password: _, ...result } = user;
|
||||
return {
|
||||
...result,
|
||||
isCentralAdmin: true,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async login(user: any) {
|
||||
const payload = {
|
||||
sub: user.id,
|
||||
@@ -66,7 +105,14 @@ export class AuthService {
|
||||
password: string,
|
||||
firstName?: string,
|
||||
lastName?: string,
|
||||
subdomain?: string,
|
||||
) {
|
||||
// Check if this is a central subdomain
|
||||
if (subdomain && this.isCentralSubdomain(subdomain)) {
|
||||
return this.registerCentralUser(email, password, firstName, lastName);
|
||||
}
|
||||
|
||||
// Otherwise, register as tenant user
|
||||
const tenantDb = await this.tenantDbService.getTenantKnex(tenantId);
|
||||
|
||||
const hashedPassword = await bcrypt.hash(password, 10);
|
||||
@@ -88,4 +134,28 @@ export class AuthService {
|
||||
const { password: _, ...result } = user;
|
||||
return result;
|
||||
}
|
||||
|
||||
private async registerCentralUser(
|
||||
email: string,
|
||||
password: string,
|
||||
firstName?: string,
|
||||
lastName?: string,
|
||||
) {
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
const hashedPassword = await bcrypt.hash(password, 10);
|
||||
|
||||
const user = await centralPrisma.user.create({
|
||||
data: {
|
||||
email,
|
||||
password: hashedPassword,
|
||||
firstName: firstName || null,
|
||||
lastName: lastName || null,
|
||||
isActive: true,
|
||||
},
|
||||
});
|
||||
|
||||
const { password: _, ...result } = user;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,13 +3,15 @@ import {
|
||||
FastifyAdapter,
|
||||
NestFastifyApplication,
|
||||
} from '@nestjs/platform-fastify';
|
||||
import { ValidationPipe } from '@nestjs/common';
|
||||
import { ValidationPipe, Logger } from '@nestjs/common';
|
||||
import { AppModule } from './app.module';
|
||||
import { VoiceService } from './voice/voice.service';
|
||||
import { AudioConverterService } from './voice/audio-converter.service';
|
||||
|
||||
async function bootstrap() {
|
||||
const app = await NestFactory.create<NestFastifyApplication>(
|
||||
AppModule,
|
||||
new FastifyAdapter(),
|
||||
new FastifyAdapter({ logger: true }),
|
||||
);
|
||||
|
||||
// Global validation pipe
|
||||
@@ -33,6 +35,145 @@ async function bootstrap() {
|
||||
const port = process.env.PORT || 3000;
|
||||
await app.listen(port, '0.0.0.0');
|
||||
|
||||
// After app is listening, register WebSocket handler
|
||||
const fastifyInstance = app.getHttpAdapter().getInstance();
|
||||
const logger = new Logger('MediaStreamWS');
|
||||
const voiceService = app.get(VoiceService);
|
||||
const audioConverter = app.get(AudioConverterService);
|
||||
|
||||
const WebSocketServer = require('ws').Server;
|
||||
const wss = new WebSocketServer({ noServer: true });
|
||||
|
||||
// Handle WebSocket upgrades at the server level
|
||||
const server = (fastifyInstance.server as any);
|
||||
|
||||
// Track active Media Streams connections: streamSid -> WebSocket
|
||||
const mediaStreams: Map<string, any> = new Map();
|
||||
|
||||
server.on('upgrade', (request: any, socket: any, head: any) => {
|
||||
if (request.url === '/api/voice/media-stream') {
|
||||
logger.log('=== MEDIA STREAM WEBSOCKET UPGRADE REQUEST ===');
|
||||
logger.log(`Path: ${request.url}`);
|
||||
|
||||
wss.handleUpgrade(request, socket, head, (ws: any) => {
|
||||
logger.log('=== MEDIA STREAM WEBSOCKET UPGRADED SUCCESSFULLY ===');
|
||||
handleMediaStreamSocket(ws);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
async function handleMediaStreamSocket(ws: any) {
|
||||
let streamSid: string | null = null;
|
||||
let callSid: string | null = null;
|
||||
let tenantDomain: string | null = null;
|
||||
let mediaPacketCount = 0;
|
||||
|
||||
ws.on('message', async (message: Buffer) => {
|
||||
try {
|
||||
const msg = JSON.parse(message.toString());
|
||||
|
||||
switch (msg.event) {
|
||||
case 'connected':
|
||||
logger.log('=== MEDIA STREAM EVENT: CONNECTED ===');
|
||||
logger.log(`Protocol: ${msg.protocol}`);
|
||||
logger.log(`Version: ${msg.version}`);
|
||||
break;
|
||||
|
||||
case 'start':
|
||||
streamSid = msg.streamSid;
|
||||
callSid = msg.start.callSid;
|
||||
tenantDomain = msg.start.customParameters?.tenantId || 'tenant1';
|
||||
|
||||
logger.log(`=== MEDIA STREAM EVENT: START ===`);
|
||||
logger.log(`StreamSid: ${streamSid}`);
|
||||
logger.log(`CallSid: ${callSid}`);
|
||||
logger.log(`Tenant: ${tenantDomain}`);
|
||||
logger.log(`MediaFormat: ${JSON.stringify(msg.start.mediaFormat)}`);
|
||||
|
||||
mediaStreams.set(streamSid, ws);
|
||||
logger.log(`Stored WebSocket for streamSid: ${streamSid}. Total active streams: ${mediaStreams.size}`);
|
||||
|
||||
// Initialize OpenAI Realtime connection
|
||||
logger.log(`Initializing OpenAI Realtime for call ${callSid}...`);
|
||||
try {
|
||||
await voiceService.initializeOpenAIRealtime({
|
||||
callSid,
|
||||
tenantId: tenantDomain,
|
||||
userId: msg.start.customParameters?.userId || 'system',
|
||||
});
|
||||
logger.log(`✓ OpenAI Realtime initialized for call ${callSid}`);
|
||||
} catch (error: any) {
|
||||
logger.error(`Failed to initialize OpenAI: ${error.message}`);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'media':
|
||||
mediaPacketCount++;
|
||||
// Only log every 500 packets to reduce noise
|
||||
if (mediaPacketCount % 500 === 0) {
|
||||
logger.log(`Received media packet #${mediaPacketCount} for StreamSid: ${streamSid}`);
|
||||
}
|
||||
|
||||
if (!callSid || !tenantDomain) {
|
||||
logger.warn('Received media before start event');
|
||||
break;
|
||||
}
|
||||
|
||||
try {
|
||||
// Convert Twilio audio (μ-law 8kHz) to OpenAI format (PCM16 24kHz)
|
||||
const twilioAudio = msg.media.payload;
|
||||
const openaiAudio = audioConverter.twilioToOpenAI(twilioAudio);
|
||||
|
||||
// Send audio to OpenAI Realtime API
|
||||
await voiceService.sendAudioToOpenAI(callSid, openaiAudio);
|
||||
} catch (error: any) {
|
||||
logger.error(`Error processing media: ${error.message}`);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'stop':
|
||||
logger.log(`=== MEDIA STREAM EVENT: STOP ===`);
|
||||
logger.log(`StreamSid: ${streamSid}`);
|
||||
logger.log(`Total media packets received: ${mediaPacketCount}`);
|
||||
|
||||
if (streamSid) {
|
||||
mediaStreams.delete(streamSid);
|
||||
logger.log(`Removed WebSocket for streamSid: ${streamSid}`);
|
||||
}
|
||||
|
||||
// Clean up OpenAI connection
|
||||
if (callSid) {
|
||||
try {
|
||||
logger.log(`Cleaning up OpenAI connection for call ${callSid}...`);
|
||||
await voiceService.cleanupOpenAIConnection(callSid);
|
||||
logger.log(`✓ OpenAI connection cleaned up`);
|
||||
} catch (error: any) {
|
||||
logger.error(`Failed to cleanup OpenAI: ${error.message}`);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.debug(`Unknown media stream event: ${msg.event}`);
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`Error processing media stream message: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('close', () => {
|
||||
logger.log(`=== MEDIA STREAM WEBSOCKET CLOSED ===`);
|
||||
if (streamSid) {
|
||||
mediaStreams.delete(streamSid);
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('error', (error: Error) => {
|
||||
logger.error(`=== MEDIA STREAM WEBSOCKET ERROR ===`);
|
||||
logger.error(`Error message: ${error.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
console.log(`🚀 Application is running on: http://localhost:${port}/api`);
|
||||
}
|
||||
|
||||
|
||||
306
backend/src/migration/custom-migration.service.ts
Normal file
306
backend/src/migration/custom-migration.service.ts
Normal file
@@ -0,0 +1,306 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
/**
 * Row shape of the per-tenant `custom_migrations` bookkeeping table.
 * One record describes a single schema change and its execution outcome.
 */
export interface CustomMigrationRecord {
  id: string;
  tenantId: string;
  name: string;
  description: string;
  // Broad category of the change; 'custom' covers free-form SQL.
  type: 'create_table' | 'add_column' | 'alter_column' | 'add_index' | 'drop_table' | 'custom';
  // Raw SQL that is executed verbatim against the tenant database.
  sql: string;
  status: 'pending' | 'executed' | 'failed';
  executedAt?: Date; // set when status transitions to 'executed'
  error?: string; // populated when status transitions to 'failed'
  createdAt: Date;
  updatedAt: Date;
}
|
||||
|
||||
@Injectable()
|
||||
export class CustomMigrationService {
|
||||
private readonly logger = new Logger(CustomMigrationService.name);
|
||||
|
||||
/**
|
||||
* Generate SQL to create a table with standard fields
|
||||
*/
|
||||
generateCreateTableSQL(
|
||||
tableName: string,
|
||||
fields: {
|
||||
apiName: string;
|
||||
type: string;
|
||||
isRequired?: boolean;
|
||||
isUnique?: boolean;
|
||||
defaultValue?: string;
|
||||
}[] = [],
|
||||
): string {
|
||||
// Start with standard fields
|
||||
const columns: string[] = [
|
||||
'`id` VARCHAR(36) PRIMARY KEY',
|
||||
'`ownerId` VARCHAR(36)',
|
||||
'`name` VARCHAR(255)',
|
||||
'`created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP',
|
||||
'`updated_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP',
|
||||
];
|
||||
|
||||
// Add custom fields
|
||||
for (const field of fields) {
|
||||
const column = this.fieldToColumn(field);
|
||||
columns.push(column);
|
||||
}
|
||||
|
||||
// Add foreign key and index for ownerId
|
||||
columns.push('INDEX `idx_owner` (`ownerId`)');
|
||||
|
||||
return `CREATE TABLE IF NOT EXISTS \`${tableName}\` (
|
||||
${columns.join(',\n ')}
|
||||
)`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert field definition to SQL column definition
|
||||
*/
|
||||
private fieldToColumn(field: {
|
||||
apiName: string;
|
||||
type: string;
|
||||
isRequired?: boolean;
|
||||
isUnique?: boolean;
|
||||
defaultValue?: string;
|
||||
}): string {
|
||||
const columnName = field.apiName;
|
||||
let columnDef = `\`${columnName}\``;
|
||||
|
||||
// Map field types to SQL types
|
||||
switch (field.type.toUpperCase()) {
|
||||
case 'TEXT':
|
||||
case 'STRING':
|
||||
columnDef += ' VARCHAR(255)';
|
||||
break;
|
||||
case 'LONG_TEXT':
|
||||
columnDef += ' LONGTEXT';
|
||||
break;
|
||||
case 'NUMBER':
|
||||
case 'DECIMAL':
|
||||
columnDef += ' DECIMAL(18, 2)';
|
||||
break;
|
||||
case 'INTEGER':
|
||||
columnDef += ' INT';
|
||||
break;
|
||||
case 'BOOLEAN':
|
||||
columnDef += ' BOOLEAN DEFAULT FALSE';
|
||||
break;
|
||||
case 'DATE':
|
||||
columnDef += ' DATE';
|
||||
break;
|
||||
case 'DATE_TIME':
|
||||
columnDef += ' DATETIME';
|
||||
break;
|
||||
case 'EMAIL':
|
||||
columnDef += ' VARCHAR(255)';
|
||||
break;
|
||||
case 'URL':
|
||||
columnDef += ' VARCHAR(2048)';
|
||||
break;
|
||||
case 'PHONE':
|
||||
columnDef += ' VARCHAR(20)';
|
||||
break;
|
||||
case 'CURRENCY':
|
||||
columnDef += ' DECIMAL(18, 2)';
|
||||
break;
|
||||
case 'PERCENT':
|
||||
columnDef += ' DECIMAL(5, 2)';
|
||||
break;
|
||||
case 'PICKLIST':
|
||||
case 'MULTI_PICKLIST':
|
||||
columnDef += ' VARCHAR(255)';
|
||||
break;
|
||||
case 'LOOKUP':
|
||||
case 'BELONGS_TO':
|
||||
columnDef += ' VARCHAR(36)';
|
||||
break;
|
||||
default:
|
||||
columnDef += ' VARCHAR(255)';
|
||||
}
|
||||
|
||||
// Add constraints
|
||||
if (field.isRequired) {
|
||||
columnDef += ' NOT NULL';
|
||||
} else {
|
||||
columnDef += ' NULL';
|
||||
}
|
||||
|
||||
if (field.isUnique) {
|
||||
columnDef += ' UNIQUE';
|
||||
}
|
||||
|
||||
if (field.defaultValue !== undefined && field.defaultValue !== null) {
|
||||
columnDef += ` DEFAULT '${field.defaultValue}'`;
|
||||
}
|
||||
|
||||
return columnDef;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom migration record in the database
|
||||
*/
|
||||
async createMigrationRecord(
|
||||
tenantKnex: Knex,
|
||||
data: {
|
||||
tenantId: string;
|
||||
name: string;
|
||||
description: string;
|
||||
type: 'create_table' | 'add_column' | 'alter_column' | 'add_index' | 'drop_table' | 'custom';
|
||||
sql: string;
|
||||
},
|
||||
): Promise<CustomMigrationRecord> {
|
||||
// Ensure custom_migrations table exists
|
||||
await this.ensureMigrationsTable(tenantKnex);
|
||||
|
||||
const id = require('crypto').randomUUID();
|
||||
const now = new Date();
|
||||
|
||||
await tenantKnex('custom_migrations').insert({
|
||||
id,
|
||||
tenantId: data.tenantId,
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
type: data.type,
|
||||
sql: data.sql,
|
||||
status: 'pending',
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
});
|
||||
|
||||
return tenantKnex('custom_migrations').where({ id }).first();
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a pending migration and update its status
|
||||
*/
|
||||
async executeMigration(
|
||||
tenantKnex: Knex,
|
||||
migrationId: string,
|
||||
): Promise<CustomMigrationRecord> {
|
||||
try {
|
||||
// Get the migration record
|
||||
const migration = await tenantKnex('custom_migrations')
|
||||
.where({ id: migrationId })
|
||||
.first();
|
||||
|
||||
if (!migration) {
|
||||
throw new Error(`Migration ${migrationId} not found`);
|
||||
}
|
||||
|
||||
if (migration.status === 'executed') {
|
||||
this.logger.log(`Migration ${migrationId} already executed`);
|
||||
return migration;
|
||||
}
|
||||
|
||||
// Execute the SQL
|
||||
this.logger.log(`Executing migration: ${migration.name}`);
|
||||
await tenantKnex.raw(migration.sql);
|
||||
|
||||
// Update status
|
||||
const now = new Date();
|
||||
await tenantKnex('custom_migrations')
|
||||
.where({ id: migrationId })
|
||||
.update({
|
||||
status: 'executed',
|
||||
executedAt: now,
|
||||
updated_at: now,
|
||||
});
|
||||
|
||||
this.logger.log(`Migration ${migration.name} executed successfully`);
|
||||
return tenantKnex('custom_migrations').where({ id: migrationId }).first();
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to execute migration ${migrationId}:`, error);
|
||||
|
||||
// Update status with error
|
||||
const now = new Date();
|
||||
await tenantKnex('custom_migrations')
|
||||
.where({ id: migrationId })
|
||||
.update({
|
||||
status: 'failed',
|
||||
error: error.message,
|
||||
updated_at: now,
|
||||
});
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and execute a migration in one step
|
||||
*/
|
||||
async createAndExecuteMigration(
|
||||
tenantKnex: Knex,
|
||||
tenantId: string,
|
||||
data: {
|
||||
name: string;
|
||||
description: string;
|
||||
type: 'create_table' | 'add_column' | 'alter_column' | 'add_index' | 'drop_table' | 'custom';
|
||||
sql: string;
|
||||
},
|
||||
): Promise<CustomMigrationRecord> {
|
||||
// Create the migration record
|
||||
const migration = await this.createMigrationRecord(tenantKnex, {
|
||||
tenantId,
|
||||
...data,
|
||||
});
|
||||
|
||||
// Execute it immediately
|
||||
return this.executeMigration(tenantKnex, migration.id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the custom_migrations table exists in the tenant database
|
||||
*/
|
||||
private async ensureMigrationsTable(tenantKnex: Knex): Promise<void> {
|
||||
const hasTable = await tenantKnex.schema.hasTable('custom_migrations');
|
||||
|
||||
if (!hasTable) {
|
||||
await tenantKnex.schema.createTable('custom_migrations', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('tenantId').notNullable();
|
||||
table.string('name', 255).notNullable();
|
||||
table.text('description');
|
||||
table.enum('type', ['create_table', 'add_column', 'alter_column', 'add_index', 'drop_table', 'custom']).notNullable();
|
||||
table.text('sql').notNullable();
|
||||
table.enum('status', ['pending', 'executed', 'failed']).defaultTo('pending');
|
||||
table.timestamp('executedAt').nullable();
|
||||
table.text('error').nullable();
|
||||
table.timestamps(true, true);
|
||||
|
||||
table.index(['tenantId']);
|
||||
table.index(['status']);
|
||||
table.index(['created_at']);
|
||||
});
|
||||
|
||||
this.logger.log('Created custom_migrations table');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all migrations for a tenant
|
||||
*/
|
||||
async getMigrations(
|
||||
tenantKnex: Knex,
|
||||
tenantId: string,
|
||||
filter?: {
|
||||
status?: 'pending' | 'executed' | 'failed';
|
||||
type?: string;
|
||||
},
|
||||
): Promise<CustomMigrationRecord[]> {
|
||||
await this.ensureMigrationsTable(tenantKnex);
|
||||
|
||||
let query = tenantKnex('custom_migrations').where({ tenantId });
|
||||
|
||||
if (filter?.status) {
|
||||
query = query.where({ status: filter.status });
|
||||
}
|
||||
|
||||
if (filter?.type) {
|
||||
query = query.where({ type: filter.type });
|
||||
}
|
||||
|
||||
return query.orderBy('created_at', 'asc');
|
||||
}
|
||||
}
|
||||
10
backend/src/migration/migration.module.ts
Normal file
10
backend/src/migration/migration.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { CustomMigrationService } from './custom-migration.service';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
|
||||
/**
 * Nest module that wires CustomMigrationService into the DI container.
 * TenantModule is imported so the service can be composed with per-tenant
 * database access; the service is exported for use by other modules.
 */
@Module({
  imports: [TenantModule],
  providers: [CustomMigrationService],
  exports: [CustomMigrationService],
})
export class MigrationModule {}
|
||||
63
backend/src/models/ai-chat.model.ts
Normal file
63
backend/src/models/ai-chat.model.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { snakeCaseMappers } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class AiChatSession extends BaseModel {
|
||||
static tableName = 'ai_chat_sessions';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
|
||||
id!: string;
|
||||
userId!: string;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
messages: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: AiChatMessage,
|
||||
join: {
|
||||
from: 'ai_chat_sessions.id',
|
||||
to: 'ai_chat_messages.session_id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiChatMessage extends BaseModel {
|
||||
static tableName = 'ai_chat_messages';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
|
||||
id!: string;
|
||||
sessionId!: string;
|
||||
role!: string;
|
||||
content!: string;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
session: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiChatSession,
|
||||
join: {
|
||||
from: 'ai_chat_messages.session_id',
|
||||
to: 'ai_chat_sessions.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
164
backend/src/models/ai-process.model.ts
Normal file
164
backend/src/models/ai-process.model.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { QueryContext, snakeCaseMappers } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class AiProcess extends BaseModel {
|
||||
static tableName = 'ai_processes';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
|
||||
id!: string;
|
||||
name!: string;
|
||||
description?: string;
|
||||
latestVersion!: number;
|
||||
createdBy!: string;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
super.$beforeInsert(queryContext);
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
versions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: AiProcessVersion,
|
||||
join: {
|
||||
from: 'ai_processes.id',
|
||||
to: 'ai_process_versions.process_id',
|
||||
},
|
||||
},
|
||||
runs: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: AiProcessRun,
|
||||
join: {
|
||||
from: 'ai_processes.id',
|
||||
to: 'ai_process_runs.process_id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiProcessVersion extends BaseModel {
|
||||
static tableName = 'ai_process_versions';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['graphJson', 'compiledJson'];
|
||||
|
||||
id!: string;
|
||||
processId!: string;
|
||||
version!: number;
|
||||
graphJson!: Record<string, unknown>;
|
||||
compiledJson!: Record<string, unknown>;
|
||||
createdBy!: string;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
process: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcess,
|
||||
join: {
|
||||
from: 'ai_process_versions.process_id',
|
||||
to: 'ai_processes.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiProcessRun extends BaseModel {
|
||||
static tableName = 'ai_process_runs';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['inputJson', 'outputJson', 'errorJson', 'stateJson'];
|
||||
|
||||
id!: string;
|
||||
processId!: string;
|
||||
version!: number;
|
||||
status!: string;
|
||||
inputJson!: Record<string, unknown>;
|
||||
outputJson?: Record<string, unknown> | null;
|
||||
errorJson?: Record<string, unknown> | null;
|
||||
stateJson?: Record<string, unknown>;
|
||||
currentNodeId?: string | null;
|
||||
startedAt?: Date;
|
||||
endedAt?: Date | null;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.startedAt = this.startedAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
process: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcess,
|
||||
join: {
|
||||
from: 'ai_process_runs.process_id',
|
||||
to: 'ai_processes.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiAuditEvent extends BaseModel {
|
||||
static tableName = 'ai_audit_events';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['payloadJson'];
|
||||
|
||||
id!: string;
|
||||
runId!: string;
|
||||
eventType!: string;
|
||||
payloadJson!: Record<string, unknown>;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
run: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcessRun,
|
||||
join: {
|
||||
from: 'ai_audit_events.run_id',
|
||||
to: 'ai_process_runs.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiToolConfig extends BaseModel {
|
||||
static tableName = 'ai_tool_configs';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['configJson'];
|
||||
|
||||
id!: string;
|
||||
toolName!: string;
|
||||
enabled!: boolean;
|
||||
configJson?: Record<string, unknown>;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
super.$beforeInsert(queryContext);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,38 @@
|
||||
import { Model, ModelOptions, QueryContext, snakeCaseMappers } from 'objection';
|
||||
import { Model, ModelOptions, QueryContext } from 'objection';
|
||||
|
||||
export class BaseModel extends Model {
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
/**
|
||||
* Use a minimal column mapper: keep property names as-is, but handle
|
||||
* timestamp fields that are stored as created_at/updated_at in the DB.
|
||||
*/
|
||||
static columnNameMappers = {
|
||||
parse(dbRow: Record<string, any>) {
|
||||
const mapped: Record<string, any> = {};
|
||||
for (const [key, value] of Object.entries(dbRow || {})) {
|
||||
if (key === 'created_at') {
|
||||
mapped.createdAt = value;
|
||||
} else if (key === 'updated_at') {
|
||||
mapped.updatedAt = value;
|
||||
} else {
|
||||
mapped[key] = value;
|
||||
}
|
||||
}
|
||||
return mapped;
|
||||
},
|
||||
format(model: Record<string, any>) {
|
||||
const mapped: Record<string, any> = {};
|
||||
for (const [key, value] of Object.entries(model || {})) {
|
||||
if (key === 'createdAt') {
|
||||
mapped.created_at = value;
|
||||
} else if (key === 'updatedAt') {
|
||||
mapped.updated_at = value;
|
||||
} else {
|
||||
mapped[key] = value;
|
||||
}
|
||||
}
|
||||
return mapped;
|
||||
},
|
||||
};
|
||||
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
|
||||
114
backend/src/models/central.model.ts
Normal file
114
backend/src/models/central.model.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { Model, ModelOptions, QueryContext } from 'objection';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
|
||||
* Central database models using Objection.js
|
||||
* These models work with the central database (not tenant databases)
|
||||
*/
|
||||
|
||||
export class CentralTenant extends Model {
|
||||
static tableName = 'tenants';
|
||||
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
dbHost: string;
|
||||
dbPort: number;
|
||||
dbName: string;
|
||||
dbUsername: string;
|
||||
dbPassword: string;
|
||||
status: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
|
||||
// Relations
|
||||
domains?: CentralDomain[];
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
// Auto-generate slug from name if not provided
|
||||
if (!this.slug && this.name) {
|
||||
this.slug = this.name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '');
|
||||
}
|
||||
this.createdAt = new Date();
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate(opt: ModelOptions, queryContext: QueryContext) {
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
domains: {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass: CentralDomain,
|
||||
join: {
|
||||
from: 'tenants.id',
|
||||
to: 'domains.tenantId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class CentralDomain extends Model {
|
||||
static tableName = 'domains';
|
||||
|
||||
id: string;
|
||||
domain: string;
|
||||
tenantId: string;
|
||||
isPrimary: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
|
||||
// Relations
|
||||
tenant?: CentralTenant;
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = new Date();
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate(opt: ModelOptions, queryContext: QueryContext) {
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
tenant: {
|
||||
relation: Model.BelongsToOneRelation,
|
||||
modelClass: CentralTenant,
|
||||
join: {
|
||||
from: 'domains.tenantId',
|
||||
to: 'tenants.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class CentralUser extends Model {
|
||||
static tableName = 'users';
|
||||
|
||||
id: string;
|
||||
email: string;
|
||||
password: string;
|
||||
firstName: string | null;
|
||||
lastName: string | null;
|
||||
role: string;
|
||||
isActive: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = new Date();
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate(opt: ModelOptions, queryContext: QueryContext) {
|
||||
this.updatedAt = new Date();
|
||||
}
|
||||
}
|
||||
33
backend/src/models/contact-detail.model.ts
Normal file
33
backend/src/models/contact-detail.model.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
/**
 * A single contact detail (e.g. a phone number or email entry) attached
 * polymorphically to either an Account or a Contact record.
 */
export class ContactDetail extends BaseModel {
  static tableName = 'contact_details';

  id!: string;
  relatedObjectType!: 'Account' | 'Contact'; // discriminator for the polymorphic parent
  relatedObjectId!: string; // id of the parent row in accounts/contacts
  detailType!: string; // kind of detail, e.g. phone/email — TODO confirm allowed values
  label?: string;
  value!: string;
  isPrimary!: boolean;

  // Provide optional relations for each supported parent type.
  // NOTE(review): string modelClass values ('account.model', 'contact.model')
  // rely on objection's modelPaths resolution — verify Model.modelPaths is
  // configured, otherwise these relations will fail to resolve.
  static relationMappings = {
    account: {
      relation: BaseModel.BelongsToOneRelation,
      modelClass: 'account.model',
      join: {
        from: 'contact_details.relatedObjectId',
        to: 'accounts.id',
      },
    },
    contact: {
      relation: BaseModel.BelongsToOneRelation,
      modelClass: 'contact.model',
      join: {
        from: 'contact_details.relatedObjectId',
        to: 'contacts.id',
      },
    },
  };
}
|
||||
30
backend/src/models/contact.model.ts
Normal file
30
backend/src/models/contact.model.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class Contact extends BaseModel {
|
||||
static tableName = 'contacts';
|
||||
|
||||
id!: string;
|
||||
firstName!: string;
|
||||
lastName!: string;
|
||||
accountId!: string;
|
||||
ownerId?: string;
|
||||
|
||||
static relationMappings = {
|
||||
account: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'account.model',
|
||||
join: {
|
||||
from: 'contacts.accountId',
|
||||
to: 'accounts.id',
|
||||
},
|
||||
},
|
||||
owner: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'user.model',
|
||||
join: {
|
||||
from: 'contacts.ownerId',
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -30,6 +30,8 @@ export interface UIMetadata {
|
||||
step?: number; // For number
|
||||
accept?: string; // For file/image
|
||||
relationDisplayField?: string; // Which field to display for relations
|
||||
relationObjects?: string[]; // For polymorphic relations
|
||||
relationTypeField?: string; // Field API name storing the selected relation type
|
||||
|
||||
// Formatting
|
||||
format?: string; // Date format, number format, etc.
|
||||
@@ -74,5 +76,13 @@ export class FieldDefinition extends BaseModel {
|
||||
to: 'object_definitions.id',
|
||||
},
|
||||
},
|
||||
rolePermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: () => require('./role-field-permission.model').RoleFieldPermission,
|
||||
join: {
|
||||
from: 'field_definitions.id',
|
||||
to: 'role_field_permissions.fieldDefinitionId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -10,8 +10,11 @@ export class ObjectDefinition extends BaseModel {
|
||||
description?: string;
|
||||
isSystem: boolean;
|
||||
isCustom: boolean;
|
||||
orgWideDefault: 'private' | 'public_read' | 'public_read_write';
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
fields?: any[];
|
||||
rolePermissions?: any[];
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
@@ -25,12 +28,14 @@ export class ObjectDefinition extends BaseModel {
|
||||
description: { type: 'string' },
|
||||
isSystem: { type: 'boolean' },
|
||||
isCustom: { type: 'boolean' },
|
||||
orgWideDefault: { type: 'string', enum: ['private', 'public_read', 'public_read_write'] },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { FieldDefinition } = require('./field-definition.model');
|
||||
const { RoleObjectPermission } = require('./role-object-permission.model');
|
||||
|
||||
return {
|
||||
fields: {
|
||||
@@ -41,6 +46,14 @@ export class ObjectDefinition extends BaseModel {
|
||||
to: 'field_definitions.objectDefinitionId',
|
||||
},
|
||||
},
|
||||
rolePermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RoleObjectPermission,
|
||||
join: {
|
||||
from: 'object_definitions.id',
|
||||
to: 'role_object_permissions.objectDefinitionId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
113
backend/src/models/record-share.model.ts
Normal file
113
backend/src/models/record-share.model.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export interface RecordShareAccessLevel {
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
}
|
||||
|
||||
export class RecordShare extends BaseModel {
|
||||
static tableName = 'record_shares';
|
||||
|
||||
// Don't use snake_case mapping since DB columns are already camelCase
|
||||
static get columnNameMappers() {
|
||||
return {
|
||||
parse(obj: any) {
|
||||
return obj;
|
||||
},
|
||||
format(obj: any) {
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Don't auto-set timestamps - let DB defaults handle them
|
||||
$beforeInsert() {
|
||||
// Don't call super - skip BaseModel's timestamp logic
|
||||
}
|
||||
|
||||
$beforeUpdate() {
|
||||
// Don't call super - skip BaseModel's timestamp logic
|
||||
}
|
||||
|
||||
id!: string;
|
||||
objectDefinitionId!: string;
|
||||
recordId!: string;
|
||||
granteeUserId!: string;
|
||||
grantedByUserId!: string;
|
||||
accessLevel!: RecordShareAccessLevel;
|
||||
expiresAt?: Date;
|
||||
revokedAt?: Date;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['objectDefinitionId', 'recordId', 'granteeUserId', 'grantedByUserId', 'accessLevel'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
objectDefinitionId: { type: 'string' },
|
||||
recordId: { type: 'string' },
|
||||
granteeUserId: { type: 'string' },
|
||||
grantedByUserId: { type: 'string' },
|
||||
accessLevel: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
canDelete: { type: 'boolean' },
|
||||
},
|
||||
},
|
||||
expiresAt: {
|
||||
anyOf: [
|
||||
{ type: 'string', format: 'date-time' },
|
||||
{ type: 'null' },
|
||||
{ type: 'object' } // Allow Date objects
|
||||
]
|
||||
},
|
||||
revokedAt: {
|
||||
anyOf: [
|
||||
{ type: 'string', format: 'date-time' },
|
||||
{ type: 'null' },
|
||||
{ type: 'object' } // Allow Date objects
|
||||
]
|
||||
},
|
||||
createdAt: { type: ['string', 'object'], format: 'date-time' },
|
||||
updatedAt: { type: ['string', 'object'], format: 'date-time' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { ObjectDefinition } = require('./object-definition.model');
|
||||
const { User } = require('./user.model');
|
||||
|
||||
return {
|
||||
objectDefinition: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: ObjectDefinition,
|
||||
join: {
|
||||
from: 'record_shares.objectDefinitionId',
|
||||
to: 'object_definitions.id',
|
||||
},
|
||||
},
|
||||
granteeUser: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'record_shares.granteeUserId',
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
grantedByUser: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: User,
|
||||
join: {
|
||||
from: 'record_shares.grantedByUserId',
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
51
backend/src/models/role-field-permission.model.ts
Normal file
51
backend/src/models/role-field-permission.model.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class RoleFieldPermission extends BaseModel {
|
||||
static tableName = 'role_field_permissions';
|
||||
|
||||
id!: string;
|
||||
roleId!: string;
|
||||
fieldDefinitionId!: string;
|
||||
canRead!: boolean;
|
||||
canEdit!: boolean;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['roleId', 'fieldDefinitionId'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
roleId: { type: 'string' },
|
||||
fieldDefinitionId: { type: 'string' },
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { Role } = require('./role.model');
|
||||
const { FieldDefinition } = require('./field-definition.model');
|
||||
|
||||
return {
|
||||
role: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: Role,
|
||||
join: {
|
||||
from: 'role_field_permissions.roleId',
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
fieldDefinition: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: FieldDefinition,
|
||||
join: {
|
||||
from: 'role_field_permissions.fieldDefinitionId',
|
||||
to: 'field_definitions.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
59
backend/src/models/role-object-permission.model.ts
Normal file
59
backend/src/models/role-object-permission.model.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class RoleObjectPermission extends BaseModel {
|
||||
static tableName = 'role_object_permissions';
|
||||
|
||||
id!: string;
|
||||
roleId!: string;
|
||||
objectDefinitionId!: string;
|
||||
canCreate!: boolean;
|
||||
canRead!: boolean;
|
||||
canEdit!: boolean;
|
||||
canDelete!: boolean;
|
||||
canViewAll!: boolean;
|
||||
canModifyAll!: boolean;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['roleId', 'objectDefinitionId'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
roleId: { type: 'string' },
|
||||
objectDefinitionId: { type: 'string' },
|
||||
canCreate: { type: 'boolean' },
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
canDelete: { type: 'boolean' },
|
||||
canViewAll: { type: 'boolean' },
|
||||
canModifyAll: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { Role } = require('./role.model');
|
||||
const { ObjectDefinition } = require('./object-definition.model');
|
||||
|
||||
return {
|
||||
role: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: Role,
|
||||
join: {
|
||||
from: 'role_object_permissions.roleId',
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
objectDefinition: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: ObjectDefinition,
|
||||
join: {
|
||||
from: 'role_object_permissions.objectDefinitionId',
|
||||
to: 'object_definitions.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -27,6 +27,8 @@ export class Role extends BaseModel {
|
||||
const { RolePermission } = require('./role-permission.model');
|
||||
const { Permission } = require('./permission.model');
|
||||
const { User } = require('./user.model');
|
||||
const { RoleObjectPermission } = require('./role-object-permission.model');
|
||||
const { RoleFieldPermission } = require('./role-field-permission.model');
|
||||
|
||||
return {
|
||||
rolePermissions: {
|
||||
@@ -61,6 +63,22 @@ export class Role extends BaseModel {
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
objectPermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RoleObjectPermission,
|
||||
join: {
|
||||
from: 'roles.id',
|
||||
to: 'role_object_permissions.roleId',
|
||||
},
|
||||
},
|
||||
fieldPermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RoleFieldPermission,
|
||||
join: {
|
||||
from: 'roles.id',
|
||||
to: 'role_field_permissions.roleId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,9 @@ export interface FieldConfigDTO {
|
||||
step?: number;
|
||||
accept?: string;
|
||||
relationObject?: string;
|
||||
relationObjects?: string[];
|
||||
relationDisplayField?: string;
|
||||
relationTypeField?: string;
|
||||
format?: string;
|
||||
prefix?: string;
|
||||
suffix?: string;
|
||||
@@ -43,6 +45,14 @@ export interface ObjectDefinitionDTO {
|
||||
description?: string;
|
||||
isSystem: boolean;
|
||||
fields: FieldConfigDTO[];
|
||||
relatedLists?: Array<{
|
||||
title: string;
|
||||
relationName: string;
|
||||
objectApiName: string;
|
||||
fields: FieldConfigDTO[];
|
||||
canCreate?: boolean;
|
||||
createRoute?: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@@ -51,13 +61,29 @@ export class FieldMapperService {
|
||||
* Convert a field definition from the database to a frontend-friendly FieldConfig
|
||||
*/
|
||||
mapFieldToDTO(field: any): FieldConfigDTO {
|
||||
const uiMetadata = field.uiMetadata || {};
|
||||
// Parse ui_metadata if it's a JSON string or object
|
||||
let uiMetadata: any = {};
|
||||
const metadataField = field.ui_metadata || field.uiMetadata;
|
||||
if (metadataField) {
|
||||
if (typeof metadataField === 'string') {
|
||||
try {
|
||||
uiMetadata = JSON.parse(metadataField);
|
||||
} catch (e) {
|
||||
uiMetadata = {};
|
||||
}
|
||||
} else {
|
||||
uiMetadata = metadataField;
|
||||
}
|
||||
}
|
||||
|
||||
const frontendType = this.mapFieldType(field.type);
|
||||
const isLookupField = frontendType === 'belongsTo' || field.type.toLowerCase().includes('lookup');
|
||||
|
||||
return {
|
||||
id: field.id,
|
||||
apiName: field.apiName,
|
||||
label: field.label,
|
||||
type: this.mapFieldType(field.type),
|
||||
type: frontendType,
|
||||
|
||||
// Display properties
|
||||
placeholder: uiMetadata.placeholder || field.description,
|
||||
@@ -82,7 +108,12 @@ export class FieldMapperService {
|
||||
step: uiMetadata.step,
|
||||
accept: uiMetadata.accept,
|
||||
relationObject: field.referenceObject,
|
||||
relationDisplayField: uiMetadata.relationDisplayField,
|
||||
relationObjects: uiMetadata.relationObjects,
|
||||
// For lookup fields, provide default display field if not specified
|
||||
relationDisplayField: isLookupField
|
||||
? (uiMetadata.relationDisplayField || 'name')
|
||||
: uiMetadata.relationDisplayField,
|
||||
relationTypeField: uiMetadata.relationTypeField,
|
||||
|
||||
// Formatting
|
||||
format: uiMetadata.format,
|
||||
@@ -187,6 +218,17 @@ export class FieldMapperService {
|
||||
.filter((f: any) => f.isActive !== false)
|
||||
.sort((a: any, b: any) => (a.displayOrder || 0) - (b.displayOrder || 0))
|
||||
.map((f: any) => this.mapFieldToDTO(f)),
|
||||
relatedLists: (objectDef.relatedLists || []).map((list: any) => ({
|
||||
title: list.title,
|
||||
relationName: list.relationName,
|
||||
objectApiName: list.objectApiName,
|
||||
fields: (list.fields || [])
|
||||
.filter((f: any) => f.isActive !== false)
|
||||
.map((f: any) => this.mapFieldToDTO(f))
|
||||
.filter((f: any) => f.showOnList !== false),
|
||||
canCreate: list.canCreate,
|
||||
createRoute: list.createRoute,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
33
backend/src/object/models/base.model.ts
Normal file
33
backend/src/object/models/base.model.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Model } from 'objection';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
|
||||
* Base model for all dynamic and system models
|
||||
* Provides common functionality for all objects
|
||||
*/
|
||||
export class BaseModel extends Model {
|
||||
// Common fields
|
||||
id?: string;
|
||||
tenantId?: string;
|
||||
ownerId?: string;
|
||||
name?: string;
|
||||
created_at?: string;
|
||||
updated_at?: string;
|
||||
|
||||
// Hook to set system-managed fields
|
||||
async $beforeInsert() {
|
||||
if (!this.id) {
|
||||
this.id = randomUUID();
|
||||
}
|
||||
if (!this.created_at) {
|
||||
this.created_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
if (!this.updated_at) {
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
|
||||
async $beforeUpdate() {
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
258
backend/src/object/models/dynamic-model.factory.ts
Normal file
258
backend/src/object/models/dynamic-model.factory.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import { ModelClass, JSONSchema, RelationMappings, Model } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export interface FieldDefinition {
|
||||
apiName: string;
|
||||
label: string;
|
||||
type: string;
|
||||
isRequired?: boolean;
|
||||
isUnique?: boolean;
|
||||
referenceObject?: string;
|
||||
defaultValue?: string;
|
||||
}
|
||||
|
||||
export interface RelationDefinition {
|
||||
name: string;
|
||||
type: 'belongsTo' | 'hasMany' | 'hasManyThrough';
|
||||
targetObjectApiName: string;
|
||||
fromColumn: string;
|
||||
toColumn: string;
|
||||
}
|
||||
|
||||
export interface ObjectMetadata {
|
||||
apiName: string;
|
||||
tableName: string;
|
||||
fields: FieldDefinition[];
|
||||
relations?: RelationDefinition[];
|
||||
}
|
||||
|
||||
export class DynamicModelFactory {
|
||||
/**
|
||||
* Get relation name from lookup field API name
|
||||
* Converts "ownerId" -> "owner", "customFieldId" -> "customfield"
|
||||
*/
|
||||
static getRelationName(lookupFieldApiName: string): string {
|
||||
return lookupFieldApiName.replace(/Id$/, '').toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a dynamic model class from object metadata
|
||||
* @param meta Object metadata
|
||||
* @param getModel Function to retrieve model classes from registry
|
||||
*/
|
||||
static createModel(
|
||||
meta: ObjectMetadata,
|
||||
getModel?: (apiName: string) => ModelClass<any>,
|
||||
): ModelClass<any> {
|
||||
const { tableName, fields, apiName, relations = [] } = meta;
|
||||
|
||||
// Build JSON schema properties
|
||||
const properties: Record<string, any> = {
|
||||
id: { type: 'string' },
|
||||
tenantId: { type: 'string' },
|
||||
ownerId: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
created_at: { type: 'string', format: 'date-time' },
|
||||
updated_at: { type: 'string', format: 'date-time' },
|
||||
};
|
||||
|
||||
// Don't require id or tenantId - they'll be set automatically
|
||||
const required: string[] = [];
|
||||
|
||||
// Add custom fields
|
||||
for (const field of fields) {
|
||||
properties[field.apiName] = this.fieldToJsonSchema(field);
|
||||
|
||||
// Only mark as required if explicitly required AND not a system field
|
||||
const systemFields = ['id', 'tenantId', 'ownerId', 'name', 'created_at', 'updated_at'];
|
||||
if (field.isRequired && !systemFields.includes(field.apiName)) {
|
||||
required.push(field.apiName);
|
||||
}
|
||||
}
|
||||
|
||||
// Build relation mappings from lookup fields
|
||||
const lookupFields = fields.filter(f => f.type === 'LOOKUP' && f.referenceObject);
|
||||
|
||||
// Store lookup fields metadata for later use
|
||||
const lookupFieldsInfo = lookupFields.map(f => ({
|
||||
apiName: f.apiName,
|
||||
relationName: DynamicModelFactory.getRelationName(f.apiName),
|
||||
referenceObject: f.referenceObject,
|
||||
targetTable: this.getTableName(f.referenceObject),
|
||||
}));
|
||||
|
||||
// Create the dynamic model class extending BaseModel
|
||||
class DynamicModel extends BaseModel {
|
||||
static tableName = tableName;
|
||||
|
||||
static objectApiName = apiName;
|
||||
|
||||
static lookupFields = lookupFieldsInfo;
|
||||
|
||||
static get relationMappings(): RelationMappings {
|
||||
const mappings: RelationMappings = {};
|
||||
|
||||
// Build relation mappings from lookup fields
|
||||
for (const lookupInfo of lookupFieldsInfo) {
|
||||
// Use getModel function if provided, otherwise use string reference
|
||||
let modelClass: any = lookupInfo.referenceObject;
|
||||
|
||||
if (getModel) {
|
||||
const resolvedModel = getModel(lookupInfo.referenceObject);
|
||||
// Only use resolved model if it exists, otherwise skip this relation
|
||||
// It will be resolved later when the model is registered
|
||||
if (resolvedModel) {
|
||||
modelClass = resolvedModel;
|
||||
} else {
|
||||
// Skip this relation if model not found yet
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
mappings[lookupInfo.relationName] = {
|
||||
relation: Model.BelongsToOneRelation,
|
||||
modelClass,
|
||||
join: {
|
||||
from: `${tableName}.${lookupInfo.apiName}`,
|
||||
to: `${lookupInfo.targetTable}.id`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Add additional relation mappings (e.g., hasMany)
|
||||
for (const relation of relations) {
|
||||
if (mappings[relation.name]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let modelClass: any = relation.targetObjectApiName;
|
||||
if (getModel) {
|
||||
const resolvedModel = getModel(relation.targetObjectApiName);
|
||||
if (resolvedModel) {
|
||||
modelClass = resolvedModel;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const targetTable = DynamicModelFactory.getTableName(relation.targetObjectApiName);
|
||||
|
||||
if (relation.type === 'belongsTo') {
|
||||
mappings[relation.name] = {
|
||||
relation: Model.BelongsToOneRelation,
|
||||
modelClass,
|
||||
join: {
|
||||
from: `${tableName}.${relation.fromColumn}`,
|
||||
to: `${targetTable}.${relation.toColumn}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (relation.type === 'hasMany') {
|
||||
mappings[relation.name] = {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass,
|
||||
join: {
|
||||
from: `${tableName}.${relation.fromColumn}`,
|
||||
to: `${targetTable}.${relation.toColumn}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return mappings;
|
||||
}
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required,
|
||||
properties,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return DynamicModel as any;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a field definition to JSON schema property
|
||||
*/
|
||||
private static fieldToJsonSchema(field: FieldDefinition): Record<string, any> {
|
||||
const baseSchema = () => {
|
||||
switch (field.type.toUpperCase()) {
|
||||
case 'TEXT':
|
||||
case 'STRING':
|
||||
case 'EMAIL':
|
||||
case 'URL':
|
||||
case 'PHONE':
|
||||
case 'PICKLIST':
|
||||
case 'MULTI_PICKLIST':
|
||||
return {
|
||||
type: 'string',
|
||||
...(field.isUnique && { uniqueItems: true }),
|
||||
};
|
||||
|
||||
case 'LONG_TEXT':
|
||||
return { type: 'string' };
|
||||
|
||||
case 'NUMBER':
|
||||
case 'DECIMAL':
|
||||
case 'CURRENCY':
|
||||
case 'PERCENT':
|
||||
return {
|
||||
type: 'number',
|
||||
...(field.isUnique && { uniqueItems: true }),
|
||||
};
|
||||
|
||||
case 'INTEGER':
|
||||
return {
|
||||
type: 'integer',
|
||||
...(field.isUnique && { uniqueItems: true }),
|
||||
};
|
||||
|
||||
case 'BOOLEAN':
|
||||
return { type: 'boolean', default: false };
|
||||
|
||||
case 'DATE':
|
||||
return { type: 'string', format: 'date' };
|
||||
|
||||
case 'DATE_TIME':
|
||||
return { type: 'string', format: 'date-time' };
|
||||
|
||||
case 'LOOKUP':
|
||||
case 'BELONGS_TO':
|
||||
return { type: 'string' };
|
||||
|
||||
default:
|
||||
return { type: 'string' };
|
||||
}
|
||||
};
|
||||
|
||||
const schema = baseSchema();
|
||||
|
||||
// Allow null for non-required fields so optional strings/numbers don't fail validation
|
||||
if (!field.isRequired) {
|
||||
return {
|
||||
anyOf: [schema, { type: 'null' }],
|
||||
};
|
||||
}
|
||||
|
||||
return schema;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get table name from object API name
|
||||
*/
|
||||
private static getTableName(objectApiName: string): string {
|
||||
// Convert PascalCase/camelCase to snake_case and pluralize
|
||||
const snakeCase = objectApiName
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
if (snakeCase.endsWith('y')) {
|
||||
return `${snakeCase.slice(0, -1)}ies`;
|
||||
}
|
||||
return snakeCase.endsWith('s') ? snakeCase : `${snakeCase}s`;
|
||||
}
|
||||
}
|
||||
73
backend/src/object/models/model.registry.ts
Normal file
73
backend/src/object/models/model.registry.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ModelClass } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
import { DynamicModelFactory, ObjectMetadata } from './dynamic-model.factory';
|
||||
|
||||
/**
|
||||
* Registry to store and retrieve dynamic models
|
||||
* One registry per tenant
|
||||
*/
|
||||
@Injectable()
|
||||
export class ModelRegistry {
|
||||
private registry = new Map<string, ModelClass<BaseModel>>();
|
||||
|
||||
/**
|
||||
* Register a model in the registry
|
||||
*/
|
||||
registerModel(apiName: string, modelClass: ModelClass<BaseModel>): void {
|
||||
this.registry.set(apiName, modelClass);
|
||||
const lowerKey = apiName.toLowerCase();
|
||||
if (lowerKey !== apiName && !this.registry.has(lowerKey)) {
|
||||
this.registry.set(lowerKey, modelClass);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a model from the registry
|
||||
*/
|
||||
getModel(apiName: string): ModelClass<BaseModel> {
|
||||
const model = this.registry.get(apiName) || this.registry.get(apiName.toLowerCase());
|
||||
if (!model) {
|
||||
throw new Error(`Model for ${apiName} not found in registry`);
|
||||
}
|
||||
return model;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a model exists in the registry
|
||||
*/
|
||||
hasModel(apiName: string): boolean {
|
||||
return this.registry.has(apiName) || this.registry.has(apiName.toLowerCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and register a model from metadata
|
||||
*/
|
||||
createAndRegisterModel(
|
||||
metadata: ObjectMetadata,
|
||||
): ModelClass<BaseModel> {
|
||||
// Create model with a getModel function that resolves from this registry
|
||||
// Returns undefined if model not found (for models not yet registered)
|
||||
const model = DynamicModelFactory.createModel(
|
||||
metadata,
|
||||
(apiName: string) =>
|
||||
this.registry.get(apiName) || this.registry.get(apiName.toLowerCase()),
|
||||
);
|
||||
this.registerModel(metadata.apiName, model);
|
||||
return model;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered model names
|
||||
*/
|
||||
getAllModelNames(): string[] {
|
||||
return Array.from(this.registry.keys());
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the registry (useful for testing)
|
||||
*/
|
||||
clear(): void {
|
||||
this.registry.clear();
|
||||
}
|
||||
}
|
||||
203
backend/src/object/models/model.service.ts
Normal file
203
backend/src/object/models/model.service.ts
Normal file
@@ -0,0 +1,203 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
import { ModelClass } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
import { ModelRegistry } from './model.registry';
|
||||
import { ObjectMetadata } from './dynamic-model.factory';
|
||||
import { TenantDatabaseService } from '../../tenant/tenant-database.service';
|
||||
import { UserModel, RoleModel, PermissionModel } from './system-models';
|
||||
|
||||
/**
|
||||
* Service to manage dynamic models for a specific tenant
|
||||
*/
|
||||
@Injectable()
|
||||
export class ModelService {
|
||||
private readonly logger = new Logger(ModelService.name);
|
||||
private tenantRegistries = new Map<string, ModelRegistry>();
|
||||
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
/**
|
||||
* Get or create a registry for a tenant
|
||||
*/
|
||||
getTenantRegistry(tenantId: string): ModelRegistry {
|
||||
if (!this.tenantRegistries.has(tenantId)) {
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
// Register system models that are defined as static Objection models
|
||||
this.registerSystemModels(registry);
|
||||
|
||||
this.tenantRegistries.set(tenantId, registry);
|
||||
}
|
||||
return this.tenantRegistries.get(tenantId)!;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register static system models in the registry
|
||||
* Uses simplified models without complex relationMappings to avoid modelPath issues
|
||||
*/
|
||||
private registerSystemModels(registry: ModelRegistry): void {
|
||||
// Register system models by their API name (used in referenceObject fields)
|
||||
// These are simplified versions without relationMappings to avoid dependency issues
|
||||
registry.registerModel('User', UserModel as any);
|
||||
registry.registerModel('Role', RoleModel as any);
|
||||
registry.registerModel('Permission', PermissionModel as any);
|
||||
|
||||
this.logger.debug('Registered system models: User, Role, Permission');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and register a model for a tenant
|
||||
*/
|
||||
async createModelForObject(
|
||||
tenantId: string,
|
||||
objectMetadata: ObjectMetadata,
|
||||
): Promise<ModelClass<BaseModel>> {
|
||||
const registry = this.getTenantRegistry(tenantId);
|
||||
const model = registry.createAndRegisterModel(objectMetadata);
|
||||
|
||||
this.logger.log(
|
||||
`Registered model for ${objectMetadata.apiName} in tenant ${tenantId}`,
|
||||
);
|
||||
|
||||
return model;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a model for a tenant and object
|
||||
*/
|
||||
getModel(tenantId: string, objectApiName: string): ModelClass<BaseModel> {
|
||||
const registry = this.getTenantRegistry(tenantId);
|
||||
return registry.getModel(objectApiName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a bound model (with knex connection) for a tenant and object
|
||||
*/
|
||||
async getBoundModel(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
): Promise<ModelClass<BaseModel>> {
|
||||
const knex = await this.tenantDbService.getTenantKnexById(tenantId);
|
||||
const model = this.getModel(tenantId, objectApiName);
|
||||
|
||||
// Bind knex to the model and also to all models in the registry
|
||||
// This ensures system models also have knex bound when they're used in relations
|
||||
const registry = this.getTenantRegistry(tenantId);
|
||||
const allModels = registry.getAllModelNames();
|
||||
|
||||
// Bind knex to all models to ensure relations work
|
||||
for (const modelName of allModels) {
|
||||
try {
|
||||
const m = registry.getModel(modelName);
|
||||
if (m && !m.knex()) {
|
||||
m.knex(knex);
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore errors for models that don't need binding
|
||||
}
|
||||
}
|
||||
|
||||
return model.bindKnex(knex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a model exists for a tenant
|
||||
*/
|
||||
hasModel(tenantId: string, objectApiName: string): boolean {
|
||||
const registry = this.getTenantRegistry(tenantId);
|
||||
return registry.hasModel(objectApiName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all model names for a tenant
|
||||
*/
|
||||
getAllModelNames(tenantId: string): string[] {
|
||||
const registry = this.getTenantRegistry(tenantId);
|
||||
return registry.getAllModelNames();
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure a model is registered with all its dependencies.
|
||||
* This method handles recursive model creation for related objects.
|
||||
*
|
||||
* @param tenantId - The tenant ID
|
||||
* @param objectApiName - The object API name to ensure registration for
|
||||
* @param fetchMetadata - Callback function to fetch object metadata (provided by ObjectService)
|
||||
* @param visited - Set to track visited models and prevent infinite loops
|
||||
*/
|
||||
async ensureModelWithDependencies(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
fetchMetadata: (apiName: string) => Promise<ObjectMetadata>,
|
||||
visited: Set<string> = new Set(),
|
||||
): Promise<void> {
|
||||
// Prevent infinite recursion
|
||||
if (visited.has(objectApiName)) {
|
||||
return;
|
||||
}
|
||||
visited.add(objectApiName);
|
||||
|
||||
// Check if model already exists
|
||||
if (this.hasModel(tenantId, objectApiName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Fetch the object metadata
|
||||
const objectMetadata = await fetchMetadata(objectApiName);
|
||||
|
||||
// Extract lookup fields to find dependencies
|
||||
const lookupFields = objectMetadata.fields.filter(
|
||||
f => f.type === 'LOOKUP' && f.referenceObject
|
||||
);
|
||||
|
||||
// Recursively ensure all dependent models are registered first
|
||||
for (const field of lookupFields) {
|
||||
if (field.referenceObject) {
|
||||
try {
|
||||
await this.ensureModelWithDependencies(
|
||||
tenantId,
|
||||
field.referenceObject,
|
||||
fetchMetadata,
|
||||
visited,
|
||||
);
|
||||
} catch (error) {
|
||||
// If related object doesn't exist (e.g., system tables), skip it
|
||||
this.logger.debug(
|
||||
`Skipping registration of related model ${field.referenceObject}: ${error.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (objectMetadata.relations) {
|
||||
for (const relation of objectMetadata.relations) {
|
||||
if (relation.targetObjectApiName) {
|
||||
try {
|
||||
await this.ensureModelWithDependencies(
|
||||
tenantId,
|
||||
relation.targetObjectApiName,
|
||||
fetchMetadata,
|
||||
visited,
|
||||
);
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
`Skipping registration of related model ${relation.targetObjectApiName}: ${error.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now create and register this model (all dependencies are ready)
|
||||
await this.createModelForObject(tenantId, objectMetadata);
|
||||
this.logger.log(`Registered model for ${objectApiName} in tenant ${tenantId}`);
|
||||
} catch (error) {
|
||||
this.logger.warn(
|
||||
`Failed to ensure model for ${objectApiName}: ${error.message}`
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
85
backend/src/object/models/system-models.ts
Normal file
85
backend/src/object/models/system-models.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { Model } from 'objection';
|
||||
|
||||
/**
|
||||
* Simplified User model for use in dynamic object relations
|
||||
* This version doesn't include complex relationMappings to avoid modelPath issues
|
||||
*/
|
||||
export class UserModel extends Model {
|
||||
static tableName = 'users';
|
||||
static objectApiName = 'User';
|
||||
|
||||
id!: string;
|
||||
email!: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
name?: string;
|
||||
isActive!: boolean;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['email'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
email: { type: 'string', format: 'email' },
|
||||
firstName: { type: 'string' },
|
||||
lastName: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
isActive: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// No relationMappings to avoid modelPath resolution issues
|
||||
// These simplified models are only used for lookup relations from dynamic models
|
||||
}
|
||||
|
||||
/**
|
||||
* Simplified Role model for use in dynamic object relations
|
||||
*/
|
||||
export class RoleModel extends Model {
|
||||
static tableName = 'roles';
|
||||
static objectApiName = 'Role';
|
||||
|
||||
id!: string;
|
||||
name!: string;
|
||||
description?: string;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['name'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
description: { type: 'string' },
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simplified Permission model for use in dynamic object relations
|
||||
*/
|
||||
export class PermissionModel extends Model {
|
||||
static tableName = 'permissions';
|
||||
static objectApiName = 'Permission';
|
||||
|
||||
id!: string;
|
||||
name!: string;
|
||||
description?: string;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['name'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
description: { type: 'string' },
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -5,11 +5,22 @@ import { SetupObjectController } from './setup-object.controller';
|
||||
import { SchemaManagementService } from './schema-management.service';
|
||||
import { FieldMapperService } from './field-mapper.service';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { MigrationModule } from '../migration/migration.module';
|
||||
import { RbacModule } from '../rbac/rbac.module';
|
||||
import { ModelRegistry } from './models/model.registry';
|
||||
import { ModelService } from './models/model.service';
|
||||
import { MeilisearchModule } from '../search/meilisearch.module';
|
||||
|
||||
@Module({
|
||||
imports: [TenantModule],
|
||||
providers: [ObjectService, SchemaManagementService, FieldMapperService],
|
||||
imports: [TenantModule, MigrationModule, RbacModule, MeilisearchModule],
|
||||
providers: [
|
||||
ObjectService,
|
||||
SchemaManagementService,
|
||||
FieldMapperService,
|
||||
ModelRegistry,
|
||||
ModelService,
|
||||
],
|
||||
controllers: [RuntimeObjectController, SetupObjectController],
|
||||
exports: [ObjectService, SchemaManagementService, FieldMapperService],
|
||||
exports: [ObjectService, SchemaManagementService, FieldMapperService, ModelService],
|
||||
})
|
||||
export class ObjectModule {}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -95,4 +95,20 @@ export class RuntimeObjectController {
|
||||
user.userId,
|
||||
);
|
||||
}
|
||||
|
||||
@Post(':objectApiName/records/bulk-delete')
|
||||
async deleteRecords(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() body: { recordIds?: string[]; ids?: string[] },
|
||||
@CurrentUser() user: any,
|
||||
) {
|
||||
const recordIds: string[] = body?.recordIds || body?.ids || [];
|
||||
return this.objectService.deleteRecords(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
recordIds,
|
||||
user.userId,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,11 @@ export class SchemaManagementService {
|
||||
objectDefinition: ObjectDefinition,
|
||||
fields: FieldDefinition[],
|
||||
) {
|
||||
const tableName = this.getTableName(objectDefinition.apiName);
|
||||
const tableName = this.getTableName(
|
||||
objectDefinition.apiName,
|
||||
objectDefinition.label,
|
||||
objectDefinition.pluralLabel,
|
||||
);
|
||||
|
||||
// Check if table already exists
|
||||
const exists = await knex.schema.hasTable(tableName);
|
||||
@@ -44,8 +48,10 @@ export class SchemaManagementService {
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
field: FieldDefinition,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
this.addFieldColumn(table, field);
|
||||
@@ -61,8 +67,10 @@ export class SchemaManagementService {
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
fieldApiName: string,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
table.dropColumn(fieldApiName);
|
||||
@@ -71,11 +79,44 @@ export class SchemaManagementService {
|
||||
this.logger.log(`Removed field ${fieldApiName} from table ${tableName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Alter a field in an existing object table
|
||||
* Handles safe updates like changing NOT NULL or constraints
|
||||
* Warns about potentially destructive operations
|
||||
*/
|
||||
async alterFieldInTable(
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
fieldApiName: string,
|
||||
field: FieldDefinition,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
options?: {
|
||||
skipTypeChange?: boolean; // Skip if type change would lose data
|
||||
},
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
const skipTypeChange = options?.skipTypeChange ?? true;
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
// Drop the existing column and recreate with new definition
|
||||
// Note: This approach works for metadata changes, but type changes may need data migration
|
||||
table.dropColumn(fieldApiName);
|
||||
});
|
||||
|
||||
// Recreate the column with new definition
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
this.addFieldColumn(table, field);
|
||||
});
|
||||
|
||||
this.logger.log(`Altered field ${fieldApiName} in table ${tableName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop an object table
|
||||
*/
|
||||
async dropObjectTable(knex: Knex, objectApiName: string) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
async dropObjectTable(knex: Knex, objectApiName: string, objectLabel?: string, pluralLabel?: string) {
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.dropTableIfExists(tableName);
|
||||
|
||||
@@ -94,15 +135,30 @@ export class SchemaManagementService {
|
||||
let column: Knex.ColumnBuilder;
|
||||
|
||||
switch (field.type) {
|
||||
// Text types
|
||||
case 'String':
|
||||
case 'TEXT':
|
||||
case 'EMAIL':
|
||||
case 'PHONE':
|
||||
case 'URL':
|
||||
column = table.string(columnName, field.length || 255);
|
||||
break;
|
||||
|
||||
case 'Text':
|
||||
case 'LONG_TEXT':
|
||||
column = table.text(columnName);
|
||||
break;
|
||||
|
||||
case 'PICKLIST':
|
||||
case 'MULTI_PICKLIST':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// Numeric types
|
||||
case 'Number':
|
||||
case 'NUMBER':
|
||||
case 'CURRENCY':
|
||||
case 'PERCENT':
|
||||
if (field.scale && field.scale > 0) {
|
||||
column = table.decimal(
|
||||
columnName,
|
||||
@@ -115,18 +171,28 @@ export class SchemaManagementService {
|
||||
break;
|
||||
|
||||
case 'Boolean':
|
||||
case 'BOOLEAN':
|
||||
column = table.boolean(columnName).defaultTo(false);
|
||||
break;
|
||||
|
||||
// Date types
|
||||
case 'Date':
|
||||
case 'DATE':
|
||||
column = table.date(columnName);
|
||||
break;
|
||||
|
||||
case 'DateTime':
|
||||
case 'DATE_TIME':
|
||||
column = table.datetime(columnName);
|
||||
break;
|
||||
|
||||
case 'TIME':
|
||||
column = table.time(columnName);
|
||||
break;
|
||||
|
||||
// Relationship types
|
||||
case 'Reference':
|
||||
case 'LOOKUP':
|
||||
column = table.uuid(columnName);
|
||||
if (field.referenceObject) {
|
||||
const refTableName = this.getTableName(field.referenceObject);
|
||||
@@ -134,19 +200,30 @@ export class SchemaManagementService {
|
||||
}
|
||||
break;
|
||||
|
||||
// Email (legacy)
|
||||
case 'Email':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// Phone (legacy)
|
||||
case 'Phone':
|
||||
column = table.string(columnName, 50);
|
||||
break;
|
||||
|
||||
// Url (legacy)
|
||||
case 'Url':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// File types
|
||||
case 'FILE':
|
||||
case 'IMAGE':
|
||||
column = table.text(columnName); // Store file path or URL
|
||||
break;
|
||||
|
||||
// JSON
|
||||
case 'Json':
|
||||
case 'JSON':
|
||||
column = table.json(columnName);
|
||||
break;
|
||||
|
||||
@@ -174,16 +251,35 @@ export class SchemaManagementService {
|
||||
/**
|
||||
* Convert object API name to table name (convert to snake_case, pluralize)
|
||||
*/
|
||||
private getTableName(apiName: string): string {
|
||||
// Convert PascalCase to snake_case
|
||||
const snakeCase = apiName
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
private getTableName(apiName: string, objectLabel?: string, pluralLabel?: string): string {
|
||||
const toSnakePlural = (source: string): string => {
|
||||
const cleaned = source.replace(/[\s-]+/g, '_');
|
||||
const snake = cleaned
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1_$2')
|
||||
.replace(/__+/g, '_')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
// Simple pluralization (append 's' if not already plural)
|
||||
// In production, use a proper pluralization library
|
||||
return snakeCase.endsWith('s') ? snakeCase : `${snakeCase}s`;
|
||||
if (snake.endsWith('y')) return `${snake.slice(0, -1)}ies`;
|
||||
if (snake.endsWith('s')) return snake;
|
||||
return `${snake}s`;
|
||||
};
|
||||
|
||||
const fromApi = toSnakePlural(apiName);
|
||||
const fromLabel = objectLabel ? toSnakePlural(objectLabel) : null;
|
||||
const fromPlural = pluralLabel ? toSnakePlural(pluralLabel) : null;
|
||||
|
||||
if (fromLabel && fromLabel.includes('_') && !fromApi.includes('_')) {
|
||||
return fromLabel;
|
||||
}
|
||||
if (fromPlural && fromPlural.includes('_') && !fromApi.includes('_')) {
|
||||
return fromPlural;
|
||||
}
|
||||
|
||||
if (fromLabel && fromLabel !== fromApi) return fromLabel;
|
||||
if (fromPlural && fromPlural !== fromApi) return fromPlural;
|
||||
|
||||
return fromApi;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -2,6 +2,9 @@ import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Put,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
@@ -10,6 +13,7 @@ import { ObjectService } from './object.service';
|
||||
import { FieldMapperService } from './field-mapper.service';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
|
||||
@Controller('setup/objects')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@@ -17,6 +21,7 @@ export class SetupObjectController {
|
||||
constructor(
|
||||
private objectService: ObjectService,
|
||||
private fieldMapperService: FieldMapperService,
|
||||
private tenantDbService: TenantDatabaseService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
@@ -29,7 +34,8 @@ export class SetupObjectController {
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
) {
|
||||
return this.objectService.getObjectDefinition(tenantId, objectApiName);
|
||||
const objectDef = await this.objectService.getObjectDefinition(tenantId, objectApiName);
|
||||
return this.fieldMapperService.mapObjectDefinitionToDTO(objectDef);
|
||||
}
|
||||
|
||||
@Get(':objectApiName/ui-config')
|
||||
@@ -58,10 +64,93 @@ export class SetupObjectController {
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() data: any,
|
||||
) {
|
||||
return this.objectService.createFieldDefinition(
|
||||
const field = await this.objectService.createFieldDefinition(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
data,
|
||||
);
|
||||
// Map the created field to frontend format
|
||||
return this.fieldMapperService.mapFieldToDTO(field);
|
||||
}
|
||||
|
||||
@Put(':objectApiName/fields/:fieldApiName')
|
||||
async updateFieldDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('fieldApiName') fieldApiName: string,
|
||||
@Body() data: any,
|
||||
) {
|
||||
const field = await this.objectService.updateFieldDefinition(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
fieldApiName,
|
||||
data,
|
||||
);
|
||||
return this.fieldMapperService.mapFieldToDTO(field);
|
||||
}
|
||||
|
||||
@Delete(':objectApiName/fields/:fieldApiName')
|
||||
async deleteFieldDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('fieldApiName') fieldApiName: string,
|
||||
) {
|
||||
return this.objectService.deleteFieldDefinition(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
fieldApiName,
|
||||
);
|
||||
}
|
||||
|
||||
@Patch(':objectApiName')
|
||||
async updateObjectDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() data: any,
|
||||
) {
|
||||
return this.objectService.updateObjectDefinition(tenantId, objectApiName, data);
|
||||
}
|
||||
|
||||
@Get(':objectId/field-permissions')
|
||||
async getFieldPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectId') objectId: string,
|
||||
) {
|
||||
return this.objectService.getFieldPermissions(tenantId, objectId);
|
||||
}
|
||||
|
||||
@Put(':objectId/field-permissions')
|
||||
async updateFieldPermission(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectId') objectId: string,
|
||||
@Body() data: { roleId: string; fieldDefinitionId: string; canRead: boolean; canEdit: boolean },
|
||||
) {
|
||||
return this.objectService.updateFieldPermission(tenantId, data.roleId, data.fieldDefinitionId, data.canRead, data.canEdit);
|
||||
}
|
||||
|
||||
@Get(':objectApiName/permissions/:roleId')
|
||||
async getObjectPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('roleId') roleId: string,
|
||||
) {
|
||||
return this.objectService.getObjectPermissions(tenantId, objectApiName, roleId);
|
||||
}
|
||||
|
||||
@Put(':objectApiName/permissions')
|
||||
async updateObjectPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() data: {
|
||||
roleId: string;
|
||||
canCreate: boolean;
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
canViewAll: boolean;
|
||||
canModifyAll: boolean;
|
||||
},
|
||||
) {
|
||||
return this.objectService.updateObjectPermissions(tenantId, objectApiName, data);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ export class CreatePageLayoutDto {
|
||||
w: number;
|
||||
h: number;
|
||||
}>;
|
||||
relatedLists?: string[];
|
||||
};
|
||||
|
||||
@IsString()
|
||||
@@ -46,6 +47,7 @@ export class UpdatePageLayoutDto {
|
||||
w: number;
|
||||
h: number;
|
||||
}>;
|
||||
relatedLists?: string[];
|
||||
};
|
||||
|
||||
@IsString()
|
||||
|
||||
199
backend/src/rbac/ability.factory.ts
Normal file
199
backend/src/rbac/ability.factory.ts
Normal file
@@ -0,0 +1,199 @@
|
||||
import { AbilityBuilder, PureAbility, AbilityClass } from '@casl/ability';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { User } from '../models/user.model';
|
||||
import { RoleObjectPermission } from '../models/role-object-permission.model';
|
||||
import { RoleFieldPermission } from '../models/role-field-permission.model';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
|
||||
// Define action types
|
||||
export type Action = 'create' | 'read' | 'update' | 'delete' | 'view_all' | 'modify_all';
|
||||
|
||||
// Define subject types - can be string (object API name) or actual object with fields
|
||||
export type Subject = string | { objectApiName: string; ownerId?: string; id?: string; [key: string]: any };
|
||||
|
||||
// Define field actions
|
||||
export type FieldAction = 'read' | 'edit';
|
||||
|
||||
export type AppAbility = PureAbility<[Action, Subject], { field?: string }>;
|
||||
|
||||
@Injectable()
|
||||
export class AbilityFactory {
|
||||
/**
|
||||
* Build CASL ability for a user based on their roles and permissions
|
||||
* This aggregates permissions from all roles the user has
|
||||
*/
|
||||
async defineAbilityFor(
|
||||
user: User & { roles?: Array<{ objectPermissions?: RoleObjectPermission[]; fieldPermissions?: RoleFieldPermission[] }> },
|
||||
recordShares?: RecordShare[],
|
||||
): Promise<AppAbility> {
|
||||
const { can, cannot, build } = new AbilityBuilder<AppAbility>(PureAbility as AbilityClass<AppAbility>);
|
||||
|
||||
if (!user.roles || user.roles.length === 0) {
|
||||
// No roles = no permissions
|
||||
return build();
|
||||
}
|
||||
|
||||
// Aggregate object permissions from all roles
|
||||
const objectPermissionsMap = new Map<string, {
|
||||
canCreate: boolean;
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
canViewAll: boolean;
|
||||
canModifyAll: boolean;
|
||||
}>();
|
||||
|
||||
// Aggregate field permissions from all roles
|
||||
const fieldPermissionsMap = new Map<string, {
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
}>();
|
||||
|
||||
// Process all roles
|
||||
for (const role of user.roles) {
|
||||
// Aggregate object permissions
|
||||
if (role.objectPermissions) {
|
||||
for (const perm of role.objectPermissions) {
|
||||
const existing = objectPermissionsMap.get(perm.objectDefinitionId) || {
|
||||
canCreate: false,
|
||||
canRead: false,
|
||||
canEdit: false,
|
||||
canDelete: false,
|
||||
canViewAll: false,
|
||||
canModifyAll: false,
|
||||
};
|
||||
|
||||
// Union of permissions (if any role grants it, user has it)
|
||||
objectPermissionsMap.set(perm.objectDefinitionId, {
|
||||
canCreate: existing.canCreate || perm.canCreate,
|
||||
canRead: existing.canRead || perm.canRead,
|
||||
canEdit: existing.canEdit || perm.canEdit,
|
||||
canDelete: existing.canDelete || perm.canDelete,
|
||||
canViewAll: existing.canViewAll || perm.canViewAll,
|
||||
canModifyAll: existing.canModifyAll || perm.canModifyAll,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Aggregate field permissions
|
||||
if (role.fieldPermissions) {
|
||||
for (const perm of role.fieldPermissions) {
|
||||
const existing = fieldPermissionsMap.get(perm.fieldDefinitionId) || {
|
||||
canRead: false,
|
||||
canEdit: false,
|
||||
};
|
||||
|
||||
fieldPermissionsMap.set(perm.fieldDefinitionId, {
|
||||
canRead: existing.canRead || perm.canRead,
|
||||
canEdit: existing.canEdit || perm.canEdit,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Convert aggregated permissions to CASL rules
|
||||
for (const [objectId, perms] of objectPermissionsMap) {
|
||||
// Create permission
|
||||
if (perms.canCreate) {
|
||||
can('create', objectId);
|
||||
}
|
||||
|
||||
// Read permission
|
||||
if (perms.canRead) {
|
||||
can('read', objectId);
|
||||
}
|
||||
|
||||
// View all permission (can see all records regardless of ownership)
|
||||
if (perms.canViewAll) {
|
||||
can('view_all', objectId);
|
||||
}
|
||||
|
||||
// Edit permission
|
||||
if (perms.canEdit) {
|
||||
can('update', objectId);
|
||||
}
|
||||
|
||||
// Modify all permission (can edit all records regardless of ownership)
|
||||
if (perms.canModifyAll) {
|
||||
can('modify_all', objectId);
|
||||
}
|
||||
|
||||
// Delete permission
|
||||
if (perms.canDelete) {
|
||||
can('delete', objectId);
|
||||
}
|
||||
}
|
||||
|
||||
// Add record sharing permissions
|
||||
if (recordShares) {
|
||||
for (const share of recordShares) {
|
||||
// Only add if share is active (not expired, not revoked)
|
||||
const now = new Date();
|
||||
const isExpired = share.expiresAt && share.expiresAt < now;
|
||||
const isRevoked = share.revokedAt !== null;
|
||||
|
||||
if (!isExpired && !isRevoked) {
|
||||
// Note: Record-level sharing will be checked in authorization service
|
||||
// CASL abilities are primarily for object-level permissions
|
||||
// Individual record access is validated in applyScopeToQuery
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can access a specific field
|
||||
* Returns true if user has permission or if no restriction exists
|
||||
*/
|
||||
canAccessField(
|
||||
fieldDefinitionId: string,
|
||||
action: FieldAction,
|
||||
user: User & { roles?: Array<{ fieldPermissions?: RoleFieldPermission[] }> },
|
||||
): boolean {
|
||||
if (!user.roles || user.roles.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Collect all field permissions from all roles
|
||||
const allFieldPermissions: RoleFieldPermission[] = [];
|
||||
for (const role of user.roles) {
|
||||
if (role.fieldPermissions) {
|
||||
allFieldPermissions.push(...role.fieldPermissions);
|
||||
}
|
||||
}
|
||||
|
||||
// If there are NO field permissions configured at all, allow by default
|
||||
if (allFieldPermissions.length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// If field permissions exist, check for explicit grants (union of all roles)
|
||||
for (const role of user.roles) {
|
||||
if (role.fieldPermissions) {
|
||||
const fieldPerm = role.fieldPermissions.find(fp => fp.fieldDefinitionId === fieldDefinitionId);
|
||||
if (fieldPerm) {
|
||||
if (action === 'read' && fieldPerm.canRead) return true;
|
||||
if (action === 'edit' && fieldPerm.canEdit) return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No explicit rule for this field but other field permissions exist.
|
||||
// Default to allow so new fields don't get silently stripped and fail validation.
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter fields based on user permissions
|
||||
* Returns array of field IDs the user can access with the specified action
|
||||
*/
|
||||
filterFields(
|
||||
fieldDefinitionIds: string[],
|
||||
action: FieldAction,
|
||||
user: User & { roles?: Array<{ fieldPermissions?: RoleFieldPermission[] }> },
|
||||
): string[] {
|
||||
return fieldDefinitionIds.filter(fieldId => this.canAccessField(fieldId, action, user));
|
||||
}
|
||||
}
|
||||
282
backend/src/rbac/authorization.service.ts
Normal file
282
backend/src/rbac/authorization.service.ts
Normal file
@@ -0,0 +1,282 @@
|
||||
import { Injectable, ForbiddenException } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
import { User } from '../models/user.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { FieldDefinition } from '../models/field-definition.model';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
import { AbilityFactory, AppAbility, Action } from './ability.factory';
|
||||
import { DynamicModelFactory } from '../object/models/dynamic-model.factory';
|
||||
import { subject } from '@casl/ability';
|
||||
|
||||
@Injectable()
|
||||
export class AuthorizationService {
|
||||
constructor(private abilityFactory: AbilityFactory) {}
|
||||
|
||||
/**
|
||||
* Apply authorization scope to a query based on OWD and user permissions
|
||||
* This determines which records the user can see
|
||||
* Modifies the query in place and returns void
|
||||
*/
|
||||
async applyScopeToQuery<T = any>(
|
||||
query: any, // Accept both Knex and Objection query builders
|
||||
objectDef: ObjectDefinition,
|
||||
user: User & { roles?: any[] },
|
||||
action: Action,
|
||||
knex: Knex,
|
||||
): Promise<void> {
|
||||
// Get user's ability
|
||||
const recordShares = await this.getActiveRecordShares(objectDef.id, user.id, knex);
|
||||
const ability = await this.abilityFactory.defineAbilityFor(user, recordShares);
|
||||
|
||||
// Check if user has the base permission for this action
|
||||
// Use object ID, not API name, since permissions are stored by object ID
|
||||
if (!ability.can(action, objectDef.id)) {
|
||||
// No permission at all - return empty result
|
||||
query.where(knex.raw('1 = 0'));
|
||||
return;
|
||||
}
|
||||
|
||||
// Check special permissions
|
||||
const hasViewAll = ability.can('view_all', objectDef.id);
|
||||
const hasModifyAll = ability.can('modify_all', objectDef.id);
|
||||
|
||||
// If user has view_all or modify_all, they can see all records
|
||||
if (hasViewAll || hasModifyAll) {
|
||||
// No filtering needed
|
||||
return;
|
||||
}
|
||||
|
||||
// Apply OWD (Org-Wide Default) restrictions
|
||||
switch (objectDef.orgWideDefault) {
|
||||
case 'public_read_write':
|
||||
// Everyone can see all records
|
||||
return;
|
||||
|
||||
case 'public_read':
|
||||
// Everyone can see all records (write operations checked separately)
|
||||
return;
|
||||
|
||||
case 'private':
|
||||
default:
|
||||
// Only owner and explicitly shared records
|
||||
await this.applyPrivateScope(query, objectDef, user, recordShares, knex);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply private scope: owner + shared records
|
||||
*/
|
||||
private async applyPrivateScope<T = any>(
|
||||
query: any, // Accept both Knex and Objection query builders
|
||||
objectDef: ObjectDefinition,
|
||||
user: User,
|
||||
recordShares: RecordShare[],
|
||||
knex: Knex,
|
||||
): Promise<void> {
|
||||
const tableName = this.getTableName(objectDef.apiName);
|
||||
|
||||
// Check if table has ownerId column
|
||||
const hasOwner = await knex.schema.hasColumn(tableName, 'ownerId');
|
||||
|
||||
if (!hasOwner && recordShares.length === 0) {
|
||||
// No ownership and no shares - user can't see anything
|
||||
query.where(knex.raw('1 = 0'));
|
||||
return;
|
||||
}
|
||||
|
||||
// Build conditions: ownerId = user OR record shared with user
|
||||
query.where((builder) => {
|
||||
if (hasOwner) {
|
||||
builder.orWhere(`${tableName}.ownerId`, user.id);
|
||||
}
|
||||
|
||||
if (recordShares.length > 0) {
|
||||
const sharedRecordIds = recordShares.map(share => share.recordId);
|
||||
builder.orWhereIn(`${tableName}.id`, sharedRecordIds);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can perform action on a specific record
|
||||
*/
|
||||
async canPerformAction(
|
||||
action: Action,
|
||||
objectDef: ObjectDefinition,
|
||||
record: any,
|
||||
user: User & { roles?: any[] },
|
||||
knex: Knex,
|
||||
): Promise<boolean> {
|
||||
const recordShares = await this.getActiveRecordShares(objectDef.id, user.id, knex);
|
||||
const ability = await this.abilityFactory.defineAbilityFor(user, recordShares);
|
||||
|
||||
// Check base permission - use object ID not API name
|
||||
if (!ability.can(action, objectDef.id)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check special permissions - use object ID not API name
|
||||
const hasViewAll = ability.can('view_all', objectDef.id);
|
||||
const hasModifyAll = ability.can('modify_all', objectDef.id);
|
||||
|
||||
// canViewAll only grants read access to all records
|
||||
if (action === 'read' && hasViewAll) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// canModifyAll grants edit/delete access to all records
|
||||
if ((action === 'update' || action === 'delete') && hasModifyAll) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check OWD
|
||||
switch (objectDef.orgWideDefault) {
|
||||
case 'public_read_write':
|
||||
return true;
|
||||
|
||||
case 'public_read':
|
||||
if (action === 'read') return true;
|
||||
// For write actions, check ownership
|
||||
return record.ownerId === user.id;
|
||||
|
||||
case 'private':
|
||||
default:
|
||||
// Check ownership
|
||||
if (record.ownerId === user.id) return true;
|
||||
|
||||
// Check if record is shared with user
|
||||
const share = recordShares.find(s => s.recordId === record.id);
|
||||
if (share) {
|
||||
if (action === 'read' && share.accessLevel.canRead) return true;
|
||||
if (action === 'update' && share.accessLevel.canEdit) return true;
|
||||
if (action === 'delete' && share.accessLevel.canDelete) return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter data based on field-level permissions
|
||||
* Removes fields the user cannot read
|
||||
*/
|
||||
async filterReadableFields(
|
||||
data: any,
|
||||
fields: FieldDefinition[],
|
||||
user: User & { roles?: any[] },
|
||||
): Promise<any> {
|
||||
const filtered: any = {};
|
||||
|
||||
// Always include id - it's required for navigation and record identification
|
||||
if (data.id !== undefined) {
|
||||
filtered.id = data.id;
|
||||
}
|
||||
|
||||
for (const field of fields) {
|
||||
if (this.abilityFactory.canAccessField(field.id, 'read', user)) {
|
||||
if (data[field.apiName] !== undefined) {
|
||||
filtered[field.apiName] = data[field.apiName];
|
||||
}
|
||||
|
||||
// For lookup fields, also include the related object (e.g., ownerId -> owner)
|
||||
if (field.type === 'LOOKUP') {
|
||||
const relationName = DynamicModelFactory.getRelationName(field.apiName);
|
||||
if (data[relationName] !== undefined) {
|
||||
filtered[relationName] = data[relationName];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return filtered;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter data based on field-level permissions
|
||||
* Removes fields the user cannot edit
|
||||
*/
|
||||
async filterEditableFields(
|
||||
data: any,
|
||||
fields: FieldDefinition[],
|
||||
user: User & { roles?: any[] },
|
||||
): Promise<any> {
|
||||
const filtered: any = {};
|
||||
|
||||
for (const field of fields) {
|
||||
if (this.abilityFactory.canAccessField(field.id, 'edit', user)) {
|
||||
if (data[field.apiName] !== undefined) {
|
||||
filtered[field.apiName] = data[field.apiName];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return filtered;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get active record shares for a user on an object
|
||||
*/
|
||||
private async getActiveRecordShares(
|
||||
objectDefinitionId: string,
|
||||
userId: string,
|
||||
knex: Knex,
|
||||
): Promise<RecordShare[]> {
|
||||
const now = new Date();
|
||||
|
||||
return await RecordShare.query(knex)
|
||||
.where('objectDefinitionId', objectDefinitionId)
|
||||
.where('granteeUserId', userId)
|
||||
.whereNull('revokedAt')
|
||||
.where((builder) => {
|
||||
builder.whereNull('expiresAt').orWhere('expiresAt', '>', now);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user has permission to create records
|
||||
*/
|
||||
async canCreate(
|
||||
objectDef: ObjectDefinition,
|
||||
user: User & { roles?: any[] },
|
||||
): Promise<boolean> {
|
||||
const ability = await this.abilityFactory.defineAbilityFor(user, []);
|
||||
return ability.can('create', objectDef.id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Throw exception if user cannot perform action
|
||||
*/
|
||||
async assertCanPerformAction(
|
||||
action: Action,
|
||||
objectDef: ObjectDefinition,
|
||||
record: any,
|
||||
user: User & { roles?: any[] },
|
||||
knex: Knex,
|
||||
): Promise<void> {
|
||||
const can = await this.canPerformAction(action, objectDef, record, user, knex);
|
||||
if (!can) {
|
||||
throw new ForbiddenException(`You do not have permission to ${action} this record`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get table name from API name
|
||||
*/
|
||||
private getTableName(apiName: string): string {
|
||||
// Convert CamelCase to snake_case and pluralize
|
||||
const snakeCase = apiName
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
// Simple pluralization
|
||||
if (snakeCase.endsWith('y')) {
|
||||
return snakeCase.slice(0, -1) + 'ies';
|
||||
} else if (snakeCase.endsWith('s')) {
|
||||
return snakeCase;
|
||||
} else {
|
||||
return snakeCase + 's';
|
||||
}
|
||||
}
|
||||
}
|
||||
19
backend/src/rbac/dto/create-record-share.dto.ts
Normal file
19
backend/src/rbac/dto/create-record-share.dto.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { IsString, IsBoolean, IsOptional, IsDateString } from 'class-validator';
|
||||
|
||||
export class CreateRecordShareDto {
|
||||
@IsString()
|
||||
granteeUserId: string;
|
||||
|
||||
@IsBoolean()
|
||||
canRead: boolean;
|
||||
|
||||
@IsBoolean()
|
||||
canEdit: boolean;
|
||||
|
||||
@IsBoolean()
|
||||
canDelete: boolean;
|
||||
|
||||
@IsOptional()
|
||||
@IsDateString()
|
||||
expiresAt?: string;
|
||||
}
|
||||
@@ -1,8 +1,16 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { RbacService } from './rbac.service';
|
||||
import { AbilityFactory } from './ability.factory';
|
||||
import { AuthorizationService } from './authorization.service';
|
||||
import { SetupRolesController } from './setup-roles.controller';
|
||||
import { SetupUsersController } from './setup-users.controller';
|
||||
import { RecordSharingController } from './record-sharing.controller';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
|
||||
@Module({
|
||||
providers: [RbacService],
|
||||
exports: [RbacService],
|
||||
imports: [TenantModule],
|
||||
controllers: [SetupRolesController, SetupUsersController, RecordSharingController],
|
||||
providers: [RbacService, AbilityFactory, AuthorizationService],
|
||||
exports: [RbacService, AbilityFactory, AuthorizationService],
|
||||
})
|
||||
export class RbacModule {}
|
||||
|
||||
350
backend/src/rbac/record-sharing.controller.ts
Normal file
350
backend/src/rbac/record-sharing.controller.ts
Normal file
@@ -0,0 +1,350 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
ForbiddenException,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { User } from '../models/user.model';
|
||||
import { AuthorizationService } from './authorization.service';
|
||||
import { CreateRecordShareDto } from './dto/create-record-share.dto';
|
||||
|
||||
@Controller('runtime/objects/:objectApiName/records/:recordId/shares')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class RecordSharingController {
|
||||
constructor(
|
||||
private tenantDbService: TenantDatabaseService,
|
||||
private authService: AuthorizationService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
async getRecordShares(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Only owner can view shares
|
||||
if (record.ownerId !== currentUser.userId) {
|
||||
// Check if user has modify all permission
|
||||
const user: any = await User.query(knex)
|
||||
.findById(currentUser.userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
throw new ForbiddenException('User not found');
|
||||
}
|
||||
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (!hasModifyAll) {
|
||||
throw new ForbiddenException('Only the record owner or users with Modify All permission can view shares');
|
||||
}
|
||||
}
|
||||
|
||||
// Get all active shares for this record
|
||||
const shares = await RecordShare.query(knex)
|
||||
.where({ objectDefinitionId: objectDef.id, recordId })
|
||||
.whereNull('revokedAt')
|
||||
.where(builder => {
|
||||
builder.whereNull('expiresAt').orWhere('expiresAt', '>', new Date());
|
||||
})
|
||||
.withGraphFetched('[granteeUser]')
|
||||
.orderBy('createdAt', 'desc');
|
||||
|
||||
return shares;
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createRecordShare(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
@Body() data: CreateRecordShareDto,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Check if user can share - either owner or has modify permissions
|
||||
const canShare = await this.canUserShareRecord(
|
||||
currentUser.userId,
|
||||
record,
|
||||
objectDef,
|
||||
knex,
|
||||
);
|
||||
|
||||
if (!canShare) {
|
||||
throw new ForbiddenException('You do not have permission to share this record');
|
||||
}
|
||||
|
||||
// Cannot share with self
|
||||
if (data.granteeUserId === currentUser.userId) {
|
||||
throw new Error('Cannot share record with yourself');
|
||||
}
|
||||
|
||||
// Check if share already exists
|
||||
const existingShare = await RecordShare.query(knex)
|
||||
.where({
|
||||
objectDefinitionId: objectDef.id,
|
||||
recordId,
|
||||
granteeUserId: data.granteeUserId,
|
||||
})
|
||||
.whereNull('revokedAt')
|
||||
.first();
|
||||
|
||||
if (existingShare) {
|
||||
// Update existing share
|
||||
const updated = await RecordShare.query(knex)
|
||||
.patchAndFetchById(existingShare.id, {
|
||||
accessLevel: {
|
||||
canRead: data.canRead,
|
||||
canEdit: data.canEdit,
|
||||
canDelete: data.canDelete,
|
||||
},
|
||||
// Convert ISO string to MySQL datetime format
|
||||
expiresAt: data.expiresAt
|
||||
? knex.raw('?', [new Date(data.expiresAt).toISOString().slice(0, 19).replace('T', ' ')])
|
||||
: null,
|
||||
} as any);
|
||||
|
||||
return RecordShare.query(knex)
|
||||
.findById(updated.id)
|
||||
.withGraphFetched('[granteeUser]');
|
||||
}
|
||||
|
||||
// Create new share
|
||||
const share = await RecordShare.query(knex).insertAndFetch({
|
||||
objectDefinitionId: objectDef.id,
|
||||
recordId,
|
||||
granteeUserId: data.granteeUserId,
|
||||
grantedByUserId: currentUser.userId,
|
||||
accessLevel: {
|
||||
canRead: data.canRead,
|
||||
canEdit: data.canEdit,
|
||||
canDelete: data.canDelete,
|
||||
},
|
||||
// Convert ISO string to MySQL datetime format: YYYY-MM-DD HH:MM:SS
|
||||
expiresAt: data.expiresAt
|
||||
? knex.raw('?', [new Date(data.expiresAt).toISOString().slice(0, 19).replace('T', ' ')])
|
||||
: null,
|
||||
} as any);
|
||||
|
||||
return RecordShare.query(knex)
|
||||
.findById(share.id)
|
||||
.withGraphFetched('[granteeUser]');
|
||||
}
|
||||
|
||||
@Delete(':shareId')
|
||||
async deleteRecordShare(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@Param('shareId') shareId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Only owner can revoke shares
|
||||
if (record.ownerId !== currentUser.userId) {
|
||||
// Check if user has modify all permission
|
||||
const user: any = await User.query(knex)
|
||||
.findById(currentUser.userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
throw new ForbiddenException('User not found');
|
||||
}
|
||||
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (!hasModifyAll) {
|
||||
throw new ForbiddenException('Only the record owner or users with Modify All permission can revoke shares');
|
||||
}
|
||||
}
|
||||
|
||||
// Revoke the share (soft delete)
|
||||
await RecordShare.query(knex)
|
||||
.patchAndFetchById(shareId, {
|
||||
revokedAt: knex.fn.now() as any,
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
private async canUserShareRecord(
|
||||
userId: string,
|
||||
record: any,
|
||||
objectDef: ObjectDefinition,
|
||||
knex: any,
|
||||
): Promise<boolean> {
|
||||
// Owner can always share
|
||||
if (record.ownerId === userId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if user has modify all or edit permissions
|
||||
const user: any = await User.query(knex)
|
||||
.findById(userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for canModifyAll permission
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (hasModifyAll) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for canEdit permission (user needs edit to share)
|
||||
const hasEdit = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canEdit
|
||||
)
|
||||
);
|
||||
|
||||
// If user has edit permission, check if they can actually edit this record
|
||||
// by using the authorization service
|
||||
if (hasEdit) {
|
||||
try {
|
||||
await this.authService.assertCanPerformAction(
|
||||
'update',
|
||||
objectDef,
|
||||
record,
|
||||
user,
|
||||
knex,
|
||||
);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private getTableName(apiName: string, objectLabel?: string, pluralLabel?: string): string {
|
||||
const toSnakePlural = (source: string): string => {
|
||||
const cleaned = source.replace(/[\s-]+/g, '_');
|
||||
const snake = cleaned
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1_$2')
|
||||
.replace(/__+/g, '_')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
if (snake.endsWith('y')) return `${snake.slice(0, -1)}ies`;
|
||||
if (snake.endsWith('s')) return snake;
|
||||
return `${snake}s`;
|
||||
};
|
||||
|
||||
const fromApi = toSnakePlural(apiName);
|
||||
const fromLabel = objectLabel ? toSnakePlural(objectLabel) : null;
|
||||
const fromPlural = pluralLabel ? toSnakePlural(pluralLabel) : null;
|
||||
|
||||
if (fromLabel && fromLabel.includes('_') && !fromApi.includes('_')) {
|
||||
return fromLabel;
|
||||
}
|
||||
if (fromPlural && fromPlural.includes('_') && !fromApi.includes('_')) {
|
||||
return fromPlural;
|
||||
}
|
||||
|
||||
if (fromLabel && fromLabel !== fromApi) return fromLabel;
|
||||
if (fromPlural && fromPlural !== fromApi) return fromPlural;
|
||||
|
||||
return fromApi;
|
||||
}
|
||||
}
|
||||
141
backend/src/rbac/setup-roles.controller.ts
Normal file
141
backend/src/rbac/setup-roles.controller.ts
Normal file
@@ -0,0 +1,141 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { Role } from '../models/role.model';
|
||||
|
||||
@Controller('setup/roles')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class SetupRolesController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
@Get()
|
||||
async getRoles(@TenantId() tenantId: string) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
return await Role.query(knex).select('*').orderBy('name', 'asc');
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
async getRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
return await Role.query(knex).findById(id).withGraphFetched('users');
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() data: { name: string; description?: string; guardName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const role = await Role.query(knex).insert({
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
guardName: data.guardName || 'tenant',
|
||||
});
|
||||
|
||||
return role;
|
||||
}
|
||||
|
||||
@Patch(':id')
|
||||
async updateRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
@Body() data: { name?: string; description?: string; guardName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const updateData: any = {};
|
||||
|
||||
if (data.name) updateData.name = data.name;
|
||||
if (data.description !== undefined) updateData.description = data.description;
|
||||
if (data.guardName) updateData.guardName = data.guardName;
|
||||
|
||||
const role = await Role.query(knex).patchAndFetchById(id, updateData);
|
||||
return role;
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
async deleteRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Delete role user assignments first
|
||||
await knex('user_roles').where({ roleId: id }).delete();
|
||||
|
||||
// Delete role permissions
|
||||
await knex('role_permissions').where({ roleId: id }).delete();
|
||||
await knex('role_object_permissions').where({ roleId: id }).delete();
|
||||
|
||||
// Delete the role
|
||||
await Role.query(knex).deleteById(id);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post(':roleId/users')
|
||||
async addUserToRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
@Body() data: { userId: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Check if assignment already exists
|
||||
const existing = await knex('user_roles')
|
||||
.where({ userId: data.userId, roleId })
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
return { success: true, message: 'User already assigned' };
|
||||
}
|
||||
|
||||
await knex('user_roles').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
userId: data.userId,
|
||||
roleId,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Delete(':roleId/users/:userId')
|
||||
async removeUserFromRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
@Param('userId') userId: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
await knex('user_roles')
|
||||
.where({ userId, roleId })
|
||||
.delete();
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
146
backend/src/rbac/setup-users.controller.ts
Normal file
146
backend/src/rbac/setup-users.controller.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { User } from '../models/user.model';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
|
||||
@Controller('setup/users')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class SetupUsersController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
@Get()
|
||||
async getUsers(@TenantId() tenantId: string) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
return await User.query(knex).withGraphFetched('roles');
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
async getUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
return await User.query(knex).findById(id).withGraphFetched('roles');
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() data: { email: string; password: string; firstName?: string; lastName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Hash password
|
||||
const hashedPassword = await bcrypt.hash(data.password, 10);
|
||||
|
||||
const user = await User.query(knex).insert({
|
||||
email: data.email,
|
||||
password: hashedPassword,
|
||||
firstName: data.firstName,
|
||||
lastName: data.lastName,
|
||||
isActive: true,
|
||||
});
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
@Patch(':id')
|
||||
async updateUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
@Body() data: { email?: string; password?: string; firstName?: string; lastName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const updateData: any = {};
|
||||
|
||||
if (data.email) updateData.email = data.email;
|
||||
if (data.firstName !== undefined) updateData.firstName = data.firstName;
|
||||
if (data.lastName !== undefined) updateData.lastName = data.lastName;
|
||||
|
||||
// Hash password if provided
|
||||
if (data.password) {
|
||||
updateData.password = await bcrypt.hash(data.password, 10);
|
||||
}
|
||||
|
||||
const user = await User.query(knex).patchAndFetchById(id, updateData);
|
||||
return user;
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
async deleteUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Delete user role assignments first
|
||||
await knex('user_roles').where({ userId: id }).delete();
|
||||
|
||||
// Delete the user
|
||||
await User.query(knex).deleteById(id);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post(':userId/roles')
|
||||
async addRoleToUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('userId') userId: string,
|
||||
@Body() data: { roleId: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Check if assignment already exists
|
||||
const existing = await knex('user_roles')
|
||||
.where({ userId, roleId: data.roleId })
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
return { success: true, message: 'Role already assigned' };
|
||||
}
|
||||
|
||||
await knex('user_roles').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
userId,
|
||||
roleId: data.roleId,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Delete(':userId/roles/:roleId')
|
||||
async removeRoleFromUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('userId') userId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
await knex('user_roles')
|
||||
.where({ userId, roleId })
|
||||
.delete();
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
8
backend/src/search/meilisearch.module.ts
Normal file
8
backend/src/search/meilisearch.module.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { MeilisearchService } from './meilisearch.service';
|
||||
|
||||
/**
 * Exposes MeilisearchService — the optional full-text search integration,
 * active only when a Meilisearch host is configured — to other modules.
 */
@Module({
  providers: [MeilisearchService],
  exports: [MeilisearchService],
})
export class MeilisearchModule {}
|
||||
244
backend/src/search/meilisearch.service.ts
Normal file
244
backend/src/search/meilisearch.service.ts
Normal file
@@ -0,0 +1,244 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import * as http from 'http';
|
||||
import * as https from 'https';
|
||||
|
||||
type MeiliConfig = {
|
||||
host: string;
|
||||
apiKey?: string;
|
||||
indexPrefix: string;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class MeilisearchService {
|
||||
private readonly logger = new Logger(MeilisearchService.name);
|
||||
|
||||
isEnabled(): boolean {
|
||||
return Boolean(this.getConfig());
|
||||
}
|
||||
|
||||
async searchRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
query: string,
|
||||
displayField?: string,
|
||||
): Promise<{ id: string; hit: any } | null> {
|
||||
const config = this.getConfig();
|
||||
if (!config) return null;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/search`;
|
||||
|
||||
console.log('querying Meilisearch index:', { indexName, query, displayField });
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, {
|
||||
q: query,
|
||||
limit: 5,
|
||||
}, this.buildHeaders(config));
|
||||
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch query failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const hits = Array.isArray(response.body?.hits) ? response.body.hits : [];
|
||||
if (hits.length === 0) return null;
|
||||
|
||||
if (displayField) {
|
||||
const loweredQuery = query.toLowerCase();
|
||||
const exactMatch = hits.find((hit: any) => {
|
||||
const value = hit?.[displayField];
|
||||
return value && String(value).toLowerCase() === loweredQuery;
|
||||
});
|
||||
if (exactMatch?.id) {
|
||||
return { id: exactMatch.id, hit: exactMatch };
|
||||
}
|
||||
}
|
||||
|
||||
const match = hits[0];
|
||||
if (match?.id) {
|
||||
return { id: match.id, hit: match };
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch lookup failed: ${error.message}`);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async searchRecords(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
query: string,
|
||||
options?: { limit?: number; offset?: number },
|
||||
): Promise<{ hits: any[]; total: number }> {
|
||||
const config = this.getConfig();
|
||||
if (!config) return { hits: [], total: 0 };
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/search`;
|
||||
const limit = Number.isFinite(Number(options?.limit)) ? Number(options?.limit) : 20;
|
||||
const offset = Number.isFinite(Number(options?.offset)) ? Number(options?.offset) : 0;
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, {
|
||||
q: query,
|
||||
limit,
|
||||
offset,
|
||||
}, this.buildHeaders(config));
|
||||
|
||||
console.log('Meilisearch response body:', response.body);
|
||||
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch query failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
return { hits: [], total: 0 };
|
||||
}
|
||||
|
||||
const hits = Array.isArray(response.body?.hits) ? response.body.hits : [];
|
||||
const total =
|
||||
response.body?.estimatedTotalHits ??
|
||||
response.body?.nbHits ??
|
||||
hits.length;
|
||||
return { hits, total };
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch query failed: ${error.message}`);
|
||||
return { hits: [], total: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
async upsertRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
record: Record<string, any>,
|
||||
fieldsToIndex: string[],
|
||||
): Promise<void> {
|
||||
const config = this.getConfig();
|
||||
if (!config || !record?.id) return;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/documents?primaryKey=id`;
|
||||
const document = this.pickRecordFields(record, fieldsToIndex);
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, [document], this.buildHeaders(config));
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch upsert failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch upsert failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
async deleteRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
recordId: string,
|
||||
): Promise<void> {
|
||||
const config = this.getConfig();
|
||||
if (!config || !recordId) return;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/documents/${encodeURIComponent(recordId)}`;
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('DELETE', url, undefined, this.buildHeaders(config));
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch delete failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch delete failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
private getConfig(): MeiliConfig | null {
|
||||
const host = process.env.MEILI_HOST || process.env.MEILISEARCH_HOST;
|
||||
if (!host) return null;
|
||||
const trimmedHost = host.replace(/\/+$/, '');
|
||||
const apiKey = process.env.MEILI_API_KEY || process.env.MEILISEARCH_API_KEY;
|
||||
const indexPrefix = process.env.MEILI_INDEX_PREFIX || 'tenant_';
|
||||
return { host: trimmedHost, apiKey, indexPrefix };
|
||||
}
|
||||
|
||||
private buildIndexName(config: MeiliConfig, tenantId: string, objectApiName: string): string {
|
||||
return `${config.indexPrefix}${tenantId}_${objectApiName}`.toLowerCase();
|
||||
}
|
||||
|
||||
private buildHeaders(config: MeiliConfig): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
if (config.apiKey) {
|
||||
headers['X-Meili-API-Key'] = config.apiKey;
|
||||
headers.Authorization = `Bearer ${config.apiKey}`;
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
private pickRecordFields(record: Record<string, any>, fields: string[]): Record<string, any> {
|
||||
const document: Record<string, any> = { id: record.id };
|
||||
for (const field of fields) {
|
||||
if (record[field] !== undefined) {
|
||||
document[field] = record[field];
|
||||
}
|
||||
}
|
||||
return document;
|
||||
}
|
||||
|
||||
private isSuccessStatus(status: number): boolean {
|
||||
return status >= 200 && status < 300;
|
||||
}
|
||||
|
||||
private requestJson(
|
||||
method: 'POST' | 'DELETE',
|
||||
url: string,
|
||||
payload: any,
|
||||
headers: Record<string, string>,
|
||||
): Promise<{ status: number; body: any }> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parsedUrl = new URL(url);
|
||||
const client = parsedUrl.protocol === 'https:' ? https : http;
|
||||
const request = client.request(
|
||||
{
|
||||
method,
|
||||
hostname: parsedUrl.hostname,
|
||||
port: parsedUrl.port,
|
||||
path: `${parsedUrl.pathname}${parsedUrl.search}`,
|
||||
headers,
|
||||
},
|
||||
(response) => {
|
||||
let data = '';
|
||||
response.on('data', (chunk) => {
|
||||
data += chunk;
|
||||
});
|
||||
response.on('end', () => {
|
||||
if (!data) {
|
||||
resolve({ status: response.statusCode || 0, body: null });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const body = JSON.parse(data);
|
||||
resolve({ status: response.statusCode || 0, body });
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
request.on('error', reject);
|
||||
if (payload !== undefined) {
|
||||
request.write(JSON.stringify(payload));
|
||||
}
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
}
|
||||
368
backend/src/tenant/central-admin.controller.ts
Normal file
368
backend/src/tenant/central-admin.controller.ts
Normal file
@@ -0,0 +1,368 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Put,
|
||||
Delete,
|
||||
Body,
|
||||
Param,
|
||||
Query,
|
||||
UseGuards,
|
||||
UnauthorizedException,
|
||||
Req,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CentralTenant, CentralDomain, CentralUser } from '../models/central.model';
|
||||
import { getCentralKnex, initCentralModels } from './central-database.service';
|
||||
import { TenantProvisioningService } from './tenant-provisioning.service';
|
||||
import { TenantDatabaseService } from './tenant-database.service';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
|
||||
/**
|
||||
* Controller for managing central database entities (tenants, domains, users)
|
||||
* Only accessible when logged in as central admin
|
||||
*/
|
||||
@Controller('central')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class CentralAdminController {
|
||||
constructor(
|
||||
private readonly provisioningService: TenantProvisioningService,
|
||||
private readonly tenantDbService: TenantDatabaseService,
|
||||
) {
|
||||
// Initialize central models on controller creation
|
||||
initCentralModels();
|
||||
}
|
||||
|
||||
private checkCentralAdmin(req: any) {
|
||||
const subdomain = req.raw?.subdomain;
|
||||
const centralSubdomains = (process.env.CENTRAL_SUBDOMAINS || 'central,admin').split(',');
|
||||
|
||||
if (!subdomain || !centralSubdomains.includes(subdomain)) {
|
||||
throw new UnauthorizedException('This endpoint is only accessible to central administrators');
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== TENANTS ====================
|
||||
|
||||
@Get('tenants')
|
||||
async getTenants(@Req() req: any) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralTenant.query().withGraphFetched('domains');
|
||||
}
|
||||
|
||||
@Get('tenants/:id')
|
||||
async getTenant(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralTenant.query()
|
||||
.findById(id)
|
||||
.withGraphFetched('domains');
|
||||
}
|
||||
|
||||
@Post('tenants')
|
||||
async createTenant(
|
||||
@Req() req: any,
|
||||
@Body() data: {
|
||||
name: string;
|
||||
slug?: string;
|
||||
primaryDomain: string;
|
||||
dbHost?: string;
|
||||
dbPort?: number;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
// Use the provisioning service to create tenant with database and migrations
|
||||
const result = await this.provisioningService.provisionTenant({
|
||||
name: data.name,
|
||||
slug: data.slug || data.name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, ''),
|
||||
primaryDomain: data.primaryDomain,
|
||||
dbHost: data.dbHost,
|
||||
dbPort: data.dbPort,
|
||||
});
|
||||
|
||||
// Return the created tenant
|
||||
return CentralTenant.query()
|
||||
.findById(result.tenantId)
|
||||
.withGraphFetched('domains');
|
||||
}
|
||||
|
||||
@Put('tenants/:id')
|
||||
async updateTenant(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() data: {
|
||||
name?: string;
|
||||
slug?: string;
|
||||
dbHost?: string;
|
||||
dbPort?: number;
|
||||
dbName?: string;
|
||||
dbUsername?: string;
|
||||
status?: string;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralTenant.query()
|
||||
.patchAndFetchById(id, data);
|
||||
}
|
||||
|
||||
@Delete('tenants/:id')
|
||||
async deleteTenant(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
await CentralTenant.query().deleteById(id);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
// Get users for a specific tenant
|
||||
@Get('tenants/:id/users')
|
||||
async getTenantUsers(@Req() req: any, @Param('id') tenantId: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
try {
|
||||
// Get tenant to verify it exists
|
||||
const tenant = await CentralTenant.query().findById(tenantId);
|
||||
|
||||
if (!tenant) {
|
||||
throw new UnauthorizedException('Tenant not found');
|
||||
}
|
||||
|
||||
// Connect to tenant database using tenant ID directly
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnexById(tenantId);
|
||||
|
||||
// Fetch users from tenant database
|
||||
const users = await tenantKnex('users').select('*');
|
||||
|
||||
// Remove password from response
|
||||
return users.map(({ password, ...user }) => user);
|
||||
} catch (error) {
|
||||
console.error('Error fetching tenant users:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a user in a specific tenant
|
||||
@Post('tenants/:id/users')
|
||||
async createTenantUser(
|
||||
@Req() req: any,
|
||||
@Param('id') tenantId: string,
|
||||
@Body() data: {
|
||||
email: string;
|
||||
password: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
try {
|
||||
// Get tenant to verify it exists
|
||||
const tenant = await CentralTenant.query().findById(tenantId);
|
||||
|
||||
if (!tenant) {
|
||||
throw new UnauthorizedException('Tenant not found');
|
||||
}
|
||||
|
||||
// Connect to tenant database using tenant ID directly
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnexById(tenantId);
|
||||
|
||||
// Hash password
|
||||
const hashedPassword = await bcrypt.hash(data.password, 10);
|
||||
|
||||
// Generate UUID for the new user
|
||||
const userId = require('crypto').randomUUID();
|
||||
|
||||
// Create user in tenant database
|
||||
await tenantKnex('users').insert({
|
||||
id: userId,
|
||||
email: data.email,
|
||||
password: hashedPassword,
|
||||
firstName: data.firstName || null,
|
||||
lastName: data.lastName || null,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
});
|
||||
|
||||
// Fetch and return the created user
|
||||
const user = await tenantKnex('users').where('id', userId).first();
|
||||
|
||||
if (!user) {
|
||||
throw new Error('Failed to create user');
|
||||
}
|
||||
|
||||
const { password, ...userWithoutPassword } = user;
|
||||
|
||||
return userWithoutPassword;
|
||||
} catch (error) {
|
||||
console.error('Error creating tenant user:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== DOMAINS ====================
|
||||
|
||||
@Get('domains')
|
||||
async getDomains(
|
||||
@Req() req: any,
|
||||
@Query('parentId') parentId?: string,
|
||||
@Query('tenantId') tenantId?: string,
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
let query = CentralDomain.query().withGraphFetched('tenant');
|
||||
|
||||
// Filter by parent/tenant ID if provided (for related lists)
|
||||
if (parentId || tenantId) {
|
||||
query = query.where('tenantId', parentId || tenantId);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
@Get('domains/:id')
|
||||
async getDomain(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralDomain.query()
|
||||
.findById(id)
|
||||
.withGraphFetched('tenant');
|
||||
}
|
||||
|
||||
@Post('domains')
|
||||
async createDomain(
|
||||
@Req() req: any,
|
||||
@Body() data: {
|
||||
domain: string;
|
||||
tenantId: string;
|
||||
isPrimary?: boolean;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralDomain.query().insert({
|
||||
domain: data.domain,
|
||||
tenantId: data.tenantId,
|
||||
isPrimary: data.isPrimary || false,
|
||||
});
|
||||
}
|
||||
|
||||
@Put('domains/:id')
|
||||
async updateDomain(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() data: {
|
||||
domain?: string;
|
||||
tenantId?: string;
|
||||
isPrimary?: boolean;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
return CentralDomain.query()
|
||||
.patchAndFetchById(id, data);
|
||||
}
|
||||
|
||||
@Delete('domains/:id')
|
||||
async deleteDomain(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
// Get domain info before deleting to invalidate cache
|
||||
const domain = await CentralDomain.query().findById(id);
|
||||
|
||||
// Delete the domain
|
||||
await CentralDomain.query().deleteById(id);
|
||||
|
||||
// Invalidate tenant connection cache for this domain
|
||||
if (domain) {
|
||||
this.tenantDbService.removeTenantConnection(domain.domain);
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
// ==================== USERS (Central Admin Users) ====================
|
||||
|
||||
@Get('users')
|
||||
async getUsers(@Req() req: any) {
|
||||
this.checkCentralAdmin(req);
|
||||
const users = await CentralUser.query();
|
||||
// Remove password from response
|
||||
return users.map(({ password, ...user }) => user);
|
||||
}
|
||||
|
||||
@Get('users/:id')
|
||||
async getUser(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
const user = await CentralUser.query().findById(id);
|
||||
|
||||
if (!user) {
|
||||
throw new UnauthorizedException('User not found');
|
||||
}
|
||||
|
||||
const { password, ...userWithoutPassword } = user;
|
||||
return userWithoutPassword;
|
||||
}
|
||||
|
||||
@Post('users')
|
||||
async createUser(
|
||||
@Req() req: any,
|
||||
@Body() data: {
|
||||
email: string;
|
||||
password: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
role?: string;
|
||||
isActive?: boolean;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
const hashedPassword = await bcrypt.hash(data.password, 10);
|
||||
|
||||
const user = await CentralUser.query().insert({
|
||||
email: data.email,
|
||||
password: hashedPassword,
|
||||
firstName: data.firstName || null,
|
||||
lastName: data.lastName || null,
|
||||
role: data.role || 'admin',
|
||||
isActive: data.isActive !== undefined ? data.isActive : true,
|
||||
});
|
||||
|
||||
const { password, ...userWithoutPassword } = user;
|
||||
return userWithoutPassword;
|
||||
}
|
||||
|
||||
@Put('users/:id')
|
||||
async updateUser(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() data: {
|
||||
email?: string;
|
||||
password?: string;
|
||||
firstName?: string;
|
||||
lastName?: string;
|
||||
role?: string;
|
||||
isActive?: boolean;
|
||||
},
|
||||
) {
|
||||
this.checkCentralAdmin(req);
|
||||
|
||||
const updateData: any = { ...data };
|
||||
|
||||
// Hash password if provided
|
||||
if (data.password) {
|
||||
updateData.password = await bcrypt.hash(data.password, 10);
|
||||
} else {
|
||||
// Remove password from update if not provided
|
||||
delete updateData.password;
|
||||
}
|
||||
|
||||
const user = await CentralUser.query()
|
||||
.patchAndFetchById(id, updateData);
|
||||
|
||||
const { password, ...userWithoutPassword } = user;
|
||||
return userWithoutPassword;
|
||||
}
|
||||
|
||||
@Delete('users/:id')
|
||||
async deleteUser(@Req() req: any, @Param('id') id: string) {
|
||||
this.checkCentralAdmin(req);
|
||||
await CentralUser.query().deleteById(id);
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
43
backend/src/tenant/central-database.service.ts
Normal file
43
backend/src/tenant/central-database.service.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import Knex from 'knex';
|
||||
import { Model } from 'objection';
|
||||
import { CentralTenant, CentralDomain, CentralUser } from '../models/central.model';
|
||||
|
||||
let centralKnex: Knex.Knex | null = null;
|
||||
|
||||
/**
|
||||
* Get or create a Knex instance for the central database
|
||||
* This is used for Objection models that work with central entities
|
||||
*/
|
||||
export function getCentralKnex(): Knex.Knex {
|
||||
if (!centralKnex) {
|
||||
const centralDbUrl = process.env.CENTRAL_DATABASE_URL;
|
||||
|
||||
if (!centralDbUrl) {
|
||||
throw new Error('CENTRAL_DATABASE_URL environment variable is not set');
|
||||
}
|
||||
|
||||
centralKnex = Knex({
|
||||
client: 'mysql2',
|
||||
connection: centralDbUrl,
|
||||
pool: {
|
||||
min: 2,
|
||||
max: 10,
|
||||
},
|
||||
});
|
||||
|
||||
// Bind Objection models to this knex instance
|
||||
Model.knex(centralKnex);
|
||||
}
|
||||
|
||||
return centralKnex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize central models with the knex instance
|
||||
*/
|
||||
export function initCentralModels() {
|
||||
const knex = getCentralKnex();
|
||||
CentralTenant.knex(knex);
|
||||
CentralDomain.knex(knex);
|
||||
CentralUser.knex(knex);
|
||||
}
|
||||
@@ -8,32 +8,117 @@ export class TenantDatabaseService {
|
||||
private readonly logger = new Logger(TenantDatabaseService.name);
|
||||
private tenantConnections: Map<string, Knex> = new Map();
|
||||
|
||||
async getTenantKnex(tenantIdOrSlug: string): Promise<Knex> {
|
||||
if (this.tenantConnections.has(tenantIdOrSlug)) {
|
||||
return this.tenantConnections.get(tenantIdOrSlug);
|
||||
/**
|
||||
* Get tenant database connection by domain (for subdomain-based authentication)
|
||||
* This is used when users log in via tenant subdomains
|
||||
*/
|
||||
async getTenantKnexByDomain(domain: string): Promise<Knex> {
|
||||
const cacheKey = `domain:${domain}`;
|
||||
|
||||
// Check if we have a cached connection
|
||||
if (this.tenantConnections.has(cacheKey)) {
|
||||
// Validate the domain still exists before returning cached connection
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
try {
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
});
|
||||
|
||||
// If domain no longer exists, remove cached connection
|
||||
if (!domainRecord) {
|
||||
this.logger.warn(`Domain ${domain} no longer exists, removing cached connection`);
|
||||
await this.disconnectTenant(cacheKey);
|
||||
throw new Error(`Domain ${domain} not found`);
|
||||
}
|
||||
} catch (error) {
|
||||
// If domain doesn't exist, remove from cache and re-throw
|
||||
if (error.message.includes('not found')) {
|
||||
throw error;
|
||||
}
|
||||
// For other errors, log but continue with cached connection
|
||||
this.logger.warn(`Error validating domain ${domain}:`, error.message);
|
||||
}
|
||||
|
||||
return this.tenantConnections.get(cacheKey);
|
||||
}
|
||||
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
// Try to find tenant by ID first, then by slug
|
||||
let tenant = await centralPrisma.tenant.findUnique({
|
||||
where: { id: tenantIdOrSlug },
|
||||
// Find tenant by domain
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
include: { tenant: true },
|
||||
});
|
||||
|
||||
if (!tenant) {
|
||||
tenant = await centralPrisma.tenant.findUnique({
|
||||
where: { slug: tenantIdOrSlug },
|
||||
});
|
||||
|
||||
if (!domainRecord) {
|
||||
throw new Error(`Domain ${domain} not found`);
|
||||
}
|
||||
|
||||
const tenant = domainRecord.tenant;
|
||||
this.logger.log(`Found tenant by domain: ${domain} -> ${tenant.name}`);
|
||||
|
||||
if (tenant.status !== 'active') {
|
||||
throw new Error(`Tenant ${tenant.name} is not active`);
|
||||
}
|
||||
|
||||
// Create connection and cache it
|
||||
const tenantKnex = await this.createTenantConnection(tenant);
|
||||
this.tenantConnections.set(cacheKey, tenantKnex);
|
||||
|
||||
return tenantKnex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tenant database connection by tenant ID (for central admin operations)
|
||||
* This is used when central admin needs to access tenant databases
|
||||
*/
|
||||
async getTenantKnexById(tenantId: string): Promise<Knex> {
|
||||
const cacheKey = `id:${tenantId}`;
|
||||
|
||||
// Check if we have a cached connection (no validation needed for ID-based lookups)
|
||||
if (this.tenantConnections.has(cacheKey)) {
|
||||
return this.tenantConnections.get(cacheKey);
|
||||
}
|
||||
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
// Find tenant by ID
|
||||
const tenant = await centralPrisma.tenant.findUnique({
|
||||
where: { id: tenantId },
|
||||
});
|
||||
|
||||
if (!tenant) {
|
||||
throw new Error(`Tenant ${tenantIdOrSlug} not found`);
|
||||
throw new Error(`Tenant ${tenantId} not found`);
|
||||
}
|
||||
|
||||
if (tenant.status !== 'active') {
|
||||
throw new Error(`Tenant ${tenantIdOrSlug} is not active`);
|
||||
throw new Error(`Tenant ${tenant.name} is not active`);
|
||||
}
|
||||
|
||||
this.logger.log(`Connecting to tenant database by ID: ${tenant.name}`);
|
||||
|
||||
// Create connection and cache it
|
||||
const tenantKnex = await this.createTenantConnection(tenant);
|
||||
this.tenantConnections.set(cacheKey, tenantKnex);
|
||||
|
||||
return tenantKnex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy method - delegates to domain-based lookup
|
||||
* @deprecated Use getTenantKnexByDomain or getTenantKnexById instead
|
||||
*/
|
||||
async getTenantKnex(tenantIdOrSlug: string): Promise<Knex> {
|
||||
// Resolve tenant ID first, then get connection by ID
|
||||
const tenantId = await this.resolveTenantId(tenantIdOrSlug);
|
||||
return this.getTenantKnexById(tenantId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Knex connection to a tenant database
|
||||
*/
|
||||
private async createTenantConnection(tenant: any): Promise<Knex> {
|
||||
// Decrypt password
|
||||
const decryptedPassword = this.decryptPassword(tenant.dbPassword);
|
||||
|
||||
@@ -64,7 +149,6 @@ export class TenantDatabaseService {
|
||||
throw error;
|
||||
}
|
||||
|
||||
this.tenantConnections.set(tenantIdOrSlug, tenantKnex);
|
||||
return tenantKnex;
|
||||
}
|
||||
|
||||
@@ -86,6 +170,36 @@ export class TenantDatabaseService {
|
||||
return domainRecord.tenant;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve tenant by ID or slug
|
||||
* Tries ID first, then falls back to slug
|
||||
*/
|
||||
async resolveTenantId(idOrSlug: string): Promise<string> {
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
// Try by ID first
|
||||
let tenant = await centralPrisma.tenant.findUnique({
|
||||
where: { id: idOrSlug },
|
||||
});
|
||||
|
||||
// If not found, try by slug
|
||||
if (!tenant) {
|
||||
tenant = await centralPrisma.tenant.findUnique({
|
||||
where: { slug: idOrSlug },
|
||||
});
|
||||
}
|
||||
|
||||
if (!tenant) {
|
||||
throw new Error(`Tenant ${idOrSlug} not found`);
|
||||
}
|
||||
|
||||
if (tenant.status !== 'active') {
|
||||
throw new Error(`Tenant ${tenant.name} is not active`);
|
||||
}
|
||||
|
||||
return tenant.id;
|
||||
}
|
||||
|
||||
async disconnectTenant(tenantId: string) {
|
||||
const connection = this.tenantConnections.get(tenantId);
|
||||
if (connection) {
|
||||
@@ -129,4 +243,26 @@ export class TenantDatabaseService {
|
||||
decrypted += decipher.final('utf8');
|
||||
return decrypted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt integrations config JSON object
|
||||
* @param config - Plain object containing integration credentials
|
||||
* @returns Encrypted JSON string
|
||||
*/
|
||||
encryptIntegrationsConfig(config: any): string {
|
||||
if (!config) return null;
|
||||
const jsonString = JSON.stringify(config);
|
||||
return this.encryptPassword(jsonString);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt integrations config JSON string
|
||||
* @param encryptedConfig - Encrypted JSON string
|
||||
* @returns Plain object with integration credentials
|
||||
*/
|
||||
decryptIntegrationsConfig(encryptedConfig: string): any {
|
||||
if (!encryptedConfig) return null;
|
||||
const decrypted = this.decryptPassword(encryptedConfig);
|
||||
return JSON.parse(decrypted);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -176,7 +176,7 @@ export class TenantProvisioningService {
|
||||
* Seed default data for new tenant
|
||||
*/
|
||||
private async seedDefaultData(tenantId: string) {
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnex(tenantId);
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnexById(tenantId);
|
||||
|
||||
try {
|
||||
// Create default roles
|
||||
|
||||
155
backend/src/tenant/tenant.controller.ts
Normal file
155
backend/src/tenant/tenant.controller.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Put,
|
||||
Body,
|
||||
UseGuards,
|
||||
Req,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantDatabaseService } from './tenant-database.service';
|
||||
import { getCentralPrisma } from '../prisma/central-prisma.service';
|
||||
import { TenantId } from './tenant.decorator';
|
||||
|
||||
@Controller('tenant')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class TenantController {
|
||||
constructor(private readonly tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
/**
|
||||
* Get integrations configuration for the current tenant
|
||||
*/
|
||||
@Get('integrations')
|
||||
async getIntegrationsConfig(@TenantId() domain: string) {
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
// Look up tenant by domain
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
include: { tenant: { select: { id: true, integrationsConfig: true } } },
|
||||
});
|
||||
|
||||
if (!domainRecord?.tenant || !domainRecord.tenant.integrationsConfig) {
|
||||
return { data: null };
|
||||
}
|
||||
|
||||
// Decrypt the config
|
||||
const config = this.tenantDbService.decryptIntegrationsConfig(
|
||||
domainRecord.tenant.integrationsConfig as any,
|
||||
);
|
||||
|
||||
// Return config with sensitive fields masked
|
||||
const maskedConfig = this.maskSensitiveFields(config);
|
||||
|
||||
return { data: maskedConfig };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update integrations configuration for the current tenant
|
||||
*/
|
||||
@Put('integrations')
|
||||
async updateIntegrationsConfig(
|
||||
@TenantId() domain: string,
|
||||
@Body() body: { integrationsConfig: any },
|
||||
) {
|
||||
const { integrationsConfig } = body;
|
||||
|
||||
if (!domain) {
|
||||
throw new Error('Domain is missing from request');
|
||||
}
|
||||
|
||||
// Look up tenant by domain
|
||||
const centralPrisma = getCentralPrisma();
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
include: { tenant: { select: { id: true, integrationsConfig: true } } },
|
||||
});
|
||||
|
||||
if (!domainRecord?.tenant) {
|
||||
throw new Error(`Tenant with domain ${domain} not found`);
|
||||
}
|
||||
|
||||
// Merge with existing config to preserve masked values
|
||||
let finalConfig = integrationsConfig;
|
||||
if (domainRecord.tenant.integrationsConfig) {
|
||||
const existingConfig = this.tenantDbService.decryptIntegrationsConfig(
|
||||
domainRecord.tenant.integrationsConfig as any,
|
||||
);
|
||||
|
||||
// Replace masked values with actual values from existing config
|
||||
finalConfig = this.unmaskConfig(integrationsConfig, existingConfig);
|
||||
}
|
||||
|
||||
// Encrypt the config
|
||||
const encryptedConfig = this.tenantDbService.encryptIntegrationsConfig(
|
||||
finalConfig,
|
||||
);
|
||||
|
||||
// Update in database
|
||||
await centralPrisma.tenant.update({
|
||||
where: { id: domainRecord.tenant.id },
|
||||
data: {
|
||||
integrationsConfig: encryptedConfig as any,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Integrations configuration updated successfully',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Unmask config by replacing masked values with actual values from existing config
|
||||
*/
|
||||
private unmaskConfig(newConfig: any, existingConfig: any): any {
|
||||
const result = { ...newConfig };
|
||||
|
||||
// Unmask Twilio credentials
|
||||
if (result.twilio && existingConfig.twilio) {
|
||||
if (result.twilio.authToken === '••••••••' && existingConfig.twilio.authToken) {
|
||||
result.twilio.authToken = existingConfig.twilio.authToken;
|
||||
}
|
||||
if (result.twilio.apiSecret === '••••••••' && existingConfig.twilio.apiSecret) {
|
||||
result.twilio.apiSecret = existingConfig.twilio.apiSecret;
|
||||
}
|
||||
}
|
||||
|
||||
// Unmask OpenAI credentials
|
||||
if (result.openai && existingConfig.openai) {
|
||||
if (result.openai.apiKey === '••••••••' && existingConfig.openai.apiKey) {
|
||||
result.openai.apiKey = existingConfig.openai.apiKey;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mask sensitive fields for API responses
|
||||
*/
|
||||
private maskSensitiveFields(config: any): any {
|
||||
if (!config) return null;
|
||||
|
||||
const masked = { ...config };
|
||||
|
||||
// Mask Twilio credentials
|
||||
if (masked.twilio) {
|
||||
masked.twilio = {
|
||||
...masked.twilio,
|
||||
authToken: masked.twilio.authToken ? '••••••••' : '',
|
||||
apiSecret: masked.twilio.apiSecret ? '••••••••' : '',
|
||||
};
|
||||
}
|
||||
|
||||
// Mask OpenAI credentials
|
||||
if (masked.openai) {
|
||||
masked.openai = {
|
||||
...masked.openai,
|
||||
apiKey: masked.openai.apiKey ? '••••••••' : '',
|
||||
};
|
||||
}
|
||||
|
||||
return masked;
|
||||
}
|
||||
}
|
||||
@@ -17,9 +17,14 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
// Extract subdomain from hostname
|
||||
const host = req.headers.host || '';
|
||||
const hostname = host.split(':')[0]; // Remove port if present
|
||||
const parts = hostname.split('.');
|
||||
|
||||
// Check Origin header to get frontend subdomain (for API calls)
|
||||
const origin = req.headers.origin as string;
|
||||
const referer = req.headers.referer as string;
|
||||
|
||||
let parts = hostname.split('.');
|
||||
|
||||
this.logger.log(`Host header: ${host}, hostname: ${hostname}, parts: ${JSON.stringify(parts)}`);
|
||||
this.logger.log(`Host header: ${host}, hostname: ${hostname}, origin: ${origin}, referer: ${referer}, parts: ${JSON.stringify(parts)}`);
|
||||
|
||||
// For local development, accept x-tenant-id header
|
||||
let tenantId = req.headers['x-tenant-id'] as string;
|
||||
@@ -27,12 +32,26 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
|
||||
this.logger.log(`Host header: ${host}, hostname: ${hostname}, parts: ${JSON.stringify(parts)}, x-tenant-id: ${tenantId}`);
|
||||
|
||||
// If x-tenant-id is explicitly provided, use it directly
|
||||
if (tenantId) {
|
||||
this.logger.log(`Using explicit x-tenant-id: ${tenantId}`);
|
||||
(req as any).tenantId = tenantId;
|
||||
next();
|
||||
return;
|
||||
// Try to extract subdomain from Origin header first (for API calls from frontend)
|
||||
if (origin) {
|
||||
try {
|
||||
const originUrl = new URL(origin);
|
||||
const originHost = originUrl.hostname;
|
||||
parts = originHost.split('.');
|
||||
this.logger.log(`Using Origin header hostname: ${originHost}, parts: ${JSON.stringify(parts)}`);
|
||||
} catch (error) {
|
||||
this.logger.warn(`Failed to parse origin: ${origin}`);
|
||||
}
|
||||
} else if (referer && !tenantId) {
|
||||
// Fallback to Referer if no Origin
|
||||
try {
|
||||
const refererUrl = new URL(referer);
|
||||
const refererHost = refererUrl.hostname;
|
||||
parts = refererHost.split('.');
|
||||
this.logger.log(`Using Referer header hostname: ${refererHost}, parts: ${JSON.stringify(parts)}`);
|
||||
} catch (error) {
|
||||
this.logger.warn(`Failed to parse referer: ${referer}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract subdomain (e.g., "tenant1" from "tenant1.routebox.co")
|
||||
@@ -51,6 +70,36 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
|
||||
this.logger.log(`Extracted subdomain: ${subdomain}`);
|
||||
|
||||
// Always attach subdomain to request if present
|
||||
if (subdomain) {
|
||||
(req as any).subdomain = subdomain;
|
||||
}
|
||||
|
||||
// If x-tenant-id is explicitly provided, use it directly but still keep subdomain
|
||||
if (tenantId) {
|
||||
this.logger.log(`Using explicit x-tenant-id: ${tenantId}`);
|
||||
(req as any).tenantId = tenantId;
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
// Always attach subdomain to request if present
|
||||
if (subdomain) {
|
||||
(req as any).subdomain = subdomain;
|
||||
}
|
||||
|
||||
// Check if this is a central subdomain
|
||||
const centralSubdomains = (process.env.CENTRAL_SUBDOMAINS || 'central,admin').split(',');
|
||||
const isCentral = subdomain && centralSubdomains.includes(subdomain);
|
||||
|
||||
// If it's a central subdomain, skip tenant resolution
|
||||
if (isCentral) {
|
||||
this.logger.log(`Central subdomain detected: ${subdomain}, skipping tenant resolution`);
|
||||
(req as any).subdomain = subdomain;
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
// Get tenant by subdomain if available
|
||||
if (subdomain) {
|
||||
try {
|
||||
@@ -72,9 +121,6 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
if (tenantId) {
|
||||
// Attach tenant info to request object
|
||||
(req as any).tenantId = tenantId;
|
||||
if (subdomain) {
|
||||
(req as any).subdomain = subdomain;
|
||||
}
|
||||
} else {
|
||||
this.logger.warn(`No tenant identified from host: ${hostname}`);
|
||||
}
|
||||
|
||||
@@ -3,11 +3,13 @@ import { TenantMiddleware } from './tenant.middleware';
|
||||
import { TenantDatabaseService } from './tenant-database.service';
|
||||
import { TenantProvisioningService } from './tenant-provisioning.service';
|
||||
import { TenantProvisioningController } from './tenant-provisioning.controller';
|
||||
import { CentralAdminController } from './central-admin.controller';
|
||||
import { TenantController } from './tenant.controller';
|
||||
import { PrismaModule } from '../prisma/prisma.module';
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule],
|
||||
controllers: [TenantProvisioningController],
|
||||
controllers: [TenantProvisioningController, CentralAdminController, TenantController],
|
||||
providers: [
|
||||
TenantDatabaseService,
|
||||
TenantProvisioningService,
|
||||
|
||||
214
backend/src/voice/audio-converter.service.ts
Normal file
214
backend/src/voice/audio-converter.service.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
/**
|
||||
* Audio format converter for Twilio <-> OpenAI audio streaming
|
||||
*
|
||||
* Twilio Media Streams format:
|
||||
* - Codec: μ-law (G.711)
|
||||
* - Sample rate: 8kHz
|
||||
* - Encoding: base64
|
||||
* - Chunk size: 20ms (160 bytes)
|
||||
*
|
||||
* OpenAI Realtime API format:
|
||||
* - Codec: PCM16
|
||||
* - Sample rate: 24kHz
|
||||
* - Encoding: base64
|
||||
* - Mono channel
|
||||
*/
|
||||
@Injectable()
|
||||
export class AudioConverterService {
|
||||
private readonly logger = new Logger(AudioConverterService.name);
|
||||
|
||||
// μ-law decode lookup table
|
||||
private readonly MULAW_DECODE_TABLE = this.buildMuLawDecodeTable();
|
||||
|
||||
// μ-law encode lookup table
|
||||
private readonly MULAW_ENCODE_TABLE = this.buildMuLawEncodeTable();
|
||||
|
||||
/**
|
||||
* Build μ-law to linear PCM16 decode table
|
||||
*/
|
||||
private buildMuLawDecodeTable(): Int16Array {
|
||||
const table = new Int16Array(256);
|
||||
for (let i = 0; i < 256; i++) {
|
||||
const mulaw = ~i;
|
||||
const exponent = (mulaw >> 4) & 0x07;
|
||||
const mantissa = mulaw & 0x0f;
|
||||
let sample = (mantissa << 3) + 0x84;
|
||||
sample <<= exponent;
|
||||
sample -= 0x84;
|
||||
if ((mulaw & 0x80) === 0) {
|
||||
sample = -sample;
|
||||
}
|
||||
table[i] = sample;
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build linear PCM16 to μ-law encode table
|
||||
*/
|
||||
private buildMuLawEncodeTable(): Uint8Array {
|
||||
const table = new Uint8Array(65536);
|
||||
for (let i = 0; i < 65536; i++) {
|
||||
const sample = (i - 32768);
|
||||
const sign = sample < 0 ? 0x80 : 0x00;
|
||||
const magnitude = Math.abs(sample);
|
||||
|
||||
// Add bias
|
||||
let biased = magnitude + 0x84;
|
||||
|
||||
// Find exponent
|
||||
let exponent = 7;
|
||||
for (let exp = 0; exp < 8; exp++) {
|
||||
if (biased <= (0xff << exp)) {
|
||||
exponent = exp;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract mantissa
|
||||
const mantissa = (biased >> (exponent + 3)) & 0x0f;
|
||||
|
||||
// Combine sign, exponent, mantissa
|
||||
const mulaw = ~(sign | (exponent << 4) | mantissa);
|
||||
table[i] = mulaw & 0xff;
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode μ-law audio to linear PCM16
|
||||
* @param mulawData - Buffer containing μ-law encoded audio
|
||||
* @returns Buffer containing PCM16 audio (16-bit little-endian)
|
||||
*/
|
||||
decodeMuLaw(mulawData: Buffer): Buffer {
|
||||
const pcm16 = Buffer.allocUnsafe(mulawData.length * 2);
|
||||
|
||||
for (let i = 0; i < mulawData.length; i++) {
|
||||
const sample = this.MULAW_DECODE_TABLE[mulawData[i]];
|
||||
pcm16.writeInt16LE(sample, i * 2);
|
||||
}
|
||||
|
||||
return pcm16;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode linear PCM16 to μ-law
|
||||
* @param pcm16Data - Buffer containing PCM16 audio (16-bit little-endian)
|
||||
* @returns Buffer containing μ-law encoded audio
|
||||
*/
|
||||
encodeMuLaw(pcm16Data: Buffer): Buffer {
|
||||
const mulaw = Buffer.allocUnsafe(pcm16Data.length / 2);
|
||||
|
||||
for (let i = 0; i < pcm16Data.length; i += 2) {
|
||||
const sample = pcm16Data.readInt16LE(i);
|
||||
const index = (sample + 32768) & 0xffff;
|
||||
mulaw[i / 2] = this.MULAW_ENCODE_TABLE[index];
|
||||
}
|
||||
|
||||
return mulaw;
|
||||
}
|
||||
|
||||
/**
 * Resample audio from 8 kHz to 24 kHz using linear interpolation.
 * @param pcm16Data - Buffer containing 8 kHz PCM16 audio
 * @returns Buffer containing 24 kHz PCM16 audio (3x as many samples)
 */
resample8kTo24k(pcm16Data: Buffer): Buffer {
  const srcCount = pcm16Data.length / 2;
  const dstCount = Math.floor(srcCount * 3); // upsample by exactly 3x
  const out = Buffer.allocUnsafe(dstCount * 2);

  for (let dst = 0; dst < dstCount; dst++) {
    // Fractional source position corresponding to this output sample.
    const pos = dst / 3;
    const lo = Math.floor(pos);
    const hi = Math.min(lo + 1, srcCount - 1); // clamp at the final sample
    const t = pos - lo;

    const a = pcm16Data.readInt16LE(lo * 2);
    const b = pcm16Data.readInt16LE(hi * 2);

    // Blend the two neighbouring source samples.
    out.writeInt16LE(Math.round(a + (b - a) * t), dst * 2);
  }

  return out;
}
|
||||
|
||||
/**
 * Resample audio from 24 kHz to 8 kHz by averaging each group of three
 * consecutive samples (simple anti-aliasing decimation).
 * @param pcm16Data - Buffer containing 24 kHz PCM16 audio
 * @returns Buffer containing 8 kHz PCM16 audio (1/3 as many samples)
 */
resample24kTo8k(pcm16Data: Buffer): Buffer {
  const srcCount = pcm16Data.length / 2;
  const dstCount = Math.floor(srcCount / 3); // decimate by exactly 3x
  const out = Buffer.allocUnsafe(dstCount * 2);

  for (let dst = 0; dst < dstCount; dst++) {
    const base = dst * 3;
    let sum = 0;

    // Average three consecutive source samples; indices are clamped so a
    // short tail never reads past the end of the buffer.
    for (let k = 0; k < 3; k++) {
      const src = Math.min(base + k, srcCount - 1);
      sum += pcm16Data.readInt16LE(src * 2);
    }

    out.writeInt16LE(Math.round(sum / 3), dst * 2);
  }

  return out;
}
|
||||
|
||||
/**
 * Convert Twilio μ-law 8 kHz audio into OpenAI PCM16 24 kHz audio.
 * @param twilioBase64 - Base64-encoded μ-law audio from Twilio
 * @returns Base64-encoded PCM16 24 kHz audio for OpenAI
 * @throws Rethrows any decode/resample failure after logging it.
 */
twilioToOpenAI(twilioBase64: string): string {
  try {
    // base64 -> μ-law bytes -> PCM16 @ 8 kHz -> PCM16 @ 24 kHz -> base64
    const mulawBytes = Buffer.from(twilioBase64, 'base64');
    const pcm8k = this.decodeMuLaw(mulawBytes);
    return this.resample8kTo24k(pcm8k).toString('base64');
  } catch (error) {
    this.logger.error('Error converting Twilio to OpenAI audio', error);
    throw error;
  }
}
|
||||
|
||||
/**
 * Convert OpenAI PCM16 24 kHz audio into Twilio μ-law 8 kHz audio.
 * @param openaiBase64 - Base64-encoded PCM16 24 kHz audio from OpenAI
 * @returns Base64-encoded μ-law 8 kHz audio for Twilio
 * @throws Rethrows any resample/encode failure after logging it.
 */
openAIToTwilio(openaiBase64: string): string {
  try {
    // base64 -> PCM16 @ 24 kHz -> PCM16 @ 8 kHz -> μ-law bytes -> base64
    const pcm24k = Buffer.from(openaiBase64, 'base64');
    const pcm8k = this.resample24kTo8k(pcm24k);
    return this.encodeMuLaw(pcm8k).toString('base64');
  } catch (error) {
    this.logger.error('Error converting OpenAI to Twilio audio', error);
    throw error;
  }
}
|
||||
}
|
||||
25
backend/src/voice/dto/call-event.dto.ts
Normal file
25
backend/src/voice/dto/call-event.dto.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
 * Lifecycle event for a voice call, emitted to clients over the event channel.
 */
export interface CallEventDto {
  // Provider call identifier (Twilio call SID).
  callSid: string;
  direction: 'inbound' | 'outbound';
  // NOTE(review): presumably E.164-formatted numbers — confirm against the webhook payload.
  fromNumber: string;
  toNumber: string;
  // Free-form status string. NOTE(review): values are not constrained here —
  // likely Twilio call-status values; confirm with the emitter.
  status: string;
}
|
||||
|
||||
/**
 * A single DTMF keypress detected on an active call.
 */
export interface DtmfEventDto {
  // Provider call identifier (Twilio call SID).
  callSid: string;
  // The pressed key as a one-character string (e.g. '0'-'9', '*', '#').
  digit: string;
}
|
||||
|
||||
/**
 * A speech-to-text transcript fragment for an active call.
 */
export interface TranscriptEventDto {
  // Provider call identifier (Twilio call SID).
  callSid: string;
  transcript: string;
  // True when this is a finalized segment rather than an interim hypothesis.
  isFinal: boolean;
}
|
||||
|
||||
/**
 * AI-generated assistance attached to an active call.
 */
export interface AiSuggestionDto {
  // Provider call identifier (Twilio call SID).
  callSid: string;
  // Human-readable suggestion text.
  suggestion: string;
  // Suggested reply text, a recommended action, or an analytical insight.
  type: 'response' | 'action' | 'insight';
  // NOTE(review): `any` defeats type checking for consumers — consider
  // `unknown` or a per-type discriminated union; changing it now would force
  // readers to narrow, so flagged rather than changed.
  data?: any;
}
|
||||
10
backend/src/voice/dto/initiate-call.dto.ts
Normal file
10
backend/src/voice/dto/initiate-call.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsString, IsNotEmpty, Matches } from 'class-validator';
|
||||
|
||||
/**
 * Request body for initiating an outbound voice call.
 */
export class InitiateCallDto {
  // Destination phone number. NOTE(review): the `\+?` makes the leading '+'
  // optional even though the message demands E.164, and `[1-9]\d{1,14}`
  // allows 2-15 digits — confirm whether a mandatory '+' and tighter length
  // bounds are intended before hardening the pattern.
  @IsString()
  @IsNotEmpty()
  @Matches(/^\+?[1-9]\d{1,14}$/, {
    message: 'Invalid phone number format (use E.164 format)',
  })
  toNumber: string;
}
|
||||
20
backend/src/voice/interfaces/integration-config.interface.ts
Normal file
20
backend/src/voice/interfaces/integration-config.interface.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
 * Credentials and identifiers for a tenant's Twilio integration.
 */
export interface TwilioConfig {
  accountSid: string;
  authToken: string;
  // Twilio number used as caller ID / for outbound calls.
  phoneNumber: string;
  apiKey?: string; // API Key SID for generating access tokens
  apiSecret?: string; // API Key Secret
  twimlAppSid?: string; // TwiML App SID for Voice SDK
}
|
||||
|
||||
/**
 * Credentials and model options for a tenant's OpenAI integration.
 */
export interface OpenAIConfig {
  apiKey: string;
  // Optional Assistants API assistant ID used to drive call logic.
  assistantId?: string;
  // NOTE(review): defaults for model/voice are not defined here — presumably
  // applied by the consuming service; confirm.
  model?: string;
  voice?: string;
}
|
||||
|
||||
/**
 * Per-tenant bundle of third-party integration configs; each section is
 * optional so a tenant may enable only the services it uses.
 */
export interface IntegrationsConfig {
  twilio?: TwilioConfig;
  openai?: OpenAIConfig;
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user