Compare commits
7 Commits
permission
...
de65aa4025
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
de65aa4025 | ||
|
|
ded413b99b | ||
|
|
20fc90a3fb | ||
|
|
51c82d3d95 | ||
|
|
a4577ddcf3 | ||
|
|
5f3fcef1ec | ||
|
|
16907aadf8 |
5
.env.api
5
.env.api
@@ -5,6 +5,11 @@ DATABASE_URL="mysql://platform:platform@db:3306/platform"
|
||||
CENTRAL_DATABASE_URL="mysql://root:asjdnfqTash37faggT@db:3306/central_platform"
|
||||
REDIS_URL="redis://redis:6379"
|
||||
|
||||
# Meilisearch (optional)
|
||||
MEILI_HOST="http://meilisearch:7700"
|
||||
MEILI_API_KEY="dev-meili-master-key"
|
||||
MEILI_INDEX_PREFIX="tenant_"
|
||||
|
||||
# JWT, multi-tenant hints, etc.
|
||||
JWT_SECRET="devsecret"
|
||||
TENANCY_STRATEGY="single-db"
|
||||
|
||||
2
.env.web
2
.env.web
@@ -2,4 +2,4 @@ NUXT_PORT=3001
|
||||
NUXT_HOST=0.0.0.0
|
||||
|
||||
# Point Nuxt to the API container (not localhost)
|
||||
NUXT_PUBLIC_API_BASE_URL=http://jupiter.routebox.co:3000
|
||||
NUXT_PUBLIC_API_BASE_URL=https://tenant1.routebox.co
|
||||
|
||||
324
AI_PROCESS_BUILDER_README.md
Normal file
324
AI_PROCESS_BUILDER_README.md
Normal file
@@ -0,0 +1,324 @@
|
||||
# AI Process Builder + Chat Orchestrator
|
||||
|
||||
A complete implementation of tenant-scoped AI process automation where admins design LangGraph-compiled workflows via React Flow UI, and end-users execute them through a Deep Agent chat orchestrator with deterministic, audited execution.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
### Backend Components
|
||||
|
||||
#### 1. **Deep Agent Orchestrator** ([deep-agent.orchestrator.ts](backend/src/ai-processes/deep-agent.orchestrator.ts))
|
||||
- Uses LangChain/OpenAI to intelligently select processes
|
||||
- Extracts structured inputs from natural language
|
||||
- Generates friendly confirmation messages
|
||||
- Four-step workflow: discover → select → extract → execute
|
||||
|
||||
#### 2. **Graph Compiler** ([ai-processes.compiler.ts](backend/src/ai-processes/ai-processes.compiler.ts))
|
||||
- Validates ReactFlow JSON graphs (Start/End nodes, reachability, cycles)
|
||||
- Compiles to LangGraph-compatible state machines
|
||||
- Validates tool allowlist and JSON schemas (Ajv)
|
||||
- Persists compiled artifact for versioned execution
|
||||
|
||||
#### 3. **Runtime Executor** ([ai-processes.runner.ts](backend/src/ai-processes/ai-processes.runner.ts))
|
||||
- Executes compiled graphs deterministically
|
||||
- Implements 4 node types: LLMDecisionNode, ToolNode, HumanInputNode, End
|
||||
- Handles conditional edges via jsonlogic
|
||||
- Emits real-time events for streaming updates
|
||||
|
||||
#### 4. **Tool Registry** ([tools/tool-registry.ts](backend/src/ai-processes/tools/tool-registry.ts))
|
||||
- Tenant-scoped tool allowlist (database-backed via AiToolConfig)
|
||||
- Demo tools wrapping ObjectService (findAccount, createAccount, etc.)
|
||||
- Context injection (tenantId, userId, knex) for secure execution
|
||||
|
||||
#### 5. **Orchestrator Service** ([ai-processes.orchestrator.service.ts](backend/src/ai-processes/ai-processes.orchestrator.service.ts))
|
||||
- Integrates Deep Agent for process selection
|
||||
- Falls back to standard AI assistant when no processes configured
|
||||
- Manages chat sessions and message history
|
||||
- Streams execution events via SSE
|
||||
|
||||
### Frontend Components
|
||||
|
||||
#### 1. **AIChatBar** ([components/AIChatBar.vue](frontend/components/AIChatBar.vue))
|
||||
- Updated to call `/ai-processes/chat/messages` endpoint
|
||||
- SSE event stream consumer for real-time updates
|
||||
- Displays process selection, node execution, tool calls
|
||||
- Handles NEED_INPUT events for human-in-the-loop
|
||||
|
||||
#### 2. **Process Management UI** ([pages/ai-processes/](frontend/pages/ai-processes/))
|
||||
- List view: displays all processes with versions
|
||||
- Editor view: React Flow integration via iframe + postMessage
|
||||
- Test runner for quick validation
|
||||
|
||||
#### 3. **React Flow Editor** ([ai-processes-editor/src/App.tsx](frontend/ai-processes-editor/src/App.tsx))
|
||||
- Node palette: Start, LLMDecisionNode, ToolNode, HumanInputNode, End
|
||||
- Visual graph designer with drag-drop
|
||||
- Auto-saves to parent window via postMessage
|
||||
- Loads existing graphs for editing
|
||||
|
||||
### Data Models (Objection.js)
|
||||
|
||||
```typescript
|
||||
AiProcess
|
||||
├── id, tenantId, name, description, latestVersion
|
||||
└── relations: versions[], runs[]
|
||||
|
||||
AiProcessVersion
|
||||
├── id, tenantId, processId, version
|
||||
├── graphJson (ReactFlow definition)
|
||||
└── compiledJson (LangGraph artifact)
|
||||
|
||||
AiProcessRun
|
||||
├── id, tenantId, processId, version, status
|
||||
├── inputJson, outputJson, errorJson, stateJson
|
||||
└── currentNodeId (for resume)
|
||||
|
||||
AiChatSession
|
||||
├── id, tenantId, userId
|
||||
└── relations: messages[]
|
||||
|
||||
AiChatMessage
|
||||
├── id, sessionId, role, content
|
||||
└── timestamps
|
||||
|
||||
AiAuditEvent
|
||||
├── id, tenantId, runId, eventType
|
||||
└── payloadJson (full event data)
|
||||
|
||||
AiToolConfig
|
||||
├── id, tenantId, toolName, enabled
|
||||
└── configJson (tool-specific settings)
|
||||
```
|
||||
|
||||
## Demo Process: Register New Pet
|
||||
|
||||
A complete workflow demonstrating conditional logic and tool orchestration:
|
||||
|
||||
1. **Extract Info** (LLMDecisionNode)
|
||||
- Parses user message for pet + owner details
|
||||
- Outputs structured JSON with validation
|
||||
|
||||
2. **Find/Create Account** (Conditional)
|
||||
- Searches for existing account by name/email
|
||||
- Creates new account if not found
|
||||
- Merges results into state
|
||||
|
||||
3. **Find/Create Contact** (Conditional)
|
||||
- Searches for existing contact under account
|
||||
- Creates new contact if not found
|
||||
|
||||
4. **Create Pet** (ToolNode)
|
||||
- Inserts pet record linked to contact
|
||||
- Returns pet ID
|
||||
|
||||
### Seed the Demo Process
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run migrate:tenant -- <tenant-slug>
|
||||
npm run seed:demo-process -- <tenant-slug>
|
||||
```
|
||||
|
||||
### Test the Demo Process
|
||||
|
||||
1. Navigate to `/ai-processes` in your tenant subdomain
|
||||
2. Open "Register New Pet" process
|
||||
3. Click "Test Run" or use the chat bar:
|
||||
|
||||
```
|
||||
User: "Register a dog named Max, breed Golden Retriever, age 3,
|
||||
owned by John Smith, email john@example.com"
|
||||
|
||||
Agent: 🔄 Selected process: Register New Pet
|
||||
I'll register Max (Golden Retriever, 3 years old) for John Smith.
|
||||
|
||||
⚙️ Executing step: Extract Info
|
||||
✓ Extracted pet details
|
||||
|
||||
🔧 Using tool: findAccount
|
||||
ℹ️ Account not found, creating new account
|
||||
|
||||
🔧 Using tool: createAccount
|
||||
✓ Created account for John Smith
|
||||
|
||||
🔧 Using tool: findContact
|
||||
ℹ️ Contact not found, creating new contact
|
||||
|
||||
🔧 Using tool: createContact
|
||||
✓ Created contact: John Smith
|
||||
|
||||
🔧 Using tool: createPet
|
||||
✓ Created pet: Max (ID: pet_1234567890)
|
||||
|
||||
✅ Process completed successfully!
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Process Management (Admin)
|
||||
|
||||
```typescript
|
||||
GET /tenants/:tenantId/ai-processes
|
||||
POST /tenants/:tenantId/ai-processes
|
||||
GET /tenants/:tenantId/ai-processes/:id
|
||||
POST /tenants/:tenantId/ai-processes/:id/versions
|
||||
GET /tenants/:tenantId/ai-processes/:id/versions
|
||||
|
||||
POST /tenants/:tenantId/ai-processes/:id/runs
|
||||
POST /tenants/:tenantId/ai-processes/runs/:runId/resume
|
||||
```
|
||||
|
||||
### Chat Orchestrator (End User)
|
||||
|
||||
```typescript
|
||||
POST /tenants/:tenantId/ai-processes/chat/messages
|
||||
SSE /tenants/:tenantId/ai-processes/stream?sessionId=xxx
|
||||
```
|
||||
|
||||
## Event Stream Types
|
||||
|
||||
```typescript
|
||||
type StreamEvent =
|
||||
| { type: 'agent_started' }
|
||||
| { type: 'processes_listed', data: { count: number } }
|
||||
| { type: 'process_selected', processId: string, version: number }
|
||||
| { type: 'agent_message', data: { message: string } }
|
||||
| { type: 'node_started', nodeId: string }
|
||||
| { type: 'node_completed', nodeId: string }
|
||||
| { type: 'tool_called', toolName: string, nodeId: string }
|
||||
| { type: 'llm_decision', nodeId: string, data: any }
|
||||
| { type: 'need_input', data: { prompt: string, schema: JSONSchema } }
|
||||
| { type: 'final', data: { output: any } }
|
||||
| { type: 'error', data: { error: string } }
|
||||
```
|
||||
|
||||
## Security & Guardrails
|
||||
|
||||
### 1. **Tenancy Isolation**
|
||||
- All queries filtered by `tenantId` (enforced in Objection models)
|
||||
- Tool context includes tenant scope
|
||||
- Database-per-tenant architecture (inherited from platform)
|
||||
|
||||
### 2. **Tool Allowlist**
|
||||
- Two-level validation:
|
||||
- Tenant-level: `AiToolConfig` table (enabled tools per tenant)
|
||||
- Compile-time: validates toolName exists in registry
|
||||
- Runtime check before tool execution
|
||||
|
||||
### 3. **Schema Validation**
|
||||
- LLMDecisionNode output validated against JSON Schema (Ajv)
|
||||
- HumanInputNode input validated before resume
|
||||
- Graph structure validated at compile time
|
||||
|
||||
### 4. **Audit Trail**
|
||||
- Every node execution logged to `ai_audit_events`
|
||||
- Includes: tool calls, LLM decisions, state mutations, errors
|
||||
- Queryable for compliance dashboards
|
||||
|
||||
### 5. **Versioning**
|
||||
- Immutable process versions (create-only)
|
||||
- Runs reference specific version number
|
||||
- Graph definition + compiled artifact stored together
|
||||
|
||||
## Running the System
|
||||
|
||||
### 1. **Run Migrations**
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run migrate:tenant -- tenant1
|
||||
```
|
||||
|
||||
### 2. **Seed Demo Data**
|
||||
|
||||
```bash
|
||||
npm run seed:demo-process -- tenant1
|
||||
```
|
||||
|
||||
### 3. **Start Backend**
|
||||
|
||||
```bash
|
||||
npm run start:dev
|
||||
```
|
||||
|
||||
### 4. **Build Editor (if needed)**
|
||||
|
||||
```bash
|
||||
cd frontend/ai-processes-editor
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
### 5. **Start Frontend**
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### 6. **Access UI**
|
||||
|
||||
- Admin UI: `http://tenant1.localhost:3001/ai-processes`
|
||||
- Chat UI: Available in bottom drawer on any page (⌘K to toggle)
|
||||
|
||||
## Extension Points
|
||||
|
||||
### Adding New Node Types
|
||||
|
||||
1. Define type in [ai-processes.types.ts](backend/src/ai-processes/ai-processes.types.ts)
|
||||
2. Add schema validation in [ai-processes.schemas.ts](backend/src/ai-processes/ai-processes.schemas.ts)
|
||||
3. Implement executor in [ai-processes.runner.ts](backend/src/ai-processes/ai-processes.runner.ts)
|
||||
4. Add UI component in React Flow editor
|
||||
|
||||
### Adding New Tools
|
||||
|
||||
1. Implement handler in [tools/demo-tools.ts](backend/src/ai-processes/tools/demo-tools.ts)
|
||||
2. Register in `demoTools` export
|
||||
3. Add to tenant allowlist via UI or seed script
|
||||
4. Document input/output schema
|
||||
|
||||
### Custom LLM Decision Logic
|
||||
|
||||
Override `llmDecision` callback in [ai-processes.service.ts](backend/src/ai-processes/ai-processes.service.ts):
|
||||
|
||||
```typescript
|
||||
llmDecision: async (node, state) => {
|
||||
const prompt = renderTemplate(node.data.promptTemplate, state);
|
||||
const response = await callOpenAI(prompt, node.data.model);
|
||||
return validateAgainstSchema(response, node.data.outputSchema);
|
||||
}
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Process not appearing in chat
|
||||
|
||||
- Check: `npm run seed:demo-process` completed successfully
|
||||
- Verify: Process exists in database (`select * from ai_processes`)
|
||||
- Check: Tools enabled (`select * from ai_tool_configs`)
|
||||
|
||||
### Graph validation errors
|
||||
|
||||
- Ensure exactly one Start node
|
||||
- Ensure at least one End node
|
||||
- Check all edges reference valid node IDs
|
||||
- Verify tool names match registered tools
|
||||
|
||||
### SSE stream not working
|
||||
|
||||
- Check CORS settings for subdomain routing
|
||||
- Verify `sessionId` returned from initial message
|
||||
- Check browser console for connection errors
|
||||
- Fallback: use polling endpoint (TODO: implement)
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Enhanced Input Extraction**: Use Deep Agent to extract required fields per process
|
||||
2. **Visual Schema Builder**: UI for JSON Schema creation (drag-drop fields)
|
||||
3. **Conditional Edge Builder**: Visual jsonlogic editor
|
||||
4. **Process Analytics**: Dashboard showing run success rates, avg duration
|
||||
5. **Human-in-Loop UI**: Dynamic form renderer for HumanInputNode
|
||||
6. **Process Marketplace**: Share processes across tenants (with permissions)
|
||||
7. **Python Microservice**: Optional Python runtime for native LangGraph support
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
83
DEBUG_INCOMING_CALL.md
Normal file
83
DEBUG_INCOMING_CALL.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Debugging Incoming Call Issue
|
||||
|
||||
## Current Problem
|
||||
- Hear "Connecting to your call" message (TwiML is executing)
|
||||
- No ring on mobile after "Connecting" message
|
||||
- Click Accept button does nothing
|
||||
- Call never connects
|
||||
|
||||
## Root Cause Hypothesis
|
||||
The Twilio Device SDK is likely **NOT receiving the incoming call event** from Twilio's Signaling Server. This could be because:
|
||||
|
||||
1. **Identity Mismatch**: The Device's identity (from JWT token) doesn't match the `<Client>ID</Client>` in TwiML
|
||||
2. **Device Not Registered**: Device registration isn't completing before the call arrives
|
||||
3. **Twilio Signaling Issue**: Device isn't connected to Twilio Signaling Server
|
||||
|
||||
## How to Debug
|
||||
|
||||
### Step 1: Check Device Identity in Console
|
||||
When you open the softphone dialog, **open Browser DevTools Console (F12)**
|
||||
|
||||
You should see logs like:
|
||||
```
|
||||
Token received, creating Device...
|
||||
Token identity: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
Token grants: {voice: {...}}
|
||||
Registering Twilio Device...
|
||||
✓ Twilio Device registered - ready to receive calls
|
||||
Device identity: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
Device state: ready
|
||||
```
|
||||
|
||||
**Note the Device identity value** - e.g., "e6d45fa3-a108-4085-81e5-a8e05e85e6fb"
|
||||
|
||||
### Step 2: Check Backend Logs
|
||||
When you make an inbound call, look for backend logs showing:
|
||||
|
||||
```
|
||||
╔════════════════════════════════════════╗
|
||||
║ === INBOUND CALL RECEIVED ===
|
||||
╚════════════════════════════════════════╝
|
||||
...
|
||||
Client IDs to dial: e6d45fa3-a108-4085-81e5-a8e05e85e6fb
|
||||
First Client ID format check: "e6d45fa3-a108-4085-81e5-a8e05e85e6fb" (length: 36)
|
||||
```
|
||||
|
||||
### Step 3: Compare Identities
|
||||
The Device identity from frontend console MUST MATCH the Client ID from backend logs.
|
||||
|
||||
**If they match**: The issue is with Twilio Signaling or Device SDK configuration
|
||||
**If they don't match**: We found the bug - identity mismatch
|
||||
|
||||
### Step 4: Monitor Incoming Event
|
||||
When you make the inbound call, keep watching the browser console for:
|
||||
|
||||
```
|
||||
🔔 Twilio Device INCOMING event received: {...}
|
||||
```
|
||||
|
||||
**If this appears**: The Device SDK IS receiving the call, so the Accept button issue is frontend
|
||||
**If this doesn't appear**: The Device SDK is NOT receiving the call, so it's an identity/registration issue
|
||||
|
||||
## What Changed
|
||||
- Frontend now relies on **Twilio Device SDK `incoming` event** (not Socket.IO) for showing incoming call
|
||||
- Added comprehensive logging to Device initialization
|
||||
- Added logging to Accept button handler
|
||||
- Backend logs Device ID format for comparison
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Make an inbound call
|
||||
2. Check browser console for the registration logs shown above
|
||||
3. Check backend logs for Client ID
|
||||
4. Look for "🔔 Twilio Device INCOMING event" in browser console
|
||||
5. Try clicking Accept and watch console for "📞 Accepting call" logs
|
||||
6. Report back with:
|
||||
- Device identity from console
|
||||
- Client ID from backend logs
|
||||
- Whether "🔔 Twilio Device INCOMING event" appears
|
||||
- Whether any accept logs appear
|
||||
|
||||
## Important Files
|
||||
- Backend: `/backend/src/voice/voice.controller.ts` (lines 205-210 show Client ID logging)
|
||||
- Frontend: `/frontend/composables/useSoftphone.ts` (Device initialization and incoming handler)
|
||||
@@ -1,251 +0,0 @@
|
||||
# Authorization System Implementation Summary
|
||||
|
||||
## ✅ Implementation Complete
|
||||
|
||||
A comprehensive polymorphic record sharing and authorization system has been implemented with CASL, Objection.js, and NestJS.
|
||||
|
||||
## What Was Built
|
||||
|
||||
### Backend (NestJS + Objection.js + CASL)
|
||||
|
||||
#### 1. Database Layer
|
||||
- ✅ Migration for authorization tables (`20250128000001_add_authorization_system.js`)
|
||||
- ✅ Updated Prisma schema with new models
|
||||
- ✅ Objection.js models: `ObjectField`, `RoleRule`, `RecordShare`
|
||||
- ✅ Updated existing models with new relations
|
||||
|
||||
#### 2. Authorization Core
|
||||
- ✅ `AbilityFactory` - Builds CASL abilities from 3 layers (global, role, share)
|
||||
- ✅ Query scoping utilities for SQL-level authorization
|
||||
- ✅ Guards and decorators (`AbilitiesGuard`, `@CheckAbility()`, `@CurrentUser()`)
|
||||
- ✅ Middleware for attaching abilities to requests
|
||||
|
||||
#### 3. API Endpoints
|
||||
- ✅ **ShareController** - CRUD for record shares
|
||||
- POST /shares - Create share
|
||||
- GET /shares/record/:objectDefinitionId/:recordId - List shares
|
||||
- GET /shares/granted - Shares granted by user
|
||||
- GET /shares/received - Shares received by user
|
||||
- PATCH /shares/:id - Update share
|
||||
- DELETE /shares/:id - Revoke share
|
||||
|
||||
- ✅ **RoleController** - Role management
|
||||
- Standard CRUD for roles
|
||||
- RoleRuleController for CASL rules
|
||||
|
||||
- ✅ **ObjectAccessController** - Object-level permissions
|
||||
- GET/PUT /setup/objects/:apiName/access
|
||||
- POST /setup/objects/:apiName/fields/:fieldKey/permissions
|
||||
- PUT /setup/objects/:apiName/field-permissions
|
||||
|
||||
### Frontend (Nuxt 3 + Vue 3)
|
||||
|
||||
#### 4. Object Management Enhancement
|
||||
- ✅ Added "Access & Permissions" tab to object setup page
|
||||
- ✅ `ObjectAccessSettings.vue` component:
|
||||
- Configure access model (public/owner/mixed)
|
||||
- Set public CRUD permissions
|
||||
- Configure owner field
|
||||
- Set field-level read/write permissions
|
||||
|
||||
#### 5. Role Management
|
||||
- ✅ New page: `/setup/roles`
|
||||
- ✅ `RolePermissionsEditor.vue` component:
|
||||
- Configure CRUD permissions per object
|
||||
- Apply conditions (e.g., own records only)
|
||||
- Visual permission matrix
|
||||
|
||||
#### 6. Record Sharing
|
||||
- ✅ `RecordShareDialog.vue` component:
|
||||
- List current shares
|
||||
- Add new shares with permissions
|
||||
- Field-level scoping
|
||||
- Expiration dates
|
||||
- Revoke shares
|
||||
|
||||
## Key Features
|
||||
|
||||
### 🌍 Global Object Policies
|
||||
- Public/private access models
|
||||
- Default CRUD permissions per object
|
||||
- Configurable owner field
|
||||
- Field-level default permissions
|
||||
|
||||
### 👥 Role-Based Access
|
||||
- CASL rules stored in database
|
||||
- Per-object permissions
|
||||
- Condition-based rules (e.g., ownerId matching)
|
||||
- Multiple actions per rule
|
||||
|
||||
### 🔗 Per-Record Sharing
|
||||
- Polymorphic design (works with any object type)
|
||||
- Grant read/update access to specific users
|
||||
- Optional field-level scoping
|
||||
- Expiration and revocation support
|
||||
- Track who granted each share
|
||||
|
||||
### 🔒 SQL Query Scoping
|
||||
- Critical for list endpoints
|
||||
- Ensures users only see authorized records
|
||||
- Combines ownership + sharing logic
|
||||
- Works with public access flags
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
backend/
|
||||
├── migrations/tenant/
|
||||
│ └── 20250128000001_add_authorization_system.js
|
||||
├── src/
|
||||
│ ├── auth/
|
||||
│ │ ├── ability.factory.ts (CASL ability builder)
|
||||
│ │ ├── query-scope.util.ts (SQL scoping utilities)
|
||||
│ │ ├── guards/
|
||||
│ │ │ └── abilities.guard.ts
|
||||
│ │ ├── decorators/
|
||||
│ │ │ ├── auth.decorators.ts
|
||||
│ │ │ └── check-ability.decorator.ts
|
||||
│ │ └── middleware/
|
||||
│ │ └── ability.middleware.ts
|
||||
│ ├── models/
|
||||
│ │ ├── object-field.model.ts
|
||||
│ │ ├── role-rule.model.ts
|
||||
│ │ └── record-share.model.ts
|
||||
│ ├── rbac/
|
||||
│ │ ├── share.controller.ts
|
||||
│ │ └── role.controller.ts
|
||||
│ └── object/
|
||||
│ └── object-access.controller.ts
|
||||
|
||||
frontend/
|
||||
├── components/
|
||||
│ ├── ObjectAccessSettings.vue
|
||||
│ ├── RecordShareDialog.vue
|
||||
│ └── RolePermissionsEditor.vue
|
||||
└── pages/
|
||||
├── setup/
|
||||
│ ├── objects/[apiName].vue (enhanced with access tab)
|
||||
│ └── roles.vue
|
||||
└── ...
|
||||
|
||||
docs/
|
||||
└── AUTHORIZATION_SYSTEM.md (comprehensive documentation)
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
### 1. Run the Migration
|
||||
```bash
|
||||
cd backend
|
||||
npm run migrate:latest
|
||||
```
|
||||
|
||||
### 2. Initialize Existing Objects
|
||||
Set default access models for existing object definitions:
|
||||
```sql
|
||||
UPDATE object_definitions
|
||||
SET
|
||||
access_model = 'owner',
|
||||
public_read = false,
|
||||
public_create = false,
|
||||
public_update = false,
|
||||
public_delete = false,
|
||||
owner_field = 'ownerId'
|
||||
WHERE access_model IS NULL;
|
||||
```
|
||||
|
||||
### 3. Apply Query Scoping
|
||||
Update existing controllers to use query scoping:
|
||||
|
||||
```typescript
|
||||
import { applyReadScope } from '@/auth/query-scope.util';
|
||||
|
||||
// In your list endpoint
|
||||
async findAll(@CurrentUser() user: User) {
|
||||
const objectDef = await ObjectDefinition.query(this.knex)
|
||||
.findOne({ apiName: 'YourObject' });
|
||||
|
||||
let query = YourModel.query(this.knex);
|
||||
query = applyReadScope(query, user, objectDef, this.knex);
|
||||
|
||||
return query;
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Add Route Protection
|
||||
Use guards on sensitive endpoints:
|
||||
|
||||
```typescript
|
||||
@UseGuards(JwtAuthGuard, AbilitiesGuard)
|
||||
@CheckAbility({ action: 'update', subject: 'Post' })
|
||||
async update(@Body() data: any) {
|
||||
// Only users with 'update' permission on 'Post' can access
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Frontend Integration
|
||||
Add sharing button to record detail pages:
|
||||
|
||||
```vue
|
||||
<template>
|
||||
<div>
|
||||
<!-- Your record details -->
|
||||
<Button @click="showShareDialog = true">
|
||||
<Share class="w-4 h-4 mr-2" />
|
||||
Share
|
||||
</Button>
|
||||
|
||||
<RecordShareDialog
|
||||
:open="showShareDialog"
|
||||
:object-definition-id="objectDefinition.id"
|
||||
:record-id="record.id"
|
||||
:fields="objectDefinition.fields"
|
||||
@close="showShareDialog = false"
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
```
|
||||
|
||||
## Testing Checklist
|
||||
|
||||
- [ ] Run database migration successfully
|
||||
- [ ] Create a test role with permissions
|
||||
- [ ] Configure object access settings via UI
|
||||
- [ ] Share a record with another user
|
||||
- [ ] Verify shared record appears in grantee's list
|
||||
- [ ] Verify query scoping filters unauthorized records
|
||||
- [ ] Test field-level permissions
|
||||
- [ ] Test share expiration
|
||||
- [ ] Test share revocation
|
||||
- [ ] Test role-based access with conditions
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
1. **Index Usage**: The migration creates proper indexes on foreign keys and commonly queried columns
|
||||
2. **Query Scoping**: Uses SQL EXISTS subqueries for efficient filtering
|
||||
3. **Ability Caching**: Consider caching abilities per request (already done via middleware)
|
||||
4. **Batch Loading**: When checking multiple records, batch the share lookups
|
||||
|
||||
## Security Notes
|
||||
|
||||
⚠️ **Important**: Always use SQL query scoping for list endpoints. Never fetch all records and filter in application code.
|
||||
|
||||
✅ **Best Practices**:
|
||||
- Share creation requires ownership verification
|
||||
- Only grantors can update/revoke shares
|
||||
- Expired/revoked shares are excluded from queries
|
||||
- Field-level permissions are enforced on write operations
|
||||
|
||||
## Documentation
|
||||
|
||||
Full documentation available in:
|
||||
- [AUTHORIZATION_SYSTEM.md](./AUTHORIZATION_SYSTEM.md) - Comprehensive guide
|
||||
- Inline code comments in all new files
|
||||
- JSDoc comments on key functions
|
||||
|
||||
## Support
|
||||
|
||||
For questions or issues:
|
||||
1. Check the documentation in `docs/AUTHORIZATION_SYSTEM.md`
|
||||
2. Review example usage in the controllers
|
||||
3. Examine the test cases (when added)
|
||||
173
SOFTPHONE_AI_ASSISTANT.md
Normal file
173
SOFTPHONE_AI_ASSISTANT.md
Normal file
@@ -0,0 +1,173 @@
|
||||
# Softphone AI Assistant - Complete Implementation
|
||||
|
||||
## 🎉 Features Implemented
|
||||
|
||||
### ✅ Real-time AI Call Assistant
|
||||
- **OpenAI Realtime API Integration** - Listens to live calls and provides suggestions
|
||||
- **Audio Streaming** - Twilio Media Streams fork audio to backend for AI processing
|
||||
- **Real-time Transcription** - Speech-to-text during calls
|
||||
- **Smart Suggestions** - AI analyzes conversation and advises the agent
|
||||
|
||||
## 🔧 Architecture
|
||||
|
||||
### Backend Flow
|
||||
```
|
||||
Inbound Call → TwiML (<Start><Stream> + <Dial>)
|
||||
→ Media Stream WebSocket → OpenAI Realtime API
|
||||
→ AI Processing → Socket.IO → Frontend
|
||||
```
|
||||
|
||||
### Key Components
|
||||
|
||||
1. **TwiML Structure** (`voice.controller.ts:226-234`)
|
||||
- `<Start><Stream>` - Forks audio for AI processing
|
||||
- `<Dial><Client>` - Connects call to agent's softphone
|
||||
|
||||
2. **OpenAI Integration** (`voice.service.ts:431-519`)
|
||||
- WebSocket connection to `wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01`
|
||||
- Session config with custom instructions for agent assistance
|
||||
- Handles transcripts and generates suggestions
|
||||
|
||||
3. **AI Message Handler** (`voice.service.ts:609-707`)
|
||||
- Processes OpenAI events (transcripts, suggestions, audio)
|
||||
- Routes suggestions to frontend via Socket.IO
|
||||
- Saves transcripts to database
|
||||
|
||||
4. **Voice Gateway** (`voice.gateway.ts:272-289`)
|
||||
- `notifyAiTranscript()` - Real-time transcript chunks
|
||||
- `notifyAiSuggestion()` - AI suggestions to agent
|
||||
|
||||
### Frontend Components
|
||||
|
||||
1. **Softphone Dialog** (`SoftphoneDialog.vue:104-135`)
|
||||
- AI Assistant section with badge showing suggestion count
|
||||
- Color-coded suggestions (blue=response, green=action, purple=insight)
|
||||
- Animated highlight for newest suggestion
|
||||
|
||||
2. **Softphone Composable** (`useSoftphone.ts:515-535`)
|
||||
- Socket.IO event handlers for `ai:suggestion` and `ai:transcript`
|
||||
- Maintains history of last 10 suggestions
|
||||
- Maintains history of last 50 transcript items
|
||||
|
||||
## 📋 AI Prompt Configuration
|
||||
|
||||
The AI is instructed to:
|
||||
- **Listen, not talk** - It advises the agent, not the caller
|
||||
- **Provide concise suggestions** - 1-2 sentences max
|
||||
- **Use formatted output**:
|
||||
- `💡 Suggestion: [advice]`
|
||||
- `⚠️ Alert: [important notice]`
|
||||
- `📋 Action: [CRM action]`
|
||||
|
||||
## 🎨 UI Features
|
||||
|
||||
### Suggestion Types
|
||||
- **Response** (Blue) - Suggested replies or approaches
|
||||
- **Action** (Green) - Recommended CRM actions
|
||||
- **Insight** (Purple) - Important alerts or observations
|
||||
|
||||
### Visual Feedback
|
||||
- Badge showing number of suggestions
|
||||
- Newest suggestion pulses for attention
|
||||
- Auto-scrolling suggestion list
|
||||
- Timestamp on each suggestion
|
||||
|
||||
## 🔍 How to Monitor
|
||||
|
||||
### 1. Backend Logs
|
||||
```bash
|
||||
# Watch for AI events
|
||||
docker logs -f neo-backend-1 | grep -E "AI|OpenAI|transcript|suggestion"
|
||||
```
|
||||
|
||||
Key log markers:
|
||||
- `📝 Transcript chunk:` - Real-time speech detection
|
||||
- `✅ Final transcript:` - Complete transcript saved
|
||||
- `💡 AI Suggestion:` - AI-generated advice
|
||||
|
||||
### 2. Database
|
||||
```sql
|
||||
-- View call transcripts
|
||||
SELECT call_sid, ai_transcript, created_at
|
||||
FROM calls
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 5;
|
||||
```
|
||||
|
||||
### 3. Frontend Console
|
||||
- Open browser DevTools Console
|
||||
- Watch for: "AI suggestion:", "AI transcript:"
|
||||
|
||||
## 🚀 Testing
|
||||
|
||||
1. **Make a test call** to your Twilio number
|
||||
2. **Accept the call** in the softphone dialog
|
||||
3. **Talk during the call** - Say something like "I need to schedule a follow-up"
|
||||
4. **Watch the UI** - AI suggestions appear in real-time
|
||||
5. **Check logs** - See transcription and suggestion generation
|
||||
|
||||
## 📊 Current Status
|
||||
|
||||
✅ **Working**:
|
||||
- Inbound calls ring softphone
|
||||
- Media stream forks audio to backend
|
||||
- OpenAI processes audio (1300+ packets/call)
|
||||
- AI generates suggestions
|
||||
- Suggestions appear in frontend
|
||||
- Transcripts saved to database
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Required Environment Variables
|
||||
```env
|
||||
# OpenAI API Key (set in tenant integrations config)
|
||||
OPENAI_API_KEY=sk-...
|
||||
|
||||
# Optional overrides
|
||||
OPENAI_MODEL=gpt-4o-realtime-preview-2024-10-01
|
||||
OPENAI_VOICE=alloy
|
||||
```
|
||||
|
||||
### Tenant Configuration
|
||||
Set in Settings > Integrations:
|
||||
- OpenAI API Key
|
||||
- Model (optional)
|
||||
- Voice (optional)
|
||||
|
||||
## 🎯 Next Steps (Optional Enhancements)
|
||||
|
||||
1. **CRM Tool Execution** - Implement actual tool calls (search contacts, create tasks)
|
||||
2. **Audio Response** - Send OpenAI audio back to caller (two-way AI interaction)
|
||||
3. **Sentiment Analysis** - Track call sentiment in real-time
|
||||
4. **Call Summary** - Generate post-call summary automatically
|
||||
5. **Custom Prompts** - Allow agents to customize AI instructions per call type
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### No suggestions appearing?
|
||||
1. Check OpenAI API key is configured
|
||||
2. Verify WebSocket connection logs show "OpenAI Realtime connected"
|
||||
3. Check frontend Socket.IO connection is established
|
||||
4. Verify user ID matches between backend and frontend
|
||||
|
||||
### Transcripts not saving?
|
||||
1. Check tenant database connection
|
||||
2. Verify `calls` table has `ai_transcript` column
|
||||
3. Check logs for "Failed to update transcript" errors
|
||||
|
||||
### OpenAI connection fails?
|
||||
1. Verify API key is valid
|
||||
2. Check model name is correct
|
||||
3. Review WebSocket close codes in logs
|
||||
|
||||
## 📝 Files Modified
|
||||
|
||||
**Backend:**
|
||||
- `/backend/src/voice/voice.service.ts` - OpenAI integration & AI message handling
|
||||
- `/backend/src/voice/voice.controller.ts` - TwiML generation with stream fork
|
||||
- `/backend/src/voice/voice.gateway.ts` - Socket.IO event emission
|
||||
- `/backend/src/main.ts` - Media stream WebSocket handler
|
||||
|
||||
**Frontend:**
|
||||
- `/frontend/components/SoftphoneDialog.vue` - AI suggestions UI
|
||||
- `/frontend/composables/useSoftphone.ts` - Socket.IO event handlers
|
||||
115
backend/insert-demo-process.sql
Normal file
115
backend/insert-demo-process.sql
Normal file
@@ -0,0 +1,115 @@
|
||||
-- Insert demo AI process directly
|
||||
SET @process_id = '2d883482-4df0-44d7-b6cf-8541b482afe4';
|
||||
SET @version_id = '437b1e72-405e-4862-a8bc-f368e554b482';
|
||||
SET @user_id = 'system';
|
||||
|
||||
-- Insert process
|
||||
INSERT INTO ai_processes (id, name, created_by)
|
||||
VALUES (@process_id, 'Register New Pet', @user_id);
|
||||
|
||||
-- Insert process version with compiled graph
|
||||
INSERT INTO ai_process_versions (id, process_id, version, graph_json, compiled_json, created_by)
|
||||
VALUES (
|
||||
@version_id,
|
||||
@process_id,
|
||||
1,
|
||||
'{}',
|
||||
JSON_OBJECT(
|
||||
'id', 'register_new_pet',
|
||||
'name', 'Register New Pet',
|
||||
'description', 'Complete pet registration workflow',
|
||||
'allowCycles', false,
|
||||
'startNodeId', 'start',
|
||||
'endNodeIds', JSON_ARRAY('end'),
|
||||
'maxIterations', 50,
|
||||
'nodes', JSON_ARRAY(
|
||||
JSON_OBJECT('id', 'start', 'type', 'Start', 'data', JSON_OBJECT('label', 'Start')),
|
||||
JSON_OBJECT('id', 'extract_info', 'type', 'LLMDecisionNode', 'data', JSON_OBJECT(
|
||||
'label', 'Extract Info',
|
||||
'promptTemplate', 'Extract: petName, species, ownerFirstName, ownerLastName, ownerEmail, accountName from: {{state.message}}',
|
||||
'inputKeys', JSON_ARRAY('message'),
|
||||
'outputSchema', JSON_OBJECT(
|
||||
'type', 'object',
|
||||
'properties', JSON_OBJECT(
|
||||
'petName', JSON_OBJECT('type', 'string'),
|
||||
'species', JSON_OBJECT('type', 'string'),
|
||||
'ownerFirstName', JSON_OBJECT('type', 'string'),
|
||||
'ownerLastName', JSON_OBJECT('type', 'string'),
|
||||
'ownerEmail', JSON_OBJECT('type', 'string'),
|
||||
'accountName', JSON_OBJECT('type', 'string')
|
||||
),
|
||||
'required', JSON_ARRAY('petName', 'species', 'ownerFirstName', 'ownerLastName')
|
||||
)
|
||||
)),
|
||||
JSON_OBJECT('id', 'find_account', 'type', 'ToolNode', 'data', JSON_OBJECT(
|
||||
'label', 'Find Account',
|
||||
'toolName', 'findAccount',
|
||||
'argsTemplate', JSON_OBJECT('name', '{{state.accountName}}', 'email', '{{state.ownerEmail}}'),
|
||||
'outputMapping', JSON_OBJECT('found', 'accountFound', 'accountId', 'accountId')
|
||||
)),
|
||||
JSON_OBJECT('id', 'create_account', 'type', 'ToolNode', 'data', JSON_OBJECT(
|
||||
'label', 'Create Account',
|
||||
'toolName', 'createAccount',
|
||||
'argsTemplate', JSON_OBJECT('name', '{{state.accountName}}', 'email', '{{state.ownerEmail}}'),
|
||||
'outputMapping', JSON_OBJECT('accountId', 'accountId')
|
||||
)),
|
||||
JSON_OBJECT('id', 'find_contact', 'type', 'ToolNode', 'data', JSON_OBJECT(
|
||||
'label', 'Find Contact',
|
||||
'toolName', 'findContact',
|
||||
'argsTemplate', JSON_OBJECT(
|
||||
'firstName', '{{state.ownerFirstName}}',
|
||||
'lastName', '{{state.ownerLastName}}',
|
||||
'email', '{{state.ownerEmail}}',
|
||||
'accountId', '{{state.accountId}}'
|
||||
),
|
||||
'outputMapping', JSON_OBJECT('found', 'contactFound', 'contactId', 'contactId')
|
||||
)),
|
||||
JSON_OBJECT('id', 'create_contact', 'type', 'ToolNode', 'data', JSON_OBJECT(
|
||||
'label', 'Create Contact',
|
||||
'toolName', 'createContact',
|
||||
'argsTemplate', JSON_OBJECT(
|
||||
'firstName', '{{state.ownerFirstName}}',
|
||||
'lastName', '{{state.ownerLastName}}',
|
||||
'email', '{{state.ownerEmail}}',
|
||||
'accountId', '{{state.accountId}}'
|
||||
),
|
||||
'outputMapping', JSON_OBJECT('contactId', 'contactId')
|
||||
)),
|
||||
JSON_OBJECT('id', 'create_pet', 'type', 'ToolNode', 'data', JSON_OBJECT(
|
||||
'label', 'Create Pet',
|
||||
'toolName', 'createPet',
|
||||
'argsTemplate', JSON_OBJECT(
|
||||
'name', '{{state.petName}}',
|
||||
'species', '{{state.species}}',
|
||||
'ownerId', '{{state.contactId}}'
|
||||
),
|
||||
'outputMapping', JSON_OBJECT('petId', 'petId')
|
||||
)),
|
||||
JSON_OBJECT('id', 'end', 'type', 'End', 'data', JSON_OBJECT('label', 'End'))
|
||||
),
|
||||
'edges', JSON_ARRAY(
|
||||
JSON_OBJECT('id', 'e1', 'source', 'start', 'target', 'extract_info'),
|
||||
JSON_OBJECT('id', 'e2', 'source', 'extract_info', 'target', 'find_account'),
|
||||
JSON_OBJECT('id', 'e3', 'source', 'find_account', 'target', 'find_contact', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'accountFound'), true))),
|
||||
JSON_OBJECT('id', 'e4', 'source', 'find_account', 'target', 'create_account', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'accountFound'), false))),
|
||||
JSON_OBJECT('id', 'e5', 'source', 'create_account', 'target', 'find_contact'),
|
||||
JSON_OBJECT('id', 'e6', 'source', 'find_contact', 'target', 'create_pet', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'contactFound'), true))),
|
||||
JSON_OBJECT('id', 'e7', 'source', 'find_contact', 'target', 'create_contact', 'condition', JSON_OBJECT('==', JSON_ARRAY(JSON_OBJECT('var', 'contactFound'), false))),
|
||||
JSON_OBJECT('id', 'e8', 'source', 'create_contact', 'target', 'create_pet'),
|
||||
JSON_OBJECT('id', 'e9', 'source', 'create_pet', 'target', 'end')
|
||||
)
|
||||
),
|
||||
@user_id
|
||||
);
|
||||
|
||||
-- Insert tool allowlist
|
||||
INSERT INTO ai_tool_configs (id, tool_name, enabled)
|
||||
VALUES
|
||||
(UUID(), 'findAccount', true),
|
||||
(UUID(), 'createAccount', true),
|
||||
(UUID(), 'findContact', true),
|
||||
(UUID(), 'createContact', true),
|
||||
(UUID(), 'createPet', true)
|
||||
ON DUPLICATE KEY UPDATE enabled = true;
|
||||
|
||||
SELECT 'Demo process inserted successfully!' as result;
|
||||
@@ -1,101 +0,0 @@
|
||||
/**
|
||||
* Migration: Add authorization system (CASL + polymorphic sharing)
|
||||
*
|
||||
* This migration adds:
|
||||
* 1. Access control fields to object_definitions
|
||||
* 2. Field-level permissions to field_definitions
|
||||
* 3. role_rules table for CASL rules storage
|
||||
* 4. record_shares table for polymorphic per-record sharing
|
||||
*/
|
||||
|
||||
exports.up = async function(knex) {
|
||||
// 1. Add access control fields to object_definitions
|
||||
await knex.schema.table('object_definitions', (table) => {
|
||||
table.enum('access_model', ['public', 'owner', 'mixed']).defaultTo('owner');
|
||||
table.boolean('public_read').defaultTo(false);
|
||||
table.boolean('public_create').defaultTo(false);
|
||||
table.boolean('public_update').defaultTo(false);
|
||||
table.boolean('public_delete').defaultTo(false);
|
||||
table.string('owner_field', 100).defaultTo('ownerId');
|
||||
});
|
||||
|
||||
// 2. Add field-level permission columns to field_definitions
|
||||
await knex.schema.table('field_definitions', (table) => {
|
||||
table.boolean('default_readable').defaultTo(true);
|
||||
table.boolean('default_writable').defaultTo(true);
|
||||
});
|
||||
|
||||
// 3. Create role_rules table for storing CASL rules per role
|
||||
await knex.schema.createTable('role_rules', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('role_id').notNullable();
|
||||
table.json('rules_json').notNullable(); // Array of CASL rules
|
||||
table.timestamps(true, true);
|
||||
|
||||
// Foreign keys
|
||||
table.foreign('role_id')
|
||||
.references('id')
|
||||
.inTable('roles')
|
||||
.onDelete('CASCADE');
|
||||
|
||||
// Indexes
|
||||
table.index('role_id');
|
||||
});
|
||||
|
||||
// 4. Create record_shares table for polymorphic per-record sharing
|
||||
await knex.schema.createTable('record_shares', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('object_definition_id').notNullable();
|
||||
table.string('record_id', 255).notNullable(); // String to support UUID/int uniformly
|
||||
table.uuid('grantee_user_id').notNullable();
|
||||
table.uuid('granted_by_user_id').notNullable();
|
||||
table.json('actions').notNullable(); // Array like ["read"], ["read","update"]
|
||||
table.json('fields').nullable(); // Optional field scoping
|
||||
table.timestamp('expires_at').nullable();
|
||||
table.timestamp('revoked_at').nullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
|
||||
// Foreign keys
|
||||
table.foreign('object_definition_id')
|
||||
.references('id')
|
||||
.inTable('object_definitions')
|
||||
.onDelete('CASCADE');
|
||||
|
||||
table.foreign('grantee_user_id')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
|
||||
table.foreign('granted_by_user_id')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
|
||||
// Indexes for efficient querying
|
||||
table.index(['grantee_user_id', 'object_definition_id']);
|
||||
table.index(['object_definition_id', 'record_id']);
|
||||
table.unique(['object_definition_id', 'record_id', 'grantee_user_id']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function(knex) {
|
||||
// Drop tables in reverse order
|
||||
await knex.schema.dropTableIfExists('record_shares');
|
||||
await knex.schema.dropTableIfExists('role_rules');
|
||||
|
||||
// Remove columns from field_definitions
|
||||
await knex.schema.table('field_definitions', (table) => {
|
||||
table.dropColumn('default_readable');
|
||||
table.dropColumn('default_writable');
|
||||
});
|
||||
|
||||
// Remove columns from object_definitions
|
||||
await knex.schema.table('object_definitions', (table) => {
|
||||
table.dropColumn('access_model');
|
||||
table.dropColumn('public_read');
|
||||
table.dropColumn('public_create');
|
||||
table.dropColumn('public_update');
|
||||
table.dropColumn('public_delete');
|
||||
table.dropColumn('owner_field');
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,103 @@
|
||||
exports.up = function (knex) {
|
||||
return knex.schema
|
||||
// Add orgWideDefault to object_definitions
|
||||
.alterTable('object_definitions', (table) => {
|
||||
table
|
||||
.enum('orgWideDefault', ['private', 'public_read', 'public_read_write'])
|
||||
.defaultTo('private')
|
||||
.notNullable();
|
||||
})
|
||||
// Create role_object_permissions table
|
||||
.createTable('role_object_permissions', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('roleId').notNullable();
|
||||
table.uuid('objectDefinitionId').notNullable();
|
||||
table.boolean('canCreate').defaultTo(false);
|
||||
table.boolean('canRead').defaultTo(false);
|
||||
table.boolean('canEdit').defaultTo(false);
|
||||
table.boolean('canDelete').defaultTo(false);
|
||||
table.boolean('canViewAll').defaultTo(false);
|
||||
table.boolean('canModifyAll').defaultTo(false);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('roleId')
|
||||
.references('id')
|
||||
.inTable('roles')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('objectDefinitionId')
|
||||
.references('id')
|
||||
.inTable('object_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table.unique(['roleId', 'objectDefinitionId']);
|
||||
table.index(['roleId']);
|
||||
table.index(['objectDefinitionId']);
|
||||
})
|
||||
// Create role_field_permissions table
|
||||
.createTable('role_field_permissions', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('roleId').notNullable();
|
||||
table.uuid('fieldDefinitionId').notNullable();
|
||||
table.boolean('canRead').defaultTo(true);
|
||||
table.boolean('canEdit').defaultTo(true);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('roleId')
|
||||
.references('id')
|
||||
.inTable('roles')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('fieldDefinitionId')
|
||||
.references('id')
|
||||
.inTable('field_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table.unique(['roleId', 'fieldDefinitionId']);
|
||||
table.index(['roleId']);
|
||||
table.index(['fieldDefinitionId']);
|
||||
})
|
||||
// Create record_shares table for sharing specific records
|
||||
.createTable('record_shares', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.uuid('objectDefinitionId').notNullable();
|
||||
table.uuid('recordId').notNullable();
|
||||
table.uuid('granteeUserId').notNullable();
|
||||
table.uuid('grantedByUserId').notNullable();
|
||||
table.json('accessLevel').notNullable(); // { canRead, canEdit, canDelete }
|
||||
table.timestamp('expiresAt').nullable();
|
||||
table.timestamp('revokedAt').nullable();
|
||||
table.timestamp('createdAt').defaultTo(knex.fn.now());
|
||||
table.timestamp('updatedAt').defaultTo(knex.fn.now());
|
||||
|
||||
table
|
||||
.foreign('objectDefinitionId')
|
||||
.references('id')
|
||||
.inTable('object_definitions')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('granteeUserId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
table
|
||||
.foreign('grantedByUserId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('CASCADE');
|
||||
table.index(['objectDefinitionId', 'recordId']);
|
||||
table.index(['granteeUserId']);
|
||||
table.index(['expiresAt']);
|
||||
table.index(['revokedAt']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = function (knex) {
|
||||
return knex.schema
|
||||
.dropTableIfExists('record_shares')
|
||||
.dropTableIfExists('role_field_permissions')
|
||||
.dropTableIfExists('role_object_permissions')
|
||||
.alterTable('object_definitions', (table) => {
|
||||
table.dropColumn('orgWideDefault');
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* @param { import("knex").Knex } knex
|
||||
* @returns { Promise<void> }
|
||||
*/
|
||||
exports.up = async function (knex) {
|
||||
// Create calls table for tracking voice calls
|
||||
await knex.schema.createTable('calls', (table) => {
|
||||
table.string('id', 36).primary();
|
||||
table.string('call_sid', 100).unique().notNullable().comment('Twilio call SID');
|
||||
table.enum('direction', ['inbound', 'outbound']).notNullable();
|
||||
table.string('from_number', 20).notNullable();
|
||||
table.string('to_number', 20).notNullable();
|
||||
table.enum('status', [
|
||||
'queued',
|
||||
'ringing',
|
||||
'in-progress',
|
||||
'completed',
|
||||
'busy',
|
||||
'failed',
|
||||
'no-answer',
|
||||
'canceled'
|
||||
]).notNullable().defaultTo('queued');
|
||||
table.integer('duration_seconds').unsigned().nullable();
|
||||
table.string('recording_url', 500).nullable();
|
||||
table.text('ai_transcript').nullable().comment('Full transcript from OpenAI');
|
||||
table.text('ai_summary').nullable().comment('AI-generated summary');
|
||||
table.json('ai_insights').nullable().comment('Structured insights from AI');
|
||||
table.string('user_id', 36).notNullable().comment('User who handled the call');
|
||||
table.timestamp('started_at').nullable();
|
||||
table.timestamp('ended_at').nullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
|
||||
// Indexes
|
||||
table.index('call_sid');
|
||||
table.index('user_id');
|
||||
table.index('status');
|
||||
table.index('direction');
|
||||
table.index(['created_at', 'user_id']);
|
||||
|
||||
// Foreign key to users table
|
||||
table.foreign('user_id').references('id').inTable('users').onDelete('CASCADE');
|
||||
});
|
||||
|
||||
console.log('✅ Created calls table');
|
||||
};
|
||||
|
||||
/**
|
||||
* @param { import("knex").Knex } knex
|
||||
* @returns { Promise<void> }
|
||||
*/
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('calls');
|
||||
console.log('✅ Dropped calls table');
|
||||
};
|
||||
@@ -0,0 +1,207 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('contacts', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.string('firstName', 100).notNullable();
|
||||
table.string('lastName', 100).notNullable();
|
||||
table.uuid('accountId').notNullable();
|
||||
table.timestamps(true, true);
|
||||
|
||||
table
|
||||
.foreign('accountId')
|
||||
.references('id')
|
||||
.inTable('accounts')
|
||||
.onDelete('CASCADE');
|
||||
table.index(['accountId']);
|
||||
table.index(['lastName', 'firstName']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('contact_details', (table) => {
|
||||
table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
|
||||
table.string('relatedObjectType', 100).notNullable();
|
||||
table.uuid('relatedObjectId').notNullable();
|
||||
table.string('detailType', 50).notNullable();
|
||||
table.string('label', 100);
|
||||
table.text('value').notNullable();
|
||||
table.boolean('isPrimary').defaultTo(false);
|
||||
table.timestamps(true, true);
|
||||
|
||||
table.index(['relatedObjectType', 'relatedObjectId']);
|
||||
table.index(['detailType']);
|
||||
});
|
||||
|
||||
const [contactObjectId] = await knex('object_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
apiName: 'Contact',
|
||||
label: 'Contact',
|
||||
pluralLabel: 'Contacts',
|
||||
description: 'Standard Contact object',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
const contactObjectDefId =
|
||||
contactObjectId ||
|
||||
(await knex('object_definitions').where('apiName', 'Contact').first()).id;
|
||||
|
||||
await knex('field_definitions').insert([
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'firstName',
|
||||
label: 'First Name',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 1,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'lastName',
|
||||
label: 'Last Name',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 2,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObjectDefId,
|
||||
apiName: 'accountId',
|
||||
label: 'Account',
|
||||
type: 'Reference',
|
||||
referenceObject: 'Account',
|
||||
isRequired: true,
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 3,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
]);
|
||||
|
||||
const [contactDetailObjectId] = await knex('object_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
apiName: 'ContactDetail',
|
||||
label: 'Contact Detail',
|
||||
pluralLabel: 'Contact Details',
|
||||
description: 'Polymorphic contact detail object',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
const contactDetailObjectDefId =
|
||||
contactDetailObjectId ||
|
||||
(await knex('object_definitions').where('apiName', 'ContactDetail').first())
|
||||
.id;
|
||||
|
||||
const contactDetailRelationObjects = ['Account', 'Contact']
|
||||
|
||||
await knex('field_definitions').insert([
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'relatedObjectType',
|
||||
label: 'Related Object Type',
|
||||
type: 'PICKLIST',
|
||||
length: 100,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 1,
|
||||
ui_metadata: JSON.stringify({
|
||||
options: contactDetailRelationObjects.map((value) => ({ label: value, value })),
|
||||
}),
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'relatedObjectId',
|
||||
label: 'Related Object ID',
|
||||
type: 'LOOKUP',
|
||||
length: 36,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 2,
|
||||
ui_metadata: JSON.stringify({
|
||||
relationObjects: contactDetailRelationObjects,
|
||||
relationTypeField: 'relatedObjectType',
|
||||
relationDisplayField: 'name',
|
||||
}),
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'detailType',
|
||||
label: 'Detail Type',
|
||||
type: 'String',
|
||||
length: 50,
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 3,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'label',
|
||||
label: 'Label',
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 4,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'value',
|
||||
label: 'Value',
|
||||
type: 'Text',
|
||||
isRequired: true,
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 5,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
{
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactDetailObjectDefId,
|
||||
apiName: 'isPrimary',
|
||||
label: 'Primary',
|
||||
type: 'Boolean',
|
||||
isSystem: false,
|
||||
isCustom: false,
|
||||
displayOrder: 6,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
},
|
||||
]);
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('contact_details');
|
||||
await knex.schema.dropTableIfExists('contacts');
|
||||
};
|
||||
@@ -0,0 +1,101 @@
|
||||
exports.up = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
const relationObjects = ['Account', 'Contact'];
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectType',
|
||||
})
|
||||
.update({
|
||||
type: 'PICKLIST',
|
||||
length: 100,
|
||||
isSystem: false,
|
||||
ui_metadata: JSON.stringify({
|
||||
options: relationObjects.map((value) => ({ label: value, value })),
|
||||
}),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectId',
|
||||
})
|
||||
.update({
|
||||
type: 'LOOKUP',
|
||||
length: 36,
|
||||
isSystem: false,
|
||||
ui_metadata: JSON.stringify({
|
||||
relationObjects,
|
||||
relationTypeField: 'relatedObjectType',
|
||||
relationDisplayField: 'name',
|
||||
}),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.whereIn('apiName', [
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.andWhere({ objectDefinitionId: contactDetailObject.id })
|
||||
.update({
|
||||
isSystem: false,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectType',
|
||||
})
|
||||
.update({
|
||||
type: 'String',
|
||||
length: 100,
|
||||
isSystem: true,
|
||||
ui_metadata: null,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactDetailObject.id,
|
||||
apiName: 'relatedObjectId',
|
||||
})
|
||||
.update({
|
||||
type: 'String',
|
||||
length: 36,
|
||||
isSystem: true,
|
||||
ui_metadata: null,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
await knex('field_definitions')
|
||||
.whereIn('apiName', [
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.andWhere({ objectDefinitionId: contactDetailObject.id })
|
||||
.update({
|
||||
isSystem: true,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,45 @@
|
||||
exports.up = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({ objectDefinitionId: contactDetailObject.id })
|
||||
.whereIn('apiName', [
|
||||
'relatedObjectType',
|
||||
'relatedObjectId',
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.update({
|
||||
isSystem: false,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactDetailObject = await knex('object_definitions')
|
||||
.where({ apiName: 'ContactDetail' })
|
||||
.first();
|
||||
|
||||
if (!contactDetailObject) return;
|
||||
|
||||
await knex('field_definitions')
|
||||
.where({ objectDefinitionId: contactDetailObject.id })
|
||||
.whereIn('apiName', [
|
||||
'relatedObjectType',
|
||||
'relatedObjectId',
|
||||
'detailType',
|
||||
'label',
|
||||
'value',
|
||||
'isPrimary',
|
||||
])
|
||||
.update({
|
||||
isSystem: true,
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,62 @@
|
||||
exports.up = async function (knex) {
|
||||
// Add ownerId column to contacts
|
||||
await knex.schema.alterTable('contacts', (table) => {
|
||||
table.uuid('ownerId');
|
||||
table
|
||||
.foreign('ownerId')
|
||||
.references('id')
|
||||
.inTable('users')
|
||||
.onDelete('SET NULL');
|
||||
table.index(['ownerId']);
|
||||
});
|
||||
|
||||
// Add ownerId field definition metadata for Contact object
|
||||
const contactObject = await knex('object_definitions')
|
||||
.where('apiName', 'Contact')
|
||||
.first();
|
||||
|
||||
if (contactObject) {
|
||||
const existingField = await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
})
|
||||
.first();
|
||||
|
||||
if (!existingField) {
|
||||
await knex('field_definitions').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
label: 'Owner',
|
||||
type: 'Reference',
|
||||
referenceObject: 'User',
|
||||
isSystem: true,
|
||||
isCustom: false,
|
||||
displayOrder: 4,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
const contactObject = await knex('object_definitions')
|
||||
.where('apiName', 'Contact')
|
||||
.first();
|
||||
|
||||
if (contactObject) {
|
||||
await knex('field_definitions')
|
||||
.where({
|
||||
objectDefinitionId: contactObject.id,
|
||||
apiName: 'ownerId',
|
||||
})
|
||||
.delete();
|
||||
}
|
||||
|
||||
await knex.schema.alterTable('contacts', (table) => {
|
||||
table.dropForeign(['ownerId']);
|
||||
table.dropColumn('ownerId');
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,72 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('ai_processes', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('name').notNullable();
|
||||
table.text('description');
|
||||
table.integer('latest_version').notNullable().defaultTo(1);
|
||||
table.string('created_by').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_process_versions', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('process_id').notNullable();
|
||||
table.integer('version').notNullable();
|
||||
table.json('graph_json').notNullable();
|
||||
table.json('compiled_json').notNullable();
|
||||
table.string('created_by').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.unique(['process_id', 'version']);
|
||||
table.index(['process_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_process_runs', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('process_id').notNullable();
|
||||
table.integer('version').notNullable();
|
||||
table.string('status').notNullable();
|
||||
table.json('input_json').notNullable();
|
||||
table.json('output_json');
|
||||
table.json('error_json');
|
||||
table.json('state_json');
|
||||
table.string('current_node_id');
|
||||
table.timestamp('started_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('ended_at');
|
||||
table.index(['process_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_chat_sessions', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('user_id').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['user_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_chat_messages', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('session_id').notNullable();
|
||||
table.string('role').notNullable();
|
||||
table.text('content').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['session_id']);
|
||||
});
|
||||
|
||||
await knex.schema.createTable('ai_audit_events', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('run_id').notNullable();
|
||||
table.string('event_type').notNullable();
|
||||
table.json('payload_json').notNullable();
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.index(['run_id']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('ai_audit_events');
|
||||
await knex.schema.dropTableIfExists('ai_chat_messages');
|
||||
await knex.schema.dropTableIfExists('ai_chat_sessions');
|
||||
await knex.schema.dropTableIfExists('ai_process_runs');
|
||||
await knex.schema.dropTableIfExists('ai_process_versions');
|
||||
await knex.schema.dropTableIfExists('ai_processes');
|
||||
};
|
||||
@@ -0,0 +1,14 @@
|
||||
exports.up = async function (knex) {
|
||||
await knex.schema.createTable('ai_tool_configs', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('tool_name').notNullable().unique();
|
||||
table.boolean('enabled').notNullable().defaultTo(true);
|
||||
table.json('config_json');
|
||||
table.timestamp('created_at').defaultTo(knex.fn.now());
|
||||
table.timestamp('updated_at').defaultTo(knex.fn.now());
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async function (knex) {
|
||||
await knex.schema.dropTableIfExists('ai_tool_configs');
|
||||
};
|
||||
1390
backend/package-lock.json
generated
1390
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -23,10 +23,15 @@
|
||||
"migrate:rollback": "knex migrate:rollback --knexfile=knexfile.js",
|
||||
"migrate:status": "ts-node -r tsconfig-paths/register scripts/check-migration-status.ts",
|
||||
"migrate:tenant": "ts-node -r tsconfig-paths/register scripts/migrate-tenant.ts",
|
||||
"migrate:all-tenants": "ts-node -r tsconfig-paths/register scripts/migrate-all-tenants.ts"
|
||||
"migrate:all-tenants": "ts-node -r tsconfig-paths/register scripts/migrate-all-tenants.ts",
|
||||
"seed:demo-process": "ts-node -r tsconfig-paths/register scripts/seed-demo-process.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@casl/ability": "^6.7.5",
|
||||
"@fastify/websocket": "^10.0.1",
|
||||
"@langchain/core": "^1.1.12",
|
||||
"@langchain/langgraph": "^1.0.15",
|
||||
"@langchain/openai": "^1.2.1",
|
||||
"@nestjs/bullmq": "^10.1.0",
|
||||
"@nestjs/common": "^10.3.0",
|
||||
"@nestjs/config": "^3.1.1",
|
||||
@@ -34,20 +39,31 @@
|
||||
"@nestjs/jwt": "^10.2.0",
|
||||
"@nestjs/passport": "^10.0.3",
|
||||
"@nestjs/platform-fastify": "^10.3.0",
|
||||
"@nestjs/platform-socket.io": "^10.4.20",
|
||||
"@nestjs/serve-static": "^4.0.2",
|
||||
"@nestjs/websockets": "^10.4.20",
|
||||
"@prisma/client": "^5.8.0",
|
||||
"@types/json-logic-js": "^2.0.8",
|
||||
"ajv": "^8.17.1",
|
||||
"ajv-formats": "^3.0.1",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.1.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.1",
|
||||
"ioredis": "^5.3.2",
|
||||
"json-logic-js": "^2.0.5",
|
||||
"knex": "^3.1.0",
|
||||
"langchain": "^1.2.7",
|
||||
"mysql2": "^3.15.3",
|
||||
"objection": "^3.1.5",
|
||||
"objection-authorize": "^5.0.2",
|
||||
"openai": "^6.15.0",
|
||||
"passport": "^0.7.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"reflect-metadata": "^0.2.1",
|
||||
"rxjs": "^7.8.1"
|
||||
"rxjs": "^7.8.1",
|
||||
"socket.io": "^4.8.3",
|
||||
"twilio": "^5.11.1",
|
||||
"ws": "^8.18.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/cli": "^10.3.0",
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE `tenants` ADD COLUMN `integrationsConfig` JSON NULL;
|
||||
@@ -32,6 +32,7 @@ model Tenant {
|
||||
dbName String // Database name
|
||||
dbUsername String // Database username
|
||||
dbPassword String // Encrypted database password
|
||||
integrationsConfig Json? // Encrypted JSON config for external services (Twilio, OpenAI, etc.)
|
||||
status String @default("active") // active, suspended, deleted
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@ -26,8 +26,6 @@ model User {
|
||||
|
||||
userRoles UserRole[]
|
||||
accounts Account[]
|
||||
sharesGranted RecordShare[] @relation("GrantedShares")
|
||||
sharesReceived RecordShare[] @relation("ReceivedShares")
|
||||
|
||||
@@map("users")
|
||||
}
|
||||
@@ -43,7 +41,6 @@ model Role {
|
||||
|
||||
userRoles UserRole[]
|
||||
rolePermissions RolePermission[]
|
||||
roleRules RoleRule[]
|
||||
|
||||
@@unique([name, guardName])
|
||||
@@map("roles")
|
||||
@@ -93,20 +90,6 @@ model RolePermission {
|
||||
@@map("role_permissions")
|
||||
}
|
||||
|
||||
// CASL Rules for Roles
|
||||
model RoleRule {
|
||||
id String @id @default(uuid())
|
||||
roleId String
|
||||
rulesJson Json @map("rules_json")
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
role Role @relation(fields: [roleId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([roleId])
|
||||
@@map("role_rules")
|
||||
}
|
||||
|
||||
// Object Definition (Metadata)
|
||||
model ObjectDefinition {
|
||||
id String @id @default(uuid())
|
||||
@@ -116,19 +99,11 @@ model ObjectDefinition {
|
||||
description String? @db.Text
|
||||
isSystem Boolean @default(false)
|
||||
isCustom Boolean @default(true)
|
||||
// Authorization fields
|
||||
accessModel String @default("owner") // 'public' | 'owner' | 'mixed'
|
||||
publicRead Boolean @default(false)
|
||||
publicCreate Boolean @default(false)
|
||||
publicUpdate Boolean @default(false)
|
||||
publicDelete Boolean @default(false)
|
||||
ownerField String @default("ownerId")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
|
||||
fields FieldDefinition[]
|
||||
pages AppPage[]
|
||||
recordShares RecordShare[]
|
||||
|
||||
@@map("object_definitions")
|
||||
}
|
||||
@@ -151,9 +126,6 @@ model FieldDefinition {
|
||||
isCustom Boolean @default(true)
|
||||
displayOrder Int @default(0)
|
||||
uiMetadata Json? @map("ui_metadata")
|
||||
// Field-level permissions
|
||||
defaultReadable Boolean @default(true)
|
||||
defaultWritable Boolean @default(true)
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
|
||||
@@ -164,29 +136,6 @@ model FieldDefinition {
|
||||
@@map("field_definitions")
|
||||
}
|
||||
|
||||
// Polymorphic per-record sharing
|
||||
model RecordShare {
|
||||
id String @id @default(uuid())
|
||||
objectDefinitionId String
|
||||
recordId String
|
||||
granteeUserId String
|
||||
grantedByUserId String
|
||||
actions Json // Array like ["read"], ["read","update"]
|
||||
fields Json? // Optional field scoping
|
||||
expiresAt DateTime? @map("expires_at")
|
||||
revokedAt DateTime? @map("revoked_at")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
objectDefinition ObjectDefinition @relation(fields: [objectDefinitionId], references: [id], onDelete: Cascade)
|
||||
granteeUser User @relation("ReceivedShares", fields: [granteeUserId], references: [id], onDelete: Cascade)
|
||||
grantedByUser User @relation("GrantedShares", fields: [grantedByUserId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([objectDefinitionId, recordId, granteeUserId])
|
||||
@@index([granteeUserId, objectDefinitionId])
|
||||
@@index([objectDefinitionId, recordId])
|
||||
@@map("record_shares")
|
||||
}
|
||||
|
||||
// Example static object: Account
|
||||
model Account {
|
||||
id String @id @default(uuid())
|
||||
@@ -197,11 +146,136 @@ model Account {
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
owner User @relation(fields: [ownerId], references: [id])
|
||||
contacts Contact[]
|
||||
|
||||
@@index([ownerId])
|
||||
@@map("accounts")
|
||||
}
|
||||
|
||||
model Contact {
|
||||
id String @id @default(uuid())
|
||||
firstName String
|
||||
lastName String
|
||||
accountId String
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
account Account @relation(fields: [accountId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([accountId])
|
||||
@@map("contacts")
|
||||
}
|
||||
|
||||
model ContactDetail {
|
||||
id String @id @default(uuid())
|
||||
relatedObjectType String
|
||||
relatedObjectId String
|
||||
detailType String
|
||||
label String?
|
||||
value String
|
||||
isPrimary Boolean @default(false)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([relatedObjectType, relatedObjectId])
|
||||
@@map("contact_details")
|
||||
}
|
||||
|
||||
// AI Process Builder + Chat Orchestrator
|
||||
model AiProcess {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
name String
|
||||
description String? @db.Text
|
||||
latestVersion Int @default(1) @map("latest_version")
|
||||
createdBy String @map("created_by")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
|
||||
versions AiProcessVersion[]
|
||||
runs AiProcessRun[]
|
||||
|
||||
@@index([tenantId])
|
||||
@@map("ai_processes")
|
||||
}
|
||||
|
||||
model AiProcessVersion {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
processId String @map("process_id")
|
||||
version Int
|
||||
graphJson Json @map("graph_json")
|
||||
compiledJson Json @map("compiled_json")
|
||||
createdBy String @map("created_by")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
process AiProcess @relation(fields: [processId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([processId, version])
|
||||
@@index([tenantId])
|
||||
@@map("ai_process_versions")
|
||||
}
|
||||
|
||||
model AiProcessRun {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
processId String @map("process_id")
|
||||
version Int
|
||||
status String
|
||||
inputJson Json @map("input_json")
|
||||
outputJson Json? @map("output_json")
|
||||
errorJson Json? @map("error_json")
|
||||
stateJson Json? @map("state_json")
|
||||
currentNodeId String? @map("current_node_id")
|
||||
startedAt DateTime @default(now()) @map("started_at")
|
||||
endedAt DateTime? @map("ended_at")
|
||||
|
||||
process AiProcess @relation(fields: [processId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([processId])
|
||||
@@map("ai_process_runs")
|
||||
}
|
||||
|
||||
model AiChatSession {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
userId String @map("user_id")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
messages AiChatMessage[]
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([userId])
|
||||
@@map("ai_chat_sessions")
|
||||
}
|
||||
|
||||
model AiChatMessage {
|
||||
id String @id @default(uuid())
|
||||
sessionId String @map("session_id")
|
||||
role String
|
||||
content String @db.Text
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
session AiChatSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([sessionId])
|
||||
@@map("ai_chat_messages")
|
||||
}
|
||||
|
||||
model AiAuditEvent {
|
||||
id String @id @default(uuid())
|
||||
tenantId String @map("tenant_id")
|
||||
runId String @map("run_id")
|
||||
eventType String @map("event_type")
|
||||
payloadJson Json @map("payload_json")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
|
||||
@@index([tenantId])
|
||||
@@index([runId])
|
||||
@@map("ai_audit_events")
|
||||
}
|
||||
|
||||
// Application Builder
|
||||
model App {
|
||||
id String @id @default(uuid())
|
||||
|
||||
181
backend/scripts/seed-default-roles.ts
Normal file
181
backend/scripts/seed-default-roles.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import { Knex } from 'knex';
|
||||
import * as knexLib from 'knex';
|
||||
|
||||
/**
|
||||
* Create a Knex connection for tenant database
|
||||
*/
|
||||
function createKnexConnection(database: string): Knex {
|
||||
return knexLib.default({
|
||||
client: 'mysql2',
|
||||
connection: {
|
||||
host: process.env.DB_HOST || 'db',
|
||||
port: parseInt(process.env.DB_PORT || '3306'),
|
||||
user: 'root',
|
||||
password: 'asjdnfqTash37faggT',
|
||||
database: database,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
interface RoleWithPermissions {
|
||||
name: string;
|
||||
description: string;
|
||||
objectPermissions: {
|
||||
[objectApiName: string]: {
|
||||
canCreate: boolean;
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
canViewAll: boolean;
|
||||
canModifyAll: boolean;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
const DEFAULT_ROLES: RoleWithPermissions[] = [
|
||||
{
|
||||
name: 'System Administrator',
|
||||
description: 'Full access to all objects and records. Can view and modify all data.',
|
||||
objectPermissions: {
|
||||
'*': {
|
||||
canCreate: true,
|
||||
canRead: true,
|
||||
canEdit: true,
|
||||
canDelete: true,
|
||||
canViewAll: true,
|
||||
canModifyAll: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Standard User',
|
||||
description: 'Can create, read, edit, and delete own records. Respects OWD settings.',
|
||||
objectPermissions: {
|
||||
'*': {
|
||||
canCreate: true,
|
||||
canRead: true,
|
||||
canEdit: true,
|
||||
canDelete: true,
|
||||
canViewAll: false,
|
||||
canModifyAll: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Read Only',
|
||||
description: 'Can only read records based on OWD settings. No create, edit, or delete.',
|
||||
objectPermissions: {
|
||||
'*': {
|
||||
canCreate: false,
|
||||
canRead: true,
|
||||
canEdit: false,
|
||||
canDelete: false,
|
||||
canViewAll: false,
|
||||
canModifyAll: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
async function seedRolesForTenant(knex: Knex, tenantName: string) {
|
||||
console.log(`\n🌱 Seeding roles for tenant: ${tenantName}`);
|
||||
|
||||
// Get all object definitions
|
||||
const objectDefinitions = await knex('object_definitions').select('id', 'apiName');
|
||||
|
||||
for (const roleData of DEFAULT_ROLES) {
|
||||
// Check if role already exists
|
||||
const existingRole = await knex('roles')
|
||||
.where({ name: roleData.name })
|
||||
.first();
|
||||
|
||||
let roleId: string;
|
||||
|
||||
if (existingRole) {
|
||||
console.log(` ℹ️ Role "${roleData.name}" already exists, skipping...`);
|
||||
roleId = existingRole.id;
|
||||
} else {
|
||||
// Create role
|
||||
await knex('roles').insert({
|
||||
name: roleData.name,
|
||||
guardName: 'api',
|
||||
description: roleData.description,
|
||||
});
|
||||
|
||||
// Get the inserted role
|
||||
const newRole = await knex('roles')
|
||||
.where({ name: roleData.name })
|
||||
.first();
|
||||
|
||||
roleId = newRole.id;
|
||||
console.log(` ✅ Created role: ${roleData.name}`);
|
||||
}
|
||||
|
||||
// Create object permissions for all objects
|
||||
const wildcardPermissions = roleData.objectPermissions['*'];
|
||||
|
||||
for (const objectDef of objectDefinitions) {
|
||||
// Check if permission already exists
|
||||
const existingPermission = await knex('role_object_permissions')
|
||||
.where({
|
||||
roleId: roleId,
|
||||
objectDefinitionId: objectDef.id,
|
||||
})
|
||||
.first();
|
||||
|
||||
if (!existingPermission) {
|
||||
await knex('role_object_permissions').insert({
|
||||
roleId: roleId,
|
||||
objectDefinitionId: objectDef.id,
|
||||
canCreate: wildcardPermissions.canCreate,
|
||||
canRead: wildcardPermissions.canRead,
|
||||
canEdit: wildcardPermissions.canEdit,
|
||||
canDelete: wildcardPermissions.canDelete,
|
||||
canViewAll: wildcardPermissions.canViewAll,
|
||||
canModifyAll: wildcardPermissions.canModifyAll,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
console.log(` 📋 Set permissions for ${objectDefinitions.length} objects`);
|
||||
}
|
||||
}
|
||||
|
||||
async function seedAllTenants() {
|
||||
console.log('🚀 Starting role seeding for all tenants...\n');
|
||||
|
||||
// For now, seed the main tenant database
|
||||
const databases = ['tenant_tenant1'];
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
for (const database of databases) {
|
||||
try {
|
||||
const knex = createKnexConnection(database);
|
||||
await seedRolesForTenant(knex, database);
|
||||
await knex.destroy();
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error(`❌ ${database}: Seeding failed:`, error.message);
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n============================================================');
|
||||
console.log('📊 Seeding Summary');
|
||||
console.log('============================================================');
|
||||
console.log(`✅ Successful: ${successCount}`);
|
||||
console.log(`❌ Failed: ${errorCount}`);
|
||||
|
||||
if (errorCount === 0) {
|
||||
console.log('\n🎉 All tenant roles seeded successfully!');
|
||||
}
|
||||
}
|
||||
|
||||
seedAllTenants()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error('Unhandled error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
332
backend/scripts/seed-demo-process.ts
Normal file
332
backend/scripts/seed-demo-process.ts
Normal file
@@ -0,0 +1,332 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { AiProcess, AiProcessVersion, AiToolConfig } from '../src/models/ai-process.model';
|
||||
|
||||
// Bootstrap NestJS to get proper services
|
||||
async function getTenantContext(tenantSlugOrId: string) {
|
||||
const { NestFactory } = await import('@nestjs/core');
|
||||
const { AppModule } = await import('../src/app.module');
|
||||
const { TenantDatabaseService } = await import('../src/tenant/tenant-database.service');
|
||||
|
||||
// Create app context (without listening)
|
||||
const app = await NestFactory.createApplicationContext(AppModule, {
|
||||
logger: false,
|
||||
});
|
||||
|
||||
const tenantDbService = app.get(TenantDatabaseService);
|
||||
|
||||
// Resolve tenant ID
|
||||
const tenantId = await tenantDbService.resolveTenantId(tenantSlugOrId);
|
||||
|
||||
// Get proper Knex connection
|
||||
const knex = await tenantDbService.getTenantKnexById(tenantId);
|
||||
|
||||
return { tenantId, knex, app };
|
||||
}
|
||||
|
||||
/**
|
||||
* Seed script for demo AI Process: Register New Pet
|
||||
*
|
||||
* This process demonstrates:
|
||||
* - Conditional logic (find or create account/contact)
|
||||
* - Tool usage (findAccount, createAccount, findContact, createContact, createPet)
|
||||
* - Sequential execution
|
||||
* - LLM decision nodes with structured JSON output
|
||||
*
|
||||
* Usage:
|
||||
* npm run seed:demo-process -- <tenant-slug-or-id>
|
||||
*/
|
||||
|
||||
const demoProcessGraph = {
|
||||
id: 'register_new_pet',
|
||||
name: 'Register New Pet',
|
||||
description: 'Complete pet registration workflow with account and contact resolution',
|
||||
allowCycles: false,
|
||||
nodes: [
|
||||
{
|
||||
id: 'start',
|
||||
type: 'Start',
|
||||
position: { x: 250, y: 50 },
|
||||
data: { label: 'Start' },
|
||||
},
|
||||
{
|
||||
id: 'extract_info',
|
||||
type: 'LLMDecisionNode',
|
||||
position: { x: 250, y: 150 },
|
||||
data: {
|
||||
label: 'Extract Pet Info',
|
||||
promptTemplate: `Extract pet registration information from the user message.
|
||||
|
||||
User message: {{state.message}}
|
||||
|
||||
Extract:
|
||||
- Pet name (required)
|
||||
- Pet species (required, e.g., "dog", "cat", "bird")
|
||||
- Pet breed (optional)
|
||||
- Pet age (optional, as number)
|
||||
- Owner first name (required)
|
||||
- Owner last name (required)
|
||||
- Owner email (optional)
|
||||
- Owner phone (optional)
|
||||
- Account/Company name (optional, defaults to owner's full name)
|
||||
|
||||
Return JSON with these exact fields.`,
|
||||
inputKeys: ['message'],
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
petName: { type: 'string' },
|
||||
species: { type: 'string' },
|
||||
breed: { type: 'string' },
|
||||
age: { type: 'number' },
|
||||
ownerFirstName: { type: 'string' },
|
||||
ownerLastName: { type: 'string' },
|
||||
ownerEmail: { type: 'string' },
|
||||
ownerPhone: { type: 'string' },
|
||||
accountName: { type: 'string' },
|
||||
},
|
||||
required: ['petName', 'species', 'ownerFirstName', 'ownerLastName'],
|
||||
},
|
||||
model: { name: 'gpt-4o', temperature: 0 },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_account',
|
||||
type: 'ToolNode',
|
||||
position: { x: 250, y: 280 },
|
||||
data: {
|
||||
label: 'Find Account',
|
||||
toolName: 'findAccount',
|
||||
argsTemplate: {
|
||||
name: '{{state.accountName}}',
|
||||
email: '{{state.ownerEmail}}',
|
||||
},
|
||||
outputMapping: {
|
||||
found: 'accountFound',
|
||||
accountId: 'accountId',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_account',
|
||||
type: 'ToolNode',
|
||||
position: { x: 450, y: 380 },
|
||||
data: {
|
||||
label: 'Create Account',
|
||||
toolName: 'createAccount',
|
||||
argsTemplate: {
|
||||
name: '{{state.accountName}}',
|
||||
email: '{{state.ownerEmail}}',
|
||||
phone: '{{state.ownerPhone}}',
|
||||
},
|
||||
outputMapping: {
|
||||
accountId: 'accountId',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_contact',
|
||||
type: 'ToolNode',
|
||||
position: { x: 250, y: 480 },
|
||||
data: {
|
||||
label: 'Find Contact',
|
||||
toolName: 'findContact',
|
||||
argsTemplate: {
|
||||
firstName: '{{state.ownerFirstName}}',
|
||||
lastName: '{{state.ownerLastName}}',
|
||||
email: '{{state.ownerEmail}}',
|
||||
accountId: '{{state.accountId}}',
|
||||
},
|
||||
outputMapping: {
|
||||
found: 'contactFound',
|
||||
contactId: 'contactId',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_contact',
|
||||
type: 'ToolNode',
|
||||
position: { x: 450, y: 580 },
|
||||
data: {
|
||||
label: 'Create Contact',
|
||||
toolName: 'createContact',
|
||||
argsTemplate: {
|
||||
firstName: '{{state.ownerFirstName}}',
|
||||
lastName: '{{state.ownerLastName}}',
|
||||
email: '{{state.ownerEmail}}',
|
||||
phone: '{{state.ownerPhone}}',
|
||||
accountId: '{{state.accountId}}',
|
||||
},
|
||||
outputMapping: {
|
||||
contactId: 'contactId',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_pet',
|
||||
type: 'ToolNode',
|
||||
position: { x: 250, y: 680 },
|
||||
data: {
|
||||
label: 'Create Pet Record',
|
||||
toolName: 'createPet',
|
||||
argsTemplate: {
|
||||
name: '{{state.petName}}',
|
||||
species: '{{state.species}}',
|
||||
breed: '{{state.breed}}',
|
||||
age: '{{state.age}}',
|
||||
ownerId: '{{state.contactId}}',
|
||||
},
|
||||
outputMapping: {
|
||||
petId: 'petId',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'end',
|
||||
type: 'End',
|
||||
position: { x: 250, y: 780 },
|
||||
data: { label: 'End' },
|
||||
},
|
||||
],
|
||||
edges: [
|
||||
{ id: 'e1', source: 'start', target: 'extract_info' },
|
||||
{ id: 'e2', source: 'extract_info', target: 'find_account' },
|
||||
{
|
||||
id: 'e3',
|
||||
source: 'find_account',
|
||||
target: 'find_contact',
|
||||
condition: { '==': [{ var: 'accountFound' }, true] },
|
||||
},
|
||||
{
|
||||
id: 'e4',
|
||||
source: 'find_account',
|
||||
target: 'create_account',
|
||||
condition: { '==': [{ var: 'accountFound' }, false] },
|
||||
},
|
||||
{ id: 'e5', source: 'create_account', target: 'find_contact' },
|
||||
{
|
||||
id: 'e6',
|
||||
source: 'find_contact',
|
||||
target: 'create_pet',
|
||||
condition: { '==': [{ var: 'contactFound' }, true] },
|
||||
},
|
||||
{
|
||||
id: 'e7',
|
||||
source: 'find_contact',
|
||||
target: 'create_contact',
|
||||
condition: { '==': [{ var: 'contactFound' }, false] },
|
||||
},
|
||||
{ id: 'e8', source: 'create_contact', target: 'create_pet' },
|
||||
{ id: 'e9', source: 'create_pet', target: 'end' },
|
||||
],
|
||||
};
|
||||
|
||||
const demoTools = [
|
||||
'findAccount',
|
||||
'createAccount',
|
||||
'findContact',
|
||||
'createContact',
|
||||
'createPet',
|
||||
];
|
||||
|
||||
async function seedDemoProcess(tenantSlugOrId: string) {
|
||||
let app;
|
||||
try {
|
||||
console.log(`\n🌱 Seeding demo AI process for tenant: ${tenantSlugOrId}\n`);
|
||||
|
||||
const context = await getTenantContext(tenantSlugOrId);
|
||||
const { tenantId, knex, app: nestApp } = context;
|
||||
app = nestApp;
|
||||
|
||||
console.log(`✓ Resolved tenant ID: ${tenantId}`);
|
||||
console.log(`✓ Connected to tenant database`);
|
||||
|
||||
// Check if process already exists
|
||||
const existing = await AiProcess.query(knex)
|
||||
.where('name', demoProcessGraph.name)
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
console.log(`⚠ Process "${demoProcessGraph.name}" already exists (ID: ${existing.id})`);
|
||||
console.log(` To create a new version, update via the UI.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create process in transaction
|
||||
await knex.transaction(async (trx) => {
|
||||
const processId = randomUUID();
|
||||
const userId = 'system'; // System user for seed data
|
||||
|
||||
// Create process
|
||||
await AiProcess.query(trx).insert({
|
||||
id: processId,
|
||||
name: demoProcessGraph.name,
|
||||
description: demoProcessGraph.description,
|
||||
latestVersion: 1,
|
||||
createdBy: userId,
|
||||
});
|
||||
console.log(`✓ Created process: ${demoProcessGraph.name} (${processId})`);
|
||||
|
||||
// Create initial version
|
||||
// Note: In production, this would call the compiler service
|
||||
// For seed, we're storing a simplified version
|
||||
await AiProcessVersion.query(trx).insert({
|
||||
id: randomUUID(),
|
||||
processId,
|
||||
version: 1,
|
||||
graphJson: demoProcessGraph,
|
||||
compiledJson: {
|
||||
graphId: demoProcessGraph.id,
|
||||
version: 1,
|
||||
nodes: demoProcessGraph.nodes,
|
||||
edges: demoProcessGraph.edges,
|
||||
startNodeId: 'start',
|
||||
endNodeIds: ['end'],
|
||||
adjacency: {},
|
||||
},
|
||||
createdBy: userId,
|
||||
});
|
||||
console.log(`✓ Created process version 1`);
|
||||
|
||||
// Enable demo tools for tenant
|
||||
for (const toolName of demoTools) {
|
||||
const existingTool = await AiToolConfig.query(trx)
|
||||
.where('tool_name', toolName)
|
||||
.first();
|
||||
|
||||
if (!existingTool) {
|
||||
await AiToolConfig.query(trx).insert({
|
||||
id: randomUUID(),
|
||||
toolName,
|
||||
enabled: true,
|
||||
});
|
||||
console.log(`✓ Enabled tool: ${toolName}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`\n✅ Demo process seeded successfully!\n`);
|
||||
console.log(`Next steps:`);
|
||||
console.log(` 1. Navigate to /ai-processes in your frontend`);
|
||||
console.log(` 2. Open the "${demoProcessGraph.name}" process`);
|
||||
console.log(` 3. Test it by sending a message like:`);
|
||||
console.log(` "Register a dog named Max, owned by John Smith (john@email.com)"`);
|
||||
console.log();
|
||||
|
||||
if (app) await app.close();
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
console.error('❌ Seed failed:', error);
|
||||
if (app) await app.close();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Get tenant from command line args
|
||||
const tenantSlugOrId = process.argv[2];
|
||||
|
||||
if (!tenantSlugOrId) {
|
||||
console.error('Usage: npm run seed:demo-process -- <tenant-slug-or-id>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
seedDemoProcess(tenantSlugOrId);
|
||||
41
backend/src/ai-assistant/ai-assistant.controller.ts
Normal file
41
backend/src/ai-assistant/ai-assistant.controller.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { Body, Controller, Post, UseGuards } from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { AiAssistantService } from './ai-assistant.service';
|
||||
import { AiChatRequestDto } from './dto/ai-chat.dto';
|
||||
import { AiSearchRequestDto } from './dto/ai-search.dto';
|
||||
|
||||
@Controller('ai')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class AiAssistantController {
|
||||
constructor(private readonly aiAssistantService: AiAssistantService) {}
|
||||
|
||||
@Post('chat')
|
||||
async chat(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: AiChatRequestDto,
|
||||
) {
|
||||
return this.aiAssistantService.handleChat(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload.message,
|
||||
payload.history,
|
||||
payload.context,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('search')
|
||||
async search(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: AiSearchRequestDto,
|
||||
) {
|
||||
return this.aiAssistantService.searchRecords(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload,
|
||||
);
|
||||
}
|
||||
}
|
||||
15
backend/src/ai-assistant/ai-assistant.module.ts
Normal file
15
backend/src/ai-assistant/ai-assistant.module.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { AiAssistantController } from './ai-assistant.controller';
|
||||
import { AiAssistantService } from './ai-assistant.service';
|
||||
import { ObjectModule } from '../object/object.module';
|
||||
import { PageLayoutModule } from '../page-layout/page-layout.module';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { MeilisearchModule } from '../search/meilisearch.module';
|
||||
|
||||
@Module({
|
||||
imports: [ObjectModule, PageLayoutModule, TenantModule, MeilisearchModule],
|
||||
controllers: [AiAssistantController],
|
||||
providers: [AiAssistantService],
|
||||
exports: [AiAssistantService],
|
||||
})
|
||||
export class AiAssistantModule {}
|
||||
1236
backend/src/ai-assistant/ai-assistant.service.ts
Normal file
1236
backend/src/ai-assistant/ai-assistant.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
32
backend/src/ai-assistant/ai-assistant.types.ts
Normal file
32
backend/src/ai-assistant/ai-assistant.types.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
export interface AiChatMessage {
|
||||
role: 'user' | 'assistant';
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface AiChatContext {
|
||||
objectApiName?: string;
|
||||
view?: string;
|
||||
recordId?: string;
|
||||
route?: string;
|
||||
}
|
||||
|
||||
export interface AiAssistantReply {
|
||||
reply: string;
|
||||
action?: 'create_record' | 'collect_fields' | 'clarify';
|
||||
missingFields?: string[];
|
||||
record?: any;
|
||||
}
|
||||
|
||||
export interface AiAssistantState {
|
||||
message: string;
|
||||
history?: AiChatMessage[];
|
||||
context: AiChatContext;
|
||||
objectDefinition?: any;
|
||||
pageLayout?: any;
|
||||
extractedFields?: Record<string, any>;
|
||||
requiredFields?: string[];
|
||||
missingFields?: string[];
|
||||
action?: AiAssistantReply['action'];
|
||||
record?: any;
|
||||
reply?: string;
|
||||
}
|
||||
36
backend/src/ai-assistant/dto/ai-chat.dto.ts
Normal file
36
backend/src/ai-assistant/dto/ai-chat.dto.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { Type } from 'class-transformer';
|
||||
import { IsNotEmpty, IsObject, IsOptional, IsString, ValidateNested } from 'class-validator';
|
||||
import { AiChatMessageDto } from './ai-chat.message.dto';
|
||||
|
||||
export class AiChatContextDto {
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
objectApiName?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
view?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
recordId?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
route?: string;
|
||||
}
|
||||
|
||||
export class AiChatRequestDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
message: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsObject()
|
||||
context?: AiChatContextDto;
|
||||
|
||||
@IsOptional()
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => AiChatMessageDto)
|
||||
history?: AiChatMessageDto[];
|
||||
}
|
||||
10
backend/src/ai-assistant/dto/ai-chat.message.dto.ts
Normal file
10
backend/src/ai-assistant/dto/ai-chat.message.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsIn, IsNotEmpty, IsString } from 'class-validator';
|
||||
|
||||
export class AiChatMessageDto {
|
||||
@IsIn(['user', 'assistant'])
|
||||
role: 'user' | 'assistant';
|
||||
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
text: string;
|
||||
}
|
||||
22
backend/src/ai-assistant/dto/ai-search.dto.ts
Normal file
22
backend/src/ai-assistant/dto/ai-search.dto.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { Type } from 'class-transformer';
|
||||
import { IsNotEmpty, IsOptional, IsString, IsNumber } from 'class-validator';
|
||||
|
||||
export class AiSearchRequestDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
objectApiName: string;
|
||||
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
query: string;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsNumber()
|
||||
page?: number;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsNumber()
|
||||
pageSize?: number;
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { compileProcessGraph, GraphValidationError } from '../ai-processes.compiler';
|
||||
import { demoRegisterNewPetProcess } from '../demo-process';
|
||||
|
||||
describe('ai-processes compiler', () => {
|
||||
it('throws when missing start node', () => {
|
||||
const badGraph = {
|
||||
...demoRegisterNewPetProcess,
|
||||
nodes: demoRegisterNewPetProcess.nodes.filter((n) => n.type !== 'Start'),
|
||||
};
|
||||
|
||||
expect(() =>
|
||||
compileProcessGraph(badGraph, { tenantId: 'default', version: 1 }),
|
||||
).toThrow(GraphValidationError);
|
||||
});
|
||||
|
||||
it('compiles the demo process graph', () => {
|
||||
const compiled = compileProcessGraph(demoRegisterNewPetProcess, {
|
||||
tenantId: 'default',
|
||||
version: 1,
|
||||
});
|
||||
|
||||
expect(compiled.startNodeId).toBe('start');
|
||||
expect(compiled.endNodeIds).toContain('end');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,40 @@
|
||||
import { compileProcessGraph } from '../ai-processes.compiler';
|
||||
import { demoRegisterNewPetProcess } from '../demo-process';
|
||||
import { runCompiledGraph } from '../ai-processes.runner';
|
||||
import { ToolRegistry } from '../tools/tool-registry';
|
||||
|
||||
describe('ai-processes runner', () => {
|
||||
it('runs the demo process until human input is required', async () => {
|
||||
const compiled = compileProcessGraph(demoRegisterNewPetProcess, {
|
||||
tenantId: 'default',
|
||||
version: 1,
|
||||
});
|
||||
|
||||
const result = await runCompiledGraph({
|
||||
compiledGraph: compiled,
|
||||
input: {
|
||||
accountName: 'Acme Inc',
|
||||
firstName: 'Jamie',
|
||||
lastName: 'Doe',
|
||||
},
|
||||
toolRegistry: new ToolRegistry(),
|
||||
toolContext: { tenantId: 'default', userId: 'user-1' },
|
||||
llmDecision: async (node, state) => {
|
||||
if (node.id === 'decide_account') {
|
||||
return { accountAction: 'find', accountName: state.accountName };
|
||||
}
|
||||
if (node.id === 'decide_contact') {
|
||||
return {
|
||||
contactAction: 'find',
|
||||
firstName: state.firstName,
|
||||
lastName: state.lastName,
|
||||
};
|
||||
}
|
||||
return {};
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.status).toBe('waiting');
|
||||
expect(result.currentNodeId).toBe('need_pet');
|
||||
});
|
||||
});
|
||||
191
backend/src/ai-processes/ai-processes.compiler.ts
Normal file
191
backend/src/ai-processes/ai-processes.compiler.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { apply as applyJsonLogic } from 'json-logic-js';
|
||||
import { createAjv } from './ai-processes.schemas';
|
||||
import {
|
||||
CompiledGraph,
|
||||
ProcessGraphDefinition,
|
||||
ProcessGraphEdge,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
import { ToolRegistry } from './tools/tool-registry';
|
||||
|
||||
/**
 * Raised when a process graph fails JSON-schema or structural validation.
 */
export class GraphValidationError extends Error {
  constructor(message: string) {
    super(message);
    // Explicit name so callers can distinguish it from plain Errors in logs
    // and via `error.name` checks.
    this.name = 'GraphValidationError';
  }
}

/** Context supplied when compiling a graph for a specific tenant. */
export interface CompileOptions {
  // Resolved tenant identifier the graph belongs to.
  tenantId: string;
  // Version number recorded in the compiled output.
  version: number;
}
|
||||
|
||||
export const validateGraphDefinition = (
|
||||
graph: ProcessGraphDefinition,
|
||||
tenantId: string,
|
||||
) => {
|
||||
const ajv = createAjv();
|
||||
const validate = ajv.getSchema<ProcessGraphDefinition>('processGraph');
|
||||
if (!validate) {
|
||||
throw new GraphValidationError('Graph schema is not registered.');
|
||||
}
|
||||
const valid = validate(graph);
|
||||
if (!valid) {
|
||||
throw new GraphValidationError(
|
||||
`Graph schema validation failed: ${ajv.errorsText(validate.errors)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const startNodes = graph.nodes.filter((node) => node.type === 'Start');
|
||||
const endNodes = graph.nodes.filter((node) => node.type === 'End');
|
||||
|
||||
if (startNodes.length !== 1) {
|
||||
throw new GraphValidationError('Graph must contain exactly one Start node.');
|
||||
}
|
||||
if (endNodes.length < 1) {
|
||||
throw new GraphValidationError('Graph must contain at least one End node.');
|
||||
}
|
||||
|
||||
const nodeIds = new Set(graph.nodes.map((node) => node.id));
|
||||
graph.edges.forEach((edge) => {
|
||||
if (!nodeIds.has(edge.source) || !nodeIds.has(edge.target)) {
|
||||
throw new GraphValidationError(`Edge ${edge.id} references unknown nodes.`);
|
||||
}
|
||||
});
|
||||
|
||||
const adjacency = buildAdjacency(graph.edges);
|
||||
const reachable = new Set<string>();
|
||||
const queue = [startNodes[0].id];
|
||||
|
||||
while (queue.length) {
|
||||
const current = queue.shift();
|
||||
if (!current || reachable.has(current)) continue;
|
||||
reachable.add(current);
|
||||
(adjacency[current] || []).forEach((neighbor) => queue.push(neighbor));
|
||||
}
|
||||
|
||||
graph.nodes.forEach((node) => {
|
||||
if (!reachable.has(node.id)) {
|
||||
throw new GraphValidationError(`Node ${node.id} is not reachable.`);
|
||||
}
|
||||
});
|
||||
|
||||
if (!graph.allowCycles && hasCycle(graph.nodes, graph.edges)) {
|
||||
throw new GraphValidationError('Graph contains cycles but allowCycles=false.');
|
||||
}
|
||||
|
||||
const toolRegistry = new ToolRegistry();
|
||||
const allToolNames = toolRegistry.getAllToolNames();
|
||||
|
||||
graph.nodes.forEach((node) => {
|
||||
if (node.type === 'ToolNode') {
|
||||
const toolName = (node.data as { toolName?: string }).toolName;
|
||||
if (!toolName) {
|
||||
throw new GraphValidationError(
|
||||
`ToolNode ${node.id} missing toolName configuration.`,
|
||||
);
|
||||
}
|
||||
// Validate tool exists in registry (allowlist check happens at runtime)
|
||||
if (!allToolNames.includes(toolName)) {
|
||||
throw new GraphValidationError(
|
||||
`Tool ${toolName} is not registered in the tool registry.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (node.type === 'LLMDecisionNode') {
|
||||
const data = node.data as {
|
||||
promptTemplate?: string;
|
||||
inputKeys?: string[];
|
||||
outputSchema?: Record<string, unknown>;
|
||||
model?: { name?: string; temperature?: number };
|
||||
};
|
||||
if (!data.promptTemplate || !data.outputSchema || !data.model?.name) {
|
||||
throw new GraphValidationError(
|
||||
`LLMDecisionNode ${node.id} missing required configuration.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (node.type === 'HumanInputNode') {
|
||||
const data = node.data as {
|
||||
requiredFieldsSchema?: Record<string, unknown>;
|
||||
promptToUser?: string;
|
||||
};
|
||||
if (!data.requiredFieldsSchema || !data.promptToUser) {
|
||||
throw new GraphValidationError(
|
||||
`HumanInputNode ${node.id} missing required configuration.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
graph.edges.forEach((edge) => {
|
||||
if (edge.condition) {
|
||||
try {
|
||||
applyJsonLogic(edge.condition, {});
|
||||
} catch (error) {
|
||||
throw new GraphValidationError(
|
||||
`Edge ${edge.id} has invalid json-logic condition.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const compileProcessGraph = (
|
||||
graph: ProcessGraphDefinition,
|
||||
options: CompileOptions,
|
||||
): CompiledGraph => {
|
||||
validateGraphDefinition(graph, options.tenantId);
|
||||
|
||||
const startNodeId = graph.nodes.find((node) => node.type === 'Start')?.id;
|
||||
if (!startNodeId) {
|
||||
throw new GraphValidationError('Start node missing after validation.');
|
||||
}
|
||||
|
||||
const endNodeIds = graph.nodes
|
||||
.filter((node) => node.type === 'End')
|
||||
.map((node) => node.id);
|
||||
|
||||
return {
|
||||
graphId: graph.id,
|
||||
version: options.version,
|
||||
nodes: graph.nodes,
|
||||
edges: graph.edges,
|
||||
startNodeId,
|
||||
endNodeIds,
|
||||
adjacency: buildAdjacency(graph.edges),
|
||||
allowCycles: graph.allowCycles,
|
||||
maxIterations: graph.maxIterations,
|
||||
};
|
||||
};
|
||||
|
||||
const buildAdjacency = (edges: ProcessGraphEdge[]) => {
|
||||
return edges.reduce<Record<string, string[]>>((acc, edge) => {
|
||||
if (!acc[edge.source]) {
|
||||
acc[edge.source] = [];
|
||||
}
|
||||
acc[edge.source].push(edge.target);
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
||||
|
||||
const hasCycle = (nodes: ProcessGraphNode[], edges: ProcessGraphEdge[]) => {
|
||||
const adjacency = buildAdjacency(edges);
|
||||
const visited = new Set<string>();
|
||||
const stack = new Set<string>();
|
||||
|
||||
const visit = (nodeId: string): boolean => {
|
||||
if (stack.has(nodeId)) return true;
|
||||
if (visited.has(nodeId)) return false;
|
||||
visited.add(nodeId);
|
||||
stack.add(nodeId);
|
||||
const neighbors = adjacency[nodeId] || [];
|
||||
for (const neighbor of neighbors) {
|
||||
if (visit(neighbor)) return true;
|
||||
}
|
||||
stack.delete(nodeId);
|
||||
return false;
|
||||
};
|
||||
|
||||
return nodes.some((node) => visit(node.id));
|
||||
};
|
||||
144
backend/src/ai-processes/ai-processes.controller.ts
Normal file
144
backend/src/ai-processes/ai-processes.controller.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Get,
|
||||
Param,
|
||||
Post,
|
||||
Put,
|
||||
Query,
|
||||
Sse,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiProcessesOrchestratorService } from './ai-processes.orchestrator.service';
|
||||
import { CreateAiProcessDto, UpdateAiProcessDto } from './dto/ai-process.dto';
|
||||
import { CreateAiRunDto, ResumeAiRunDto } from './dto/ai-run.dto';
|
||||
import { CreateChatSessionDto, SendChatMessageDto } from './dto/ai-chat.dto';
|
||||
|
||||
@Controller('tenants/:tenantId')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class AiProcessesController {
|
||||
constructor(
|
||||
private readonly processesService: AiProcessesService,
|
||||
private readonly streamService: AiProcessesStreamService,
|
||||
private readonly orchestratorService: AiProcessesOrchestratorService,
|
||||
) {}
|
||||
|
||||
@Get('ai-processes')
|
||||
async listProcesses(@TenantId() tenantId: string) {
|
||||
return this.processesService.listProcesses(tenantId);
|
||||
}
|
||||
|
||||
@Post('ai-processes')
|
||||
async createProcess(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: CreateAiProcessDto,
|
||||
) {
|
||||
return this.processesService.createProcess(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload.name,
|
||||
payload.description,
|
||||
payload.graph,
|
||||
);
|
||||
}
|
||||
|
||||
@Put('ai-processes/:processId')
|
||||
async updateProcess(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('processId') processId: string,
|
||||
@Body() payload: UpdateAiProcessDto,
|
||||
) {
|
||||
return this.processesService.createProcessVersion(
|
||||
tenantId,
|
||||
user.userId,
|
||||
processId,
|
||||
payload.graph,
|
||||
);
|
||||
}
|
||||
|
||||
@Get('ai-processes/:processId/versions')
|
||||
async listVersions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('processId') processId: string,
|
||||
) {
|
||||
return this.processesService.listProcessVersions(tenantId, processId);
|
||||
}
|
||||
|
||||
@Post('ai-processes/:processId/runs')
|
||||
async createRun(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('processId') processId: string,
|
||||
@Body() payload: CreateAiRunDto,
|
||||
) {
|
||||
return this.processesService.createRun(
|
||||
tenantId,
|
||||
user.userId,
|
||||
processId,
|
||||
payload.input,
|
||||
payload.sessionId,
|
||||
payload.sessionId
|
||||
? (event) => this.streamService.emit(payload.sessionId as string, event)
|
||||
: undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('ai-runs/:runId/resume')
|
||||
async resumeRun(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Param('runId') runId: string,
|
||||
@Body() payload: ResumeAiRunDto,
|
||||
) {
|
||||
return this.processesService.resumeRun(
|
||||
tenantId,
|
||||
user.userId,
|
||||
runId,
|
||||
payload.input,
|
||||
payload.sessionId,
|
||||
payload.sessionId
|
||||
? (event) => this.streamService.emit(payload.sessionId as string, event)
|
||||
: undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('ai-chat/sessions')
|
||||
async createSession(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() _payload: CreateChatSessionDto,
|
||||
) {
|
||||
return this.orchestratorService.createSession(tenantId, user.userId);
|
||||
}
|
||||
|
||||
@Post('ai-chat/messages')
|
||||
@Post('ai-processes/chat/messages')
|
||||
async sendChatMessage(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() user: any,
|
||||
@Body() payload: SendChatMessageDto,
|
||||
) {
|
||||
return this.orchestratorService.sendMessage(
|
||||
tenantId,
|
||||
user.userId,
|
||||
payload.message,
|
||||
payload.sessionId,
|
||||
payload.processId,
|
||||
payload.history,
|
||||
payload.context,
|
||||
);
|
||||
}
|
||||
|
||||
@Sse('ai-chat/stream')
|
||||
@Sse('ai-processes/stream')
|
||||
streamChat(@Query('sessionId') sessionId: string) {
|
||||
return this.streamService.getStream(sessionId);
|
||||
}
|
||||
}
|
||||
19
backend/src/ai-processes/ai-processes.module.ts
Normal file
19
backend/src/ai-processes/ai-processes.module.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { AiAssistantModule } from '../ai-assistant/ai-assistant.module';
|
||||
import { AiProcessesController } from './ai-processes.controller';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiProcessesOrchestratorService } from './ai-processes.orchestrator.service';
|
||||
|
||||
/**
 * Wires the AI process builder feature: the tenant-scoped controller, the
 * process/run persistence service, the SSE stream fan-out, and the chat
 * orchestrator. Imports TenantModule (per-tenant DB access) and
 * AiAssistantModule (fallback assistant + OpenAI credentials).
 */
@Module({
  imports: [TenantModule, AiAssistantModule],
  controllers: [AiProcessesController],
  providers: [
    AiProcessesService,
    AiProcessesStreamService,
    AiProcessesOrchestratorService,
  ],
  // Exported so other feature modules can list and execute processes.
  exports: [AiProcessesService],
})
export class AiProcessesModule {}
|
||||
212
backend/src/ai-processes/ai-processes.orchestrator.service.ts
Normal file
212
backend/src/ai-processes/ai-processes.orchestrator.service.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
import { AiProcessesService } from './ai-processes.service';
|
||||
import { AiProcessesStreamService } from './ai-processes.stream.service';
|
||||
import { AiAssistantService } from '../ai-assistant/ai-assistant.service';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { AiChatMessage, AiChatSession } from '../models/ai-chat.model';
|
||||
import { DeepAgentOrchestrator } from './deep-agent.orchestrator';
|
||||
|
||||
@Injectable()
|
||||
export class AiProcessesOrchestratorService {
|
||||
constructor(
|
||||
private readonly processesService: AiProcessesService,
|
||||
private readonly streamService: AiProcessesStreamService,
|
||||
private readonly tenantDbService: TenantDatabaseService,
|
||||
private readonly aiAssistantService: AiAssistantService,
|
||||
) {}
|
||||
|
||||
private async getTenantContext(tenantId: string) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
return { knex, tenantId: resolvedTenantId };
|
||||
}
|
||||
|
||||
private async createSessionWithContext(
|
||||
knex: Knex,
|
||||
tenantId: string,
|
||||
userId: string,
|
||||
) {
|
||||
return AiChatSession.query(knex).insert({
|
||||
userId,
|
||||
});
|
||||
}
|
||||
|
||||
async createSession(tenantId: string, userId: string) {
|
||||
const { knex, tenantId: resolvedTenantId } =
|
||||
await this.getTenantContext(tenantId);
|
||||
return this.createSessionWithContext(knex, resolvedTenantId, userId);
|
||||
}
|
||||
|
||||
async sendMessage(
|
||||
tenantId: string,
|
||||
userId: string,
|
||||
message: string,
|
||||
sessionId?: string,
|
||||
processId?: string,
|
||||
history?: { role: string; text: string }[],
|
||||
context?: Record<string, unknown>,
|
||||
) {
|
||||
const { knex, tenantId: resolvedTenantId } =
|
||||
await this.getTenantContext(tenantId);
|
||||
|
||||
const session = sessionId
|
||||
? await AiChatSession.query(knex).findById(sessionId)
|
||||
: await this.createSessionWithContext(knex, resolvedTenantId, userId);
|
||||
|
||||
if (!session) {
|
||||
throw new Error('Chat session not found.');
|
||||
}
|
||||
|
||||
await AiChatMessage.query(knex).insert({
|
||||
sessionId: session.id,
|
||||
role: 'user',
|
||||
content: message,
|
||||
});
|
||||
|
||||
this.streamService.emit(session.id, { type: 'agent_started' });
|
||||
|
||||
const processes = await this.processesService.listProcesses(resolvedTenantId);
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'processes_listed',
|
||||
data: { count: processes.length },
|
||||
});
|
||||
|
||||
// If no processes configured, fallback to standard AI assistant
|
||||
if (!processes.length) {
|
||||
const response = await this.aiAssistantService.handleChat(
|
||||
resolvedTenantId,
|
||||
userId,
|
||||
message,
|
||||
(history ?? []) as any,
|
||||
context ?? {},
|
||||
);
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'final',
|
||||
data: { reply: response.reply, action: response.action },
|
||||
});
|
||||
|
||||
await AiChatMessage.query(knex).insert({
|
||||
sessionId: session.id,
|
||||
role: 'assistant',
|
||||
content: response.reply,
|
||||
});
|
||||
|
||||
return {
|
||||
sessionId: session.id,
|
||||
reply: response.reply,
|
||||
action: response.action,
|
||||
record: response.record,
|
||||
};
|
||||
}
|
||||
|
||||
// Get OpenAI credentials from tenant integrations
|
||||
const credentials = await this.aiAssistantService.getOpenAiConfig(resolvedTenantId);
|
||||
if (!credentials?.apiKey) {
|
||||
throw new Error('OpenAI credentials not configured for this tenant');
|
||||
}
|
||||
|
||||
// Create Deep Agent with tenant's credentials
|
||||
const deepAgent = new DeepAgentOrchestrator(credentials.apiKey, credentials.model);
|
||||
|
||||
// Use Deep Agent to select the best process
|
||||
const processInfos = processes.map((p) => ({
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
description: p.description || undefined,
|
||||
}));
|
||||
|
||||
const selection = await deepAgent.selectProcess(
|
||||
message,
|
||||
processInfos,
|
||||
history as any,
|
||||
);
|
||||
|
||||
// If we need more information or no match, respond with question
|
||||
if (selection.action === 'need_more_info' || selection.action === 'no_match') {
|
||||
const reply = selection.question || selection.reasoning ||
|
||||
'I\'m not sure which process to use. Could you provide more details?';
|
||||
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'final',
|
||||
data: { reply, needsMoreInfo: true },
|
||||
});
|
||||
|
||||
await AiChatMessage.query(knex).insert({
|
||||
sessionId: session.id,
|
||||
role: 'assistant',
|
||||
content: reply,
|
||||
});
|
||||
|
||||
return { sessionId: session.id, reply, needsMoreInfo: true };
|
||||
}
|
||||
|
||||
// Process selected - find it and execute
|
||||
const selectedProcess = processes.find((p) => p.id === selection.processId);
|
||||
if (!selectedProcess) {
|
||||
throw new Error('Selected process not found.');
|
||||
}
|
||||
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'process_selected',
|
||||
processId: selectedProcess.id,
|
||||
version: selectedProcess.latestVersion,
|
||||
data: { processName: selectedProcess.name, reasoning: selection.reasoning },
|
||||
});
|
||||
|
||||
// Extract inputs from the message
|
||||
// For now, we'll use a simple approach - just pass the message as input
|
||||
// In a more sophisticated implementation, we'd use the deep agent to extract structured inputs
|
||||
const startMessage = await deepAgent.generateStartMessage(
|
||||
selectedProcess.name,
|
||||
{ message },
|
||||
);
|
||||
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'agent_message',
|
||||
data: { message: startMessage },
|
||||
});
|
||||
|
||||
await AiChatMessage.query(knex).insert({
|
||||
sessionId: session.id,
|
||||
role: 'assistant',
|
||||
content: startMessage,
|
||||
});
|
||||
|
||||
const { run, result } = await this.processesService.createRun(
|
||||
resolvedTenantId,
|
||||
userId,
|
||||
selectedProcess.id,
|
||||
{ message, context: context || {} },
|
||||
session.id,
|
||||
(payload) => this.streamService.emit(session.id, payload),
|
||||
);
|
||||
|
||||
// Emit final event
|
||||
this.streamService.emit(session.id, {
|
||||
type: 'final',
|
||||
data: {
|
||||
runId: run.id,
|
||||
status: result.status,
|
||||
output: result.output,
|
||||
message: result.status === 'completed'
|
||||
? '✅ Workflow completed successfully!'
|
||||
: result.status === 'error'
|
||||
? `❌ Workflow failed: ${result.error?.message || 'Unknown error'}`
|
||||
: '⏸️ Workflow paused',
|
||||
},
|
||||
});
|
||||
|
||||
await AiChatMessage.query(knex).insert({
|
||||
sessionId: session.id,
|
||||
role: 'assistant',
|
||||
content: result.status === 'completed'
|
||||
? '✅ Workflow completed successfully!'
|
||||
: result.status === 'error'
|
||||
? `❌ Workflow failed: ${result.error?.message || 'Unknown error'}`
|
||||
: '⏸️ Workflow paused',
|
||||
});
|
||||
|
||||
return { sessionId: session.id, runId: run.id, status: result.status };
|
||||
}
|
||||
}
|
||||
222
backend/src/ai-processes/ai-processes.runner.ts
Normal file
222
backend/src/ai-processes/ai-processes.runner.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { apply as applyJsonLogic } from 'json-logic-js';
|
||||
import Ajv from 'ajv';
|
||||
import { ToolRegistry, ToolContext } from './tools/tool-registry';
|
||||
import {
|
||||
AiProcessEventPayload,
|
||||
CompiledGraph,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
|
||||
/** Inputs required to execute a compiled process graph. */
export interface RunOptions {
  // The validated, compiled graph to execute.
  compiledGraph: CompiledGraph;
  // Initial state seeded into the run (shallow-copied before mutation).
  input: Record<string, unknown>;
  // Registry used to look up tools referenced by ToolNodes.
  toolRegistry: ToolRegistry;
  // Tenant/user context passed to every tool invocation.
  toolContext: ToolContext;
  // Optional sink for progress events (node start/complete, tool calls, final).
  onEvent?: (event: AiProcessEventPayload) => void;
  // Callback that produces an LLMDecisionNode's structured output from the
  // current state; validated against the node's outputSchema by the runner.
  llmDecision: (
    node: ProcessGraphNode,
    state: Record<string, unknown>,
  ) => Promise<Record<string, unknown>>;
}

/** Outcome of a (possibly partial) graph execution. */
export interface RunResult {
  // 'waiting' means execution paused at a HumanInputNode and can be resumed.
  status: 'running' | 'waiting' | 'completed' | 'error';
  // Accumulated state at the point the run stopped.
  state: Record<string, unknown>;
  // Node the run is paused on (set when status === 'waiting').
  currentNodeId?: string;
  // Final state snapshot (set when status === 'completed').
  output?: Record<string, unknown>;
  // Failure details (set when status === 'error').
  error?: Record<string, unknown>;
}
|
||||
|
||||
export const runCompiledGraph = async (
|
||||
options: RunOptions,
|
||||
startNodeId?: string,
|
||||
): Promise<RunResult> => {
|
||||
const {
|
||||
compiledGraph,
|
||||
input,
|
||||
toolRegistry,
|
||||
toolContext,
|
||||
onEvent,
|
||||
llmDecision,
|
||||
} = options;
|
||||
|
||||
const state: Record<string, unknown> = { ...input };
|
||||
let currentNodeId = startNodeId ?? compiledGraph.startNodeId;
|
||||
let iterations = 0;
|
||||
const maxIterations = compiledGraph.maxIterations ?? 50;
|
||||
|
||||
const emit = (payload: AiProcessEventPayload) => {
|
||||
if (onEvent) {
|
||||
onEvent(payload);
|
||||
}
|
||||
};
|
||||
|
||||
while (currentNodeId) {
|
||||
if (
|
||||
compiledGraph.nodes.length > 0 &&
|
||||
compiledGraph.endNodeIds.includes(currentNodeId)
|
||||
) {
|
||||
emit({ type: 'node_started', nodeId: currentNodeId });
|
||||
emit({ type: 'node_completed', nodeId: currentNodeId });
|
||||
emit({ type: 'final', data: { output: state } });
|
||||
return { status: 'completed', state, output: state };
|
||||
}
|
||||
|
||||
const node = compiledGraph.nodes.find((item) => item.id === currentNodeId);
|
||||
if (!node) {
|
||||
return {
|
||||
status: 'error',
|
||||
state,
|
||||
error: { message: `Node ${currentNodeId} not found.` },
|
||||
};
|
||||
}
|
||||
|
||||
emit({ type: 'node_started', nodeId: node.id });
|
||||
|
||||
if (node.type === 'LLMDecisionNode') {
|
||||
const output = await llmDecision(node, state);
|
||||
validateNodeOutput(node, output);
|
||||
Object.assign(state, output);
|
||||
}
|
||||
|
||||
if (node.type === 'ToolNode') {
|
||||
const toolName = (node.data as { toolName: string }).toolName;
|
||||
emit({ type: 'tool_called', nodeId: node.id, toolName });
|
||||
const tool = toolRegistry.getTool(toolName);
|
||||
const argsTemplate = (node.data as { argsTemplate: Record<string, unknown> })
|
||||
.argsTemplate;
|
||||
const resolvedArgs = resolveTemplate(argsTemplate, state);
|
||||
|
||||
// Debug logging
|
||||
console.log(`[ToolNode ${node.id}] Tool: ${toolName}`);
|
||||
console.log(`[ToolNode ${node.id}] State keys:`, Object.keys(state));
|
||||
console.log(`[ToolNode ${node.id}] ArgsTemplate:`, JSON.stringify(argsTemplate));
|
||||
console.log(`[ToolNode ${node.id}] ResolvedArgs:`, JSON.stringify(resolvedArgs));
|
||||
|
||||
const toolResult = await tool(toolContext, {
|
||||
...resolvedArgs,
|
||||
state,
|
||||
});
|
||||
|
||||
console.log(`[ToolNode ${node.id}] ToolResult:`, JSON.stringify(toolResult));
|
||||
|
||||
const outputMapping = (node.data as { outputMapping: Record<string, string> })
|
||||
.outputMapping;
|
||||
Object.entries(outputMapping).forEach(([key, path]) => {
|
||||
console.log(`[ToolNode ${node.id}] Mapping: toolResult['${key}'] = ${toolResult[key]} -> state['${path}']`);
|
||||
state[path] = toolResult[key];
|
||||
});
|
||||
}
|
||||
|
||||
if (node.type === 'HumanInputNode') {
|
||||
const data = node.data as {
|
||||
requiredFieldsSchema: Record<string, unknown>;
|
||||
promptToUser: string;
|
||||
};
|
||||
emit({
|
||||
type: 'need_input',
|
||||
nodeId: node.id,
|
||||
data: {
|
||||
requiredFieldsSchema: data.requiredFieldsSchema,
|
||||
promptToUser: data.promptToUser,
|
||||
},
|
||||
});
|
||||
return { status: 'waiting', state, currentNodeId: node.id };
|
||||
}
|
||||
|
||||
emit({ type: 'node_completed', nodeId: node.id });
|
||||
|
||||
const nextTargets = compiledGraph.edges.filter(
|
||||
(edge) => edge.source === node.id,
|
||||
);
|
||||
|
||||
if (nextTargets.length === 0) {
|
||||
return {
|
||||
status: 'error',
|
||||
state,
|
||||
error: { message: `No outgoing edges for node ${node.id}.` },
|
||||
};
|
||||
}
|
||||
|
||||
const selectedEdge = selectEdge(nextTargets, state);
|
||||
if (!selectedEdge) {
|
||||
return {
|
||||
status: 'error',
|
||||
state,
|
||||
error: { message: `No edge conditions matched for node ${node.id}.` },
|
||||
};
|
||||
}
|
||||
|
||||
currentNodeId = selectedEdge.target;
|
||||
iterations += 1;
|
||||
|
||||
if (!compiledGraph.allowCycles && iterations > compiledGraph.nodes.length) {
|
||||
return {
|
||||
status: 'error',
|
||||
state,
|
||||
error: { message: 'Cycle detected during execution.' },
|
||||
};
|
||||
}
|
||||
|
||||
if (compiledGraph.allowCycles && iterations > maxIterations) {
|
||||
return {
|
||||
status: 'error',
|
||||
state,
|
||||
error: { message: 'Max iterations exceeded.' },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return { status: 'completed', state, output: state };
|
||||
};
|
||||
|
||||
const resolveTemplate = (
|
||||
template: Record<string, unknown>,
|
||||
state: Record<string, unknown>,
|
||||
) => {
|
||||
return Object.entries(template).reduce<Record<string, unknown>>(
|
||||
(acc, [key, value]) => {
|
||||
if (typeof value === 'string' && value.startsWith('{{state.')) {
|
||||
const path = value.replace('{{state.', '').replace('}}', '');
|
||||
acc[key] = state[path];
|
||||
} else {
|
||||
acc[key] = value;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
};
|
||||
|
||||
const selectEdge = (
|
||||
edges: { condition?: Record<string, unknown>; target: string }[],
|
||||
state: Record<string, unknown>,
|
||||
) => {
|
||||
if (edges.length === 1) return edges[0];
|
||||
|
||||
return edges.find((edge) => {
|
||||
if (!edge.condition) return true;
|
||||
try {
|
||||
return Boolean(applyJsonLogic(edge.condition, state));
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const validateNodeOutput = (
|
||||
node: ProcessGraphNode,
|
||||
output: Record<string, unknown>,
|
||||
) => {
|
||||
const schema = (node.data as { outputSchema?: Record<string, unknown> })
|
||||
.outputSchema;
|
||||
if (!schema) return;
|
||||
const ajv = new Ajv({ allErrors: true, strict: false });
|
||||
const validate = ajv.compile(schema);
|
||||
if (!validate(output)) {
|
||||
const errors = validate.errors?.map(e => `${e.instancePath} ${e.message}`).join(', ');
|
||||
throw new Error(
|
||||
`LLM output invalid for node ${node.id}. Errors: ${errors}. Output: ${JSON.stringify(output)}`
|
||||
);
|
||||
}
|
||||
};
|
||||
79
backend/src/ai-processes/ai-processes.schemas.ts
Normal file
79
backend/src/ai-processes/ai-processes.schemas.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import Ajv, { JSONSchemaType } from 'ajv';
|
||||
import addFormats from 'ajv-formats';
|
||||
import {
|
||||
AiNodeType,
|
||||
ProcessGraphDefinition,
|
||||
ProcessGraphEdge,
|
||||
ProcessGraphNode,
|
||||
} from './ai-processes.types';
|
||||
|
||||
// Every node type a process graph may contain; used as the schema enum below.
const nodeTypes: AiNodeType[] = [
  'Start',
  'LLMDecisionNode',
  'ToolNode',
  'HumanInputNode',
  'End',
];

/**
 * Ajv JSON schema for a ProcessGraphDefinition. Node/edge shapes live under
 * `definitions` and are referenced via $ref. Per-node `data` is deliberately
 * an open object here — node-type-specific configuration is checked by the
 * compiler, not the schema.
 */
export const graphSchema: any = {
  type: 'object',
  required: ['id', 'name', 'nodes', 'edges'],
  additionalProperties: false,
  properties: {
    id: { type: 'string' },
    name: { type: 'string' },
    description: { type: 'string', nullable: true },
    allowCycles: { type: 'boolean', nullable: true },
    maxIterations: { type: 'number', nullable: true },
    nodes: {
      type: 'array',
      items: { $ref: '#/definitions/processGraphNode' },
      minItems: 1,
    },
    edges: {
      type: 'array',
      items: { $ref: '#/definitions/processGraphEdge' },
      minItems: 0,
    },
  },
  definitions: {
    processGraphEdge: {
      type: 'object',
      required: ['id', 'source', 'target'],
      additionalProperties: false,
      properties: {
        id: { type: 'string' },
        source: { type: 'string' },
        target: { type: 'string' },
        // Optional json-logic expression gating traversal of this edge.
        condition: { type: 'object', nullable: true },
      },
    },
    processGraphNode: {
      type: 'object',
      required: ['id', 'type', 'data'],
      additionalProperties: false,
      properties: {
        id: { type: 'string' },
        type: { type: 'string', enum: nodeTypes },
        // React Flow canvas position; optional because it is UI-only.
        position: {
          type: 'object',
          nullable: true,
          required: ['x', 'y'],
          additionalProperties: false,
          properties: {
            x: { type: 'number' },
            y: { type: 'number' },
          },
        },
        data: { type: 'object' },
      },
    },
  },
};
|
||||
|
||||
export const createAjv = () => {
|
||||
const ajv = new Ajv({ allErrors: true, strict: false });
|
||||
addFormats(ajv);
|
||||
ajv.addSchema(graphSchema, 'processGraph');
|
||||
return ajv;
|
||||
};
|
||||
319
backend/src/ai-processes/ai-processes.service.ts
Normal file
319
backend/src/ai-processes/ai-processes.service.ts
Normal file
@@ -0,0 +1,319 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { Knex } from 'knex';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import {
|
||||
AiAuditEvent,
|
||||
AiProcess,
|
||||
AiProcessRun,
|
||||
AiProcessVersion,
|
||||
} from '../models/ai-process.model';
|
||||
import { compileProcessGraph } from './ai-processes.compiler';
|
||||
import { runCompiledGraph } from './ai-processes.runner';
|
||||
import {
|
||||
AiProcessEventPayload,
|
||||
CompiledGraph,
|
||||
ProcessGraphDefinition,
|
||||
} from './ai-processes.types';
|
||||
import { ToolRegistry } from './tools/tool-registry';
|
||||
import { demoTools } from './tools/demo-tools';
|
||||
|
||||
@Injectable()
export class AiProcessesService {
  constructor(private readonly tenantDbService: TenantDatabaseService) {}

  /**
   * Resolve the caller-supplied tenant identifier and obtain the
   * tenant-scoped Knex connection. Every public method funnels through this
   * so all queries run against the correct tenant database.
   */
  private async getTenantContext(tenantId: string) {
    const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
    const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
    return { knex, tenantId: resolvedTenantId };
  }

  /** List all processes (newest first) with their versions eagerly loaded. */
  // NOTE(review): resolvedTenantId is destructured but unused in this method.
  async listProcesses(tenantId: string) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    return AiProcess.query(knex)
      .withGraphFetched('versions')
      .orderBy('created_at', 'desc');
  }

  /** Fetch a single process (with versions); resolves undefined if missing. */
  async getProcess(tenantId: string, processId: string) {
    const { knex } = await this.getTenantContext(tenantId);
    return AiProcess.query(knex)
      .findById(processId)
      .withGraphFetched('versions');
  }

  /**
   * Create a process and its first version atomically.
   * The graph is compiled up front so an invalid graph fails before any row
   * is written. Both inserts happen in one transaction.
   */
  async createProcess(
    tenantId: string,
    userId: string,
    name: string,
    description: string | undefined,
    graph: ProcessGraphDefinition,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const compiled = compileProcessGraph(graph, {
      tenantId: resolvedTenantId,
      version: 1,
    });

    return knex.transaction(async (trx) => {
      const processId = randomUUID();

      await AiProcess.query(trx).insert({
        id: processId,
        name,
        description,
        latestVersion: 1,
        createdBy: userId,
      });

      // Raw insert (snake_case columns) rather than the Objection model;
      // graph and compiled output are stored as JSON strings.
      await trx('ai_process_versions').insert({
        id: randomUUID(),
        process_id: processId,
        version: 1,
        graph_json: JSON.stringify(graph),
        compiled_json: JSON.stringify(compiled),
        created_by: userId,
        created_at: new Date(),
      });

      return AiProcess.query(trx)
        .findById(processId)
        .withGraphFetched('versions');
    });
  }

  /**
   * Append a new immutable version to an existing process and bump the
   * process's latestVersion pointer, in one transaction.
   * @throws Error when the process does not exist.
   */
  async createProcessVersion(
    tenantId: string,
    userId: string,
    processId: string,
    graph: ProcessGraphDefinition,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);

    const process = await AiProcess.query(knex).findById(processId);
    if (!process) {
      throw new Error('Process not found.');
    }

    const nextVersion = process.latestVersion + 1;
    // Compile before opening the transaction so a bad graph writes nothing.
    const compiled = compileProcessGraph(graph, {
      tenantId: resolvedTenantId,
      version: nextVersion,
    });

    return knex.transaction(async (trx) => {
      await AiProcess.query(trx)
        .findById(processId)
        .patch({ latestVersion: nextVersion });

      const versionId = randomUUID();
      await trx('ai_process_versions').insert({
        id: versionId,
        process_id: processId,
        version: nextVersion,
        graph_json: JSON.stringify(graph),
        compiled_json: JSON.stringify(compiled),
        created_by: userId,
        created_at: new Date(),
      });

      return AiProcessVersion.query(trx).findById(versionId);
    });
  }

  /** List every stored version of a process, newest first. */
  // NOTE(review): resolvedTenantId is destructured but unused in this method.
  async listProcessVersions(tenantId: string, processId: string) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    return AiProcessVersion.query(knex)
      .where({ process_id: processId })
      .orderBy('version', 'desc');
  }

  /**
   * Start a run of the process's latest version and execute the compiled
   * graph immediately. The run row is inserted with status 'running' before
   * execution so the run id exists for audit events; the final state is
   * persisted afterwards by persistRunResult.
   *
   * @param emitEvent optional sink for live (e.g. SSE) event streaming; every
   *                  event is also written to the audit log.
   */
  async createRun(
    tenantId: string,
    userId: string,
    processId: string,
    input: Record<string, unknown>,
    sessionId: string | undefined,
    emitEvent?: (payload: AiProcessEventPayload) => void,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const process = await AiProcess.query(knex).findById(processId);
    if (!process) {
      throw new Error('Process not found.');
    }

    // Runs are always pinned to the latest version at creation time.
    const versionRecord = await AiProcessVersion.query(knex).findOne({
      process_id: processId,
      version: process.latestVersion,
    });

    if (!versionRecord) {
      throw new Error('Process version not found.');
    }

    const runId = randomUUID();
    await AiProcessRun.query(knex).insert({
      id: runId,
      processId,
      version: versionRecord.version,
      status: 'running',
      inputJson: input,
      stateJson: input,
      currentNodeId: null,
    });

    const run = await AiProcessRun.query(knex).findById(runId);
    if (!run) {
      throw new Error('Run not created.');
    }

    const compiled = versionRecord.compiledJson as unknown as CompiledGraph;
    const toolRegistry = new ToolRegistry(demoTools);
    await toolRegistry.loadTenantAllowlist(resolvedTenantId, knex);

    // Mirror every runtime event into the audit table. The insert is
    // fire-and-forget (`void`, not awaited), so audit write failures are
    // silently dropped and do not block execution.
    const emitAndAudit = (event: AiProcessEventPayload) => {
      emitEvent?.(event);
      void AiAuditEvent.query(knex).insert({
        id: randomUUID(),
        runId,
        eventType: event.type,
        payloadJson: event as any,
      });
    };
    const result = await runCompiledGraph(
      {
        compiledGraph: compiled,
        input,
        toolRegistry,
        toolContext: { tenantId: resolvedTenantId, userId, knex },
        onEvent: (event) => emitAndAudit({ ...event, runId, sessionId }),
        // LLM decisions are stubbed with deterministic heuristics for now.
        llmDecision: async (node, state) =>
          this.mockDecision(node.id, state),
      },
      // Freshly inserted run has currentNodeId null, so start from the top.
      run.currentNodeId ?? undefined,
    );

    const updatedRun = await this.persistRunResult(runId, result, knex);

    return { run: updatedRun, result };
  }

  /**
   * Resume a run that paused (e.g. on a HumanInputNode): merge the new input
   * over the persisted state and continue from the saved currentNodeId.
   * Uses the version the run was started with, not the latest.
   */
  async resumeRun(
    tenantId: string,
    userId: string,
    runId: string,
    input: Record<string, unknown>,
    sessionId: string | undefined,
    emitEvent?: (payload: AiProcessEventPayload) => void,
  ) {
    const { knex, tenantId: resolvedTenantId } =
      await this.getTenantContext(tenantId);
    const run = await AiProcessRun.query(knex).findById(runId);
    if (!run) {
      throw new Error('Run not found.');
    }
    const versionRecord = await AiProcessVersion.query(knex).findOne({
      process_id: run.processId,
      version: run.version,
    });
    if (!versionRecord) {
      throw new Error('Process version not found.');
    }

    const compiled = versionRecord.compiledJson as unknown as CompiledGraph;
    const toolRegistry = new ToolRegistry(demoTools);
    await toolRegistry.loadTenantAllowlist(resolvedTenantId, knex);

    // New input wins over previously persisted state on key collisions.
    const mergedState = { ...(run.stateJson || {}), ...input };
    // Same fire-and-forget audit mirroring as createRun.
    const emitAndAudit = (event: AiProcessEventPayload) => {
      emitEvent?.(event);
      void AiAuditEvent.query(knex).insert({
        id: randomUUID(),
        runId: run.id,
        eventType: event.type,
        payloadJson: event as any,
      });
    };

    const result = await runCompiledGraph(
      {
        compiledGraph: compiled,
        input: mergedState,
        toolRegistry,
        toolContext: { tenantId: resolvedTenantId, userId, knex },
        onEvent: (event) =>
          emitAndAudit({ ...event, runId: run.id, sessionId }),
        llmDecision: async (node, state) =>
          this.mockDecision(node.id, state),
      },
      run.currentNodeId ?? undefined,
    );

    const updatedRun = await this.persistRunResult(run.id, result, knex);

    return { run: updatedRun, result };
  }

  /**
   * Write a runner result back onto the run row. endedAt is only stamped for
   * terminal statuses ('completed' / 'error'); a 'waiting' run keeps running.
   */
  private async persistRunResult(runId: string, result: any, knex: Knex) {
    const endedAt =
      result.status === 'completed' || result.status === 'error'
        ? new Date()
        : null;

    return AiProcessRun.query(knex).patchAndFetchById(runId, {
      status: result.status,
      outputJson: result.output,
      errorJson: result.error,
      stateJson: result.state,
      currentNodeId: result.currentNodeId ?? null,
      endedAt,
    });
  }

  /**
   * Deterministic stand-in for real LLM decision nodes, keyed by node id.
   * Unknown node ids yield an empty decision object.
   */
  private async mockDecision(
    nodeId: string,
    state: Record<string, unknown>,
  ) {
    if (nodeId === 'extract_info') {
      // Extract pet registration info from the message
      const message = (state.message as string) || '';

      // Simple extraction (in production, this would use an LLM)
      const petNameMatch = message.match(/(?:dog|cat|pet)\s+named\s+(\w+)/i);
      const petTypeMatch = message.match(/(dog|cat)/i);
      const ownerNameMatch = message.match(/owned\s+by\s+([\w\s]+?)(?:\s*\(|$)/i);
      const emailMatch = message.match(/\(?([\w\.-]+@[\w\.-]+\.\w+)\)?/i);

      const ownerName = ownerNameMatch?.[1]?.trim() || 'Unknown Owner';
      const nameParts = ownerName.split(/\s+/);
      const firstName = nameParts[0] || 'Unknown';
      const lastName = nameParts.slice(1).join(' ') || 'Owner';

      return {
        petName: petNameMatch?.[1] || 'Unknown Pet',
        species: petTypeMatch?.[1]?.toLowerCase() || 'dog',
        ownerFirstName: firstName,
        ownerLastName: lastName,
        ownerEmail: emailMatch?.[1] || null,
        accountName: `${firstName} ${lastName}`,
      };
    }
    if (nodeId === 'decide_account') {
      // Prefer reusing an existing account when the state already carries one.
      const accountName = (state.accountName as string) ?? 'New Account';
      const accountAction = state.accountId ? 'find' : 'create';
      return { accountAction, accountName };
    }
    if (nodeId === 'decide_contact') {
      const firstName = (state.firstName as string) ?? 'Jane';
      const lastName = (state.lastName as string) ?? 'Doe';
      const contactAction = state.contactId ? 'find' : 'create';
      return { contactAction, firstName, lastName };
    }
    return {};
  }
}
|
||||
33
backend/src/ai-processes/ai-processes.stream.service.ts
Normal file
33
backend/src/ai-processes/ai-processes.stream.service.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { MessageEvent } from '@nestjs/common';
|
||||
import { Observable, Subject } from 'rxjs';
|
||||
import { AiProcessEventPayload } from './ai-processes.types';
|
||||
|
||||
@Injectable()
|
||||
export class AiProcessesStreamService {
|
||||
private readonly streams = new Map<string, Subject<MessageEvent>>();
|
||||
|
||||
getStream(sessionId: string): Observable<MessageEvent> {
|
||||
return this.getSubject(sessionId).asObservable();
|
||||
}
|
||||
|
||||
emit(sessionId: string, payload: AiProcessEventPayload) {
|
||||
const subject = this.getSubject(sessionId);
|
||||
subject.next({ type: payload.type, data: payload });
|
||||
}
|
||||
|
||||
close(sessionId: string) {
|
||||
const subject = this.streams.get(sessionId);
|
||||
if (subject) {
|
||||
subject.complete();
|
||||
this.streams.delete(sessionId);
|
||||
}
|
||||
}
|
||||
|
||||
private getSubject(sessionId: string) {
|
||||
if (!this.streams.has(sessionId)) {
|
||||
this.streams.set(sessionId, new Subject<MessageEvent>());
|
||||
}
|
||||
return this.streams.get(sessionId) as Subject<MessageEvent>;
|
||||
}
|
||||
}
|
||||
125
backend/src/ai-processes/ai-processes.types.ts
Normal file
125
backend/src/ai-processes/ai-processes.types.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { JSONSchema7 } from 'json-schema';
|
||||
|
||||
/** Node kinds understood by the process compiler and runner. */
export type AiNodeType =
  | 'Start'
  | 'LLMDecisionNode'
  | 'ToolNode'
  | 'HumanInputNode'
  | 'End';

/** Admin-authored graph as designed in the UI (pre-compilation). */
export interface ProcessGraphDefinition {
  id: string;
  name: string;
  description?: string;
  // When true, the compiler permits cycles; maxIterations bounds re-entry.
  allowCycles?: boolean;
  maxIterations?: number;
  nodes: ProcessGraphNode[];
  edges: ProcessGraphEdge[];
}

/** A single node; `data` shape is discriminated by `type`. */
export interface ProcessGraphNode {
  id: string;
  type: AiNodeType;
  // Canvas coordinates from the visual editor; irrelevant at runtime.
  position?: { x: number; y: number };
  data:
    | StartNodeData
    | LLMDecisionNodeData
    | ToolNodeData
    | HumanInputNodeData
    | EndNodeData;
}

/** Directed edge; optional JsonLogic condition gates the transition. */
export interface ProcessGraphEdge {
  id: string;
  source: string;
  target: string;
  condition?: JsonLogicExpression;
}

// Opaque JsonLogic rule object, evaluated against the run state.
export type JsonLogicExpression = Record<string, unknown>;

export interface StartNodeData {
  label?: string;
}

export interface EndNodeData {
  label?: string;
}

/** Configuration for a node whose output is decided by an LLM call. */
export interface LLMDecisionNodeData {
  label?: string;
  promptTemplate: string;
  // State keys exposed to the prompt template.
  inputKeys: string[];
  // JSON Schema the LLM's structured output must satisfy.
  outputSchema: JSONSchema7;
  model: {
    name: string;
    temperature: number;
  };
}

/** Configuration for a node that invokes a registered tool. */
export interface ToolNodeData {
  label?: string;
  toolName: string;
  // Templated arguments (e.g. '{{state.foo}}') resolved from run state.
  argsTemplate: Record<string, unknown>;
  // Maps tool-result keys to state keys to write back.
  outputMapping: Record<string, string>;
}

/** Configuration for a node that pauses the run to ask the user. */
export interface HumanInputNodeData {
  label?: string;
  requiredFieldsSchema: JSONSchema7;
  promptToUser: string;
}

/** Output of graph compilation: validated topology plus adjacency index. */
export interface CompiledGraph {
  graphId: string;
  version: number;
  nodes: ProcessGraphNode[];
  edges: ProcessGraphEdge[];
  startNodeId: string;
  endNodeIds: string[];
  // node id -> outgoing target node ids.
  adjacency: Record<string, string[]>;
  allowCycles?: boolean;
  maxIterations?: number;
}

/** Lifecycle of a run; 'waiting' means paused on a HumanInputNode. */
export type AiProcessStatus = 'running' | 'waiting' | 'completed' | 'error';

/** Mutable execution context carried between runner steps. */
export interface AiProcessRunContext {
  state: Record<string, unknown>;
  currentNodeId?: string;
  iterationCount?: number;
}

/** Event types streamed to clients and written to the audit log. */
export type AiProcessEventType =
  | 'agent_started'
  | 'processes_listed'
  | 'process_selected'
  | 'agent_message'
  | 'node_started'
  | 'tool_called'
  | 'node_completed'
  | 'need_input'
  | 'final'
  | 'error';

/** Envelope for a single runtime event; optional fields vary by type. */
export interface AiProcessEventPayload {
  type: AiProcessEventType;
  runId?: string;
  sessionId?: string;
  nodeId?: string;
  toolName?: string;
  processId?: string;
  version?: number;
  data?: Record<string, unknown>;
}

/** Payload sent when a run pauses and needs user-provided fields. */
export interface NeedInputPayload {
  runId: string;
  requiredFieldsSchema: JSONSchema7;
  promptToUser: string;
}

/** Identifies the concrete process version chosen for execution. */
export interface ProcessSelection {
  processId: string;
  version: number;
}
|
||||
202
backend/src/ai-processes/deep-agent.orchestrator.ts
Normal file
202
backend/src/ai-processes/deep-agent.orchestrator.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { JsonOutputParser } from '@langchain/core/output_parsers';
|
||||
import { SystemMessage, HumanMessage } from '@langchain/core/messages';
|
||||
|
||||
/** Minimal process descriptor shown to the selection LLM. */
export interface ProcessInfo {
  id: string;
  name: string;
  description?: string;
}

/**
 * Outcome of the process-selection step.
 * - 'select_process': processId identifies the chosen process.
 * - 'need_more_info': question carries a clarifying question for the user.
 * - 'no_match': nothing applicable (reasoning explains why).
 */
export interface ProcessSelectionResult {
  action: 'select_process' | 'need_more_info' | 'no_match';
  processId?: string;
  question?: string;
  reasoning?: string;
}

/**
 * Outcome of the input-extraction step. hasAllInputs is true only when every
 * required field was found; otherwise missingFields/question describe what
 * to ask the user next.
 */
export interface InputExtractionResult {
  hasAllInputs: boolean;
  extractedInputs: Record<string, unknown>;
  missingFields?: string[];
  question?: string;
}
|
||||
|
||||
export class DeepAgentOrchestrator {
|
||||
private model: ChatOpenAI;
|
||||
|
||||
constructor(
|
||||
apiKey: string,
|
||||
modelName: string = 'gpt-4o',
|
||||
temperature: number = 0,
|
||||
) {
|
||||
this.model = new ChatOpenAI({
|
||||
apiKey,
|
||||
modelName,
|
||||
temperature,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 1: Select the best matching process from available processes
|
||||
*/
|
||||
async selectProcess(
|
||||
userMessage: string,
|
||||
availableProcesses: ProcessInfo[],
|
||||
conversationHistory?: { role: string; text: string }[],
|
||||
): Promise<ProcessSelectionResult> {
|
||||
const processList = availableProcesses
|
||||
.map((p) => `- ${p.name} (ID: ${p.id}): ${p.description || 'No description'}`)
|
||||
.join('\n');
|
||||
|
||||
const historyContext =
|
||||
conversationHistory && conversationHistory.length > 0
|
||||
? `\n\nConversation history:\n${conversationHistory
|
||||
.map((msg) => `${msg.role}: ${msg.text}`)
|
||||
.join('\n')}`
|
||||
: '';
|
||||
|
||||
const systemPrompt = `You are an intelligent process orchestrator. Your task is to select the most appropriate business process based on the user's request.
|
||||
|
||||
Available processes:
|
||||
${processList}
|
||||
|
||||
Rules:
|
||||
1. Select exactly ONE process that best matches the user's intent
|
||||
2. If the request is ambiguous or matches multiple processes, ask for clarification
|
||||
3. If no process matches, indicate no match
|
||||
4. Always provide reasoning for your decision
|
||||
|
||||
Respond with JSON:
|
||||
{
|
||||
"action": "select_process" | "need_more_info" | "no_match",
|
||||
"processId": "selected process ID or null",
|
||||
"question": "clarifying question if needed",
|
||||
"reasoning": "brief explanation of decision"
|
||||
}`;
|
||||
|
||||
const userPrompt = `User request: ${userMessage}${historyContext}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
const parser = new JsonOutputParser<ProcessSelectionResult>();
|
||||
const content = response.content as string;
|
||||
const jsonMatch = content.match(/\{[\s\S]*\}/);
|
||||
|
||||
if (jsonMatch) {
|
||||
return await parser.parse(jsonMatch[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
action: 'no_match',
|
||||
reasoning: 'Failed to parse LLM response',
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('Process selection error:', error);
|
||||
return {
|
||||
action: 'no_match',
|
||||
reasoning: `Error: ${error.message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 2: Extract required inputs from user message
|
||||
*/
|
||||
async extractInputs(
|
||||
userMessage: string,
|
||||
requiredFields: { name: string; description: string; required: boolean }[],
|
||||
conversationHistory?: { role: string; text: string }[],
|
||||
context?: Record<string, unknown>,
|
||||
): Promise<InputExtractionResult> {
|
||||
const fieldsList = requiredFields
|
||||
.map((f) => `- ${f.name} (${f.required ? 'required' : 'optional'}): ${f.description}`)
|
||||
.join('\n');
|
||||
|
||||
const historyContext =
|
||||
conversationHistory && conversationHistory.length > 0
|
||||
? `\n\nConversation history:\n${conversationHistory
|
||||
.map((msg) => `${msg.role}: ${msg.text}`)
|
||||
.join('\n')}`
|
||||
: '';
|
||||
|
||||
const contextInfo = context ? `\n\nAvailable context: ${JSON.stringify(context)}` : '';
|
||||
|
||||
const systemPrompt = `You are an input extraction assistant. Extract structured data from the user's message and conversation history.
|
||||
|
||||
Required fields for this process:
|
||||
${fieldsList}${contextInfo}
|
||||
|
||||
Rules:
|
||||
1. Extract as many fields as possible from the message and context
|
||||
2. Only mark hasAllInputs=true if ALL required fields are present
|
||||
3. If required fields are missing, generate a natural question to ask the user
|
||||
4. Use context data when available (e.g., current page context)
|
||||
|
||||
Respond with JSON:
|
||||
{
|
||||
"hasAllInputs": true | false,
|
||||
"extractedInputs": { "field1": "value1", ... },
|
||||
"missingFields": ["field1", "field2"] or undefined,
|
||||
"question": "natural language question" or undefined
|
||||
}`;
|
||||
|
||||
const userPrompt = `User message: ${userMessage}${historyContext}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
const parser = new JsonOutputParser<InputExtractionResult>();
|
||||
const content = response.content as string;
|
||||
const jsonMatch = content.match(/\{[\s\S]*\}/);
|
||||
|
||||
if (jsonMatch) {
|
||||
return await parser.parse(jsonMatch[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
hasAllInputs: false,
|
||||
extractedInputs: {},
|
||||
missingFields: requiredFields.filter((f) => f.required).map((f) => f.name),
|
||||
question: 'I need more information to proceed. Could you provide additional details?',
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('Input extraction error:', error);
|
||||
return {
|
||||
hasAllInputs: false,
|
||||
extractedInputs: {},
|
||||
question: 'I encountered an error processing your request. Please try again.',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Step 3: Generate a friendly response explaining what will happen
|
||||
*/
|
||||
async generateStartMessage(
|
||||
processName: string,
|
||||
extractedInputs: Record<string, unknown>,
|
||||
): Promise<string> {
|
||||
const systemPrompt = `You are a friendly assistant explaining what process will be executed. Be concise and clear.`;
|
||||
|
||||
const userPrompt = `Generate a brief message (1-2 sentences) confirming that you will execute the "${processName}" process with these inputs: ${JSON.stringify(extractedInputs)}`;
|
||||
|
||||
try {
|
||||
const response = await this.model.invoke([
|
||||
new SystemMessage(systemPrompt),
|
||||
new HumanMessage(userPrompt),
|
||||
]);
|
||||
|
||||
return (response.content as string).trim();
|
||||
} catch (error) {
|
||||
return `I'll execute the ${processName} process with your provided information.`;
|
||||
}
|
||||
}
|
||||
}
|
||||
173
backend/src/ai-processes/demo-process.ts
Normal file
173
backend/src/ai-processes/demo-process.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { ProcessGraphDefinition } from './ai-processes.types';
|
||||
|
||||
export const demoRegisterNewPetProcess: ProcessGraphDefinition = {
|
||||
id: 'register_new_pet',
|
||||
name: 'Register New Pet',
|
||||
description: 'Resolve account/contact then create pet.',
|
||||
allowCycles: false,
|
||||
nodes: [
|
||||
{
|
||||
id: 'start',
|
||||
type: 'Start',
|
||||
data: { label: 'Start' },
|
||||
},
|
||||
{
|
||||
id: 'decide_account',
|
||||
type: 'LLMDecisionNode',
|
||||
data: {
|
||||
label: 'Decide Account Action',
|
||||
promptTemplate:
|
||||
'Decide whether to find or create an account. Return JSON {"accountAction":"find|create","accountName":"string"}.',
|
||||
inputKeys: ['accountName'],
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
required: ['accountAction', 'accountName'],
|
||||
properties: {
|
||||
accountAction: { type: 'string', enum: ['find', 'create'] },
|
||||
accountName: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
model: { name: 'gpt-4o-mini', temperature: 0 },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_account',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Find Account',
|
||||
toolName: 'findAccount',
|
||||
argsTemplate: { accountName: '{{state.accountName}}' },
|
||||
outputMapping: { accountId: 'accountId', found: 'accountFound' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_account',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Account',
|
||||
toolName: 'createAccount',
|
||||
argsTemplate: { accountName: '{{state.accountName}}' },
|
||||
outputMapping: { accountId: 'accountId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'decide_contact',
|
||||
type: 'LLMDecisionNode',
|
||||
data: {
|
||||
label: 'Decide Contact Action',
|
||||
promptTemplate:
|
||||
'Decide whether to find or create a contact. Return JSON {"contactAction":"find|create","firstName":"string","lastName":"string"}.',
|
||||
inputKeys: ['firstName', 'lastName'],
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
required: ['contactAction', 'firstName', 'lastName'],
|
||||
properties: {
|
||||
contactAction: { type: 'string', enum: ['find', 'create'] },
|
||||
firstName: { type: 'string' },
|
||||
lastName: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
model: { name: 'gpt-4o-mini', temperature: 0 },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'find_contact',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Find Contact',
|
||||
toolName: 'findContact',
|
||||
argsTemplate: {
|
||||
accountId: '{{state.accountId}}',
|
||||
firstName: '{{state.firstName}}',
|
||||
lastName: '{{state.lastName}}',
|
||||
},
|
||||
outputMapping: { contactId: 'contactId', found: 'contactFound' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_contact',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Contact',
|
||||
toolName: 'createContact',
|
||||
argsTemplate: {
|
||||
accountId: '{{state.accountId}}',
|
||||
firstName: '{{state.firstName}}',
|
||||
lastName: '{{state.lastName}}',
|
||||
},
|
||||
outputMapping: { contactId: 'contactId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'need_pet',
|
||||
type: 'HumanInputNode',
|
||||
data: {
|
||||
label: 'Collect Pet Info',
|
||||
promptToUser: 'What is the pet name and type?',
|
||||
requiredFieldsSchema: {
|
||||
type: 'object',
|
||||
required: ['petName', 'petType'],
|
||||
properties: {
|
||||
petName: { type: 'string' },
|
||||
petType: { type: 'string' },
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'create_pet',
|
||||
type: 'ToolNode',
|
||||
data: {
|
||||
label: 'Create Pet',
|
||||
toolName: 'createPet',
|
||||
argsTemplate: {
|
||||
contactId: '{{state.contactId}}',
|
||||
petName: '{{state.petName}}',
|
||||
petType: '{{state.petType}}',
|
||||
},
|
||||
outputMapping: { petId: 'petId' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'end',
|
||||
type: 'End',
|
||||
data: { label: 'End' },
|
||||
},
|
||||
],
|
||||
edges: [
|
||||
{ id: 'e_start_account', source: 'start', target: 'decide_account' },
|
||||
{
|
||||
id: 'e_account_find',
|
||||
source: 'decide_account',
|
||||
target: 'find_account',
|
||||
condition: { '==': [{ var: 'accountAction' }, 'find'] },
|
||||
},
|
||||
{
|
||||
id: 'e_account_create',
|
||||
source: 'decide_account',
|
||||
target: 'create_account',
|
||||
condition: { '==': [{ var: 'accountAction' }, 'create'] },
|
||||
},
|
||||
{ id: 'e_account_to_contact', source: 'find_account', target: 'decide_contact' },
|
||||
{ id: 'e_create_account_to_contact', source: 'create_account', target: 'decide_contact' },
|
||||
{
|
||||
id: 'e_contact_find',
|
||||
source: 'decide_contact',
|
||||
target: 'find_contact',
|
||||
condition: { '==': [{ var: 'contactAction' }, 'find'] },
|
||||
},
|
||||
{
|
||||
id: 'e_contact_create',
|
||||
source: 'decide_contact',
|
||||
target: 'create_contact',
|
||||
condition: { '==': [{ var: 'contactAction' }, 'create'] },
|
||||
},
|
||||
{ id: 'e_contact_to_pet', source: 'find_contact', target: 'need_pet' },
|
||||
{ id: 'e_create_contact_to_pet', source: 'create_contact', target: 'need_pet' },
|
||||
{ id: 'e_need_pet_to_create', source: 'need_pet', target: 'create_pet' },
|
||||
{ id: 'e_pet_to_end', source: 'create_pet', target: 'end' },
|
||||
],
|
||||
};
|
||||
28
backend/src/ai-processes/dto/ai-chat.dto.ts
Normal file
28
backend/src/ai-processes/dto/ai-chat.dto.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { IsArray, IsObject, IsOptional, IsString } from 'class-validator';
|
||||
|
||||
/** Request body for opening a new AI chat session. */
export class CreateChatSessionDto {
  // Optional free-form context string (e.g. current page) for the agent.
  @IsOptional()
  @IsString()
  context?: string;
}
|
||||
|
||||
/** Request body for sending one user message to the chat orchestrator. */
export class SendChatMessageDto {
  // The user's natural-language message.
  @IsString()
  message!: string;

  // Prior conversation turns, used for process selection / input extraction.
  @IsOptional()
  @IsArray()
  history?: { role: string; text: string }[];

  // Structured context (e.g. current record/page) available to extraction.
  @IsOptional()
  @IsObject()
  context?: Record<string, unknown>;

  // Existing session to continue; omitted for a fresh conversation.
  @IsOptional()
  @IsString()
  sessionId?: string;

  // Pins the message to a specific process, bypassing selection.
  @IsOptional()
  @IsString()
  processId?: string;
}
|
||||
24
backend/src/ai-processes/dto/ai-process.dto.ts
Normal file
24
backend/src/ai-processes/dto/ai-process.dto.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { IsArray, IsObject, IsOptional, IsString } from 'class-validator';
|
||||
import { ProcessGraphDefinition } from '../ai-processes.types';
|
||||
|
||||
/** Request body for creating a process (version 1 is created from `graph`). */
export class CreateAiProcessDto {
  @IsString()
  name!: string;

  @IsOptional()
  @IsString()
  description?: string;

  // Full designer graph; validated/compiled server-side.
  @IsObject()
  graph!: ProcessGraphDefinition;
}
|
||||
|
||||
/** Request body for appending a new version to an existing process. */
export class UpdateAiProcessDto {
  @IsObject()
  graph!: ProcessGraphDefinition;
}
|
||||
|
||||
/** Response envelope for the process-list endpoint. */
export class AiProcessListResponseDto {
  @IsArray()
  items!: Record<string, unknown>[];
}
|
||||
19
backend/src/ai-processes/dto/ai-run.dto.ts
Normal file
19
backend/src/ai-processes/dto/ai-run.dto.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { IsObject, IsOptional, IsString } from 'class-validator';
|
||||
|
||||
/** Request body for starting a run of a process's latest version. */
export class CreateAiRunDto {
  // Initial run state/input for the graph.
  @IsObject()
  input!: Record<string, unknown>;

  // Optional chat/SSE session to stream run events to.
  @IsOptional()
  @IsString()
  sessionId?: string;
}
|
||||
|
||||
/** Request body for resuming a run paused on a HumanInputNode. */
export class ResumeAiRunDto {
  // User-provided fields; merged over the persisted run state.
  @IsObject()
  input!: Record<string, unknown>;

  @IsOptional()
  @IsString()
  sessionId?: string;
}
|
||||
226
backend/src/ai-processes/tools/demo-tools.ts
Normal file
226
backend/src/ai-processes/tools/demo-tools.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import { ToolContext, ToolHandler } from './tool-registry';
|
||||
import { Account } from '../../models/account.model';
|
||||
import { Contact } from '../../models/contact.model';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
|
||||
* Demo tools that wrap ObjectService operations
|
||||
* These tools provide structured access to CRM entities
|
||||
*/
|
||||
|
||||
/**
 * Look up the first account whose name contains the given substring.
 * Returns { found, accountId, account? } rather than throwing; query errors
 * are reported as { found: false, error }.
 * NOTE(review): callers must pass the lookup string under `name` — keep
 * ToolNode argsTemplates in sync with this key.
 */
export const findAccount: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for findAccount');
  }

  const { name } = args as { name?: string };

  if (!name) {
    return { found: false, accountId: null, message: 'Name required' };
  }

  try {
    // Substring match; `%`/`_` in the input act as LIKE wildcards.
    const query = Account.query(ctx.knex).where('name', 'like', `%${name}%`);

    const account = await query.first();

    if (account) {
      return {
        found: true,
        accountId: account.id,
        account: {
          id: account.id,
          name: account.name,
        },
      };
    }

    return { found: false, accountId: null };
  } catch (error: any) {
    return { found: false, error: error.message };
  }
};
|
||||
|
||||
export const createAccount: ToolHandler = async (ctx, args) => {
|
||||
if (!ctx.knex) {
|
||||
throw new Error('Knex connection required for createAccount');
|
||||
}
|
||||
|
||||
const { name, email, phone, industry } = args as {
|
||||
name: string;
|
||||
email?: string;
|
||||
phone?: string;
|
||||
industry?: string;
|
||||
};
|
||||
|
||||
if (!name) {
|
||||
throw new Error('Account name is required');
|
||||
}
|
||||
|
||||
try {
|
||||
const accountId = randomUUID();
|
||||
await ctx.knex('accounts').insert({
|
||||
id: accountId,
|
||||
name,
|
||||
phone,
|
||||
industry,
|
||||
ownerId: ctx.userId,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
accountId,
|
||||
account: {
|
||||
id: accountId,
|
||||
name,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Find the first contact matching substring filters on first/last name,
 * optionally restricted to an account. At least one name part is required.
 * Returns { found, contactId, contact? }; query errors are reported as
 * { found: false, error } rather than thrown.
 */
export const findContact: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for findContact');
  }

  const { firstName, lastName, accountId } = args as {
    firstName?: string;
    lastName?: string;
    accountId?: string;
  };

  if (!firstName && !lastName) {
    return {
      found: false,
      contactId: null,
      message: 'First name or last name required',
    };
  }

  try {
    let query = Contact.query(ctx.knex);

    // Filters are additive: each provided arg narrows the query.
    if (firstName) {
      query = query.where('firstName', 'like', `%${firstName}%`);
    }
    if (lastName) {
      query = query.where('lastName', 'like', `%${lastName}%`);
    }
    if (accountId) {
      query = query.where('accountId', accountId);
    }

    const contact = await query.first();

    if (contact) {
      return {
        found: true,
        contactId: contact.id,
        contact: {
          id: contact.id,
          firstName: contact.firstName,
          lastName: contact.lastName,
          accountId: contact.accountId,
        },
      };
    }

    return { found: false, contactId: null };
  } catch (error: any) {
    return { found: false, error: error.message };
  }
};
|
||||
|
||||
export const createContact: ToolHandler = async (ctx, args) => {
|
||||
if (!ctx.knex) {
|
||||
throw new Error('Knex connection required for createContact');
|
||||
}
|
||||
|
||||
const { firstName, lastName, email, phone, accountId } = args as {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
email?: string;
|
||||
phone?: string;
|
||||
accountId?: string;
|
||||
};
|
||||
|
||||
if (!firstName || !lastName) {
|
||||
throw new Error('First name and last name are required');
|
||||
}
|
||||
|
||||
try {
|
||||
const contactId = randomUUID();
|
||||
await ctx.knex('contacts').insert({
|
||||
id: contactId,
|
||||
firstName,
|
||||
lastName,
|
||||
accountId,
|
||||
ownerId: ctx.userId,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
contactId,
|
||||
contact: {
|
||||
id: contactId,
|
||||
firstName,
|
||||
lastName,
|
||||
accountId,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Create a pet record in the `dogs` table, linked to an owning contact.
 * Returns `{ success: true, petId, pet }` or `{ success: false, error }`.
 */
export const createPet: ToolHandler = async (ctx, args) => {
  if (!ctx.knex) {
    throw new Error('Knex connection required for createPet');
  }

  const { name, species, breed, age, ownerId } = args as {
    name: string;
    species: string;
    breed?: string;
    age?: number;
    ownerId: string; // Contact ID
  };

  // NOTE(review): `species`, `breed`, and `age` are accepted and typed
  // above but never persisted in the insert below — confirm whether the
  // `dogs` table has these columns and whether they should be included.
  if (!name || !ownerId) {
    throw new Error('Pet name and owner (contact) are required');
  }

  try {
    const petId = randomUUID();

    // Get the accountId from the contact
    const contact = await ctx.knex('contacts').where('id', ownerId).first();

    // Insert into dogs table
    await ctx.knex('dogs').insert({
      id: petId,
      name,
      ownerId,
      // undefined when the contact lookup found nothing — the insert
      // still proceeds; only the account link is lost.
      accountId: contact?.accountId,
    });

    return {
      success: true,
      petId,
      pet: { id: petId, name, ownerId, accountId: contact?.accountId },
    };
  } catch (error: any) {
    return { success: false, error: error.message };
  }
};
|
||||
|
||||
// Export all demo tools
// Map of tool name -> handler, consumed by the ToolRegistry; the keys are
// the names that compiled process graphs reference.
export const demoTools = {
  findAccount,
  createAccount,
  findContact,
  createContact,
  createPet,
};
|
||||
89
backend/src/ai-processes/tools/tool-registry.ts
Normal file
89
backend/src/ai-processes/tools/tool-registry.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { Knex } from 'knex';
|
||||
import { AiToolConfig } from '../../models/ai-process.model';
|
||||
|
||||
/** Per-invocation execution context passed to every tool handler. */
export interface ToolContext {
  // Tenant the invocation is scoped to.
  tenantId: string;
  // User on whose behalf the tool runs (used e.g. as record owner).
  userId: string;
  // Optional DB connection; handlers that need it must check for it.
  knex?: Knex;
  // Optional auth scopes granted to the caller.
  authScopes?: string[];
}

/** A tool implementation: receives context + untyped args, returns a result map. */
export type ToolHandler = (
  ctx: ToolContext,
  args: Record<string, unknown>,
) => Promise<Record<string, unknown>>;

/** Metadata describing a registered tool. */
export interface ToolDefinition {
  name: string;
  description: string;
  handler: ToolHandler;
  inputSchema?: Record<string, unknown>;
}

// Stub implementations used when no real tools are registered; they return
// synthetic IDs derived from the current timestamp and echo the args back.
const defaultTools: Record<string, ToolHandler> = {
  findAccount: async () => ({ accountId: null, found: false }),
  createAccount: async (_ctx, args) => ({ accountId: `acc_${Date.now()}`, args }),
  findContact: async () => ({ contactId: null, found: false }),
  createContact: async (_ctx, args) => ({ contactId: `con_${Date.now()}`, args }),
  createPet: async (_ctx, args) => ({ petId: `pet_${Date.now()}`, args }),
};

// Static fallback allowlist, used only when no DB-backed allowlist is
// available; `default` applies to tenants with no explicit entry.
const tenantAllowlist: Record<string, string[]> = {
  default: Object.keys(defaultTools),
};
|
||||
|
||||
/**
 * Registry of tool handlers plus a per-tenant allowlist.
 *
 * Allowlist resolution order: in-memory DB cache, then a live DB load
 * (when a knex connection is supplied), then the static fallback map.
 */
export class ToolRegistry {
  private tools: Record<string, ToolHandler>;
  private allowlist: Record<string, string[]>;
  // tenantId -> set of allowed tool names, populated by loadTenantAllowlist.
  // NOTE(review): entries are never evicted, so allowlist changes require a
  // restart (or an explicit reload) to take effect — confirm this is intended.
  private dbAllowlistCache: Map<string, Set<string>> = new Map();

  constructor(
    tools: Record<string, ToolHandler> = defaultTools,
    allowlist: Record<string, string[]> = tenantAllowlist,
  ) {
    this.tools = tools;
    this.allowlist = allowlist;
  }

  /** Register (or replace) a tool handler under `name`. */
  registerTool(name: string, handler: ToolHandler) {
    this.tools[name] = handler;
  }

  /**
   * Load the enabled-tool allowlist from the database and cache it for
   * `tenantId`.
   * NOTE(review): the query filters only on `enabled` — it presumably relies
   * on `knex` already being a tenant-scoped connection; if the table is
   * shared across tenants, a tenant filter is missing here. Confirm.
   */
  async loadTenantAllowlist(tenantId: string, knex: Knex) {
    const configs = await AiToolConfig.query(knex)
      .where('enabled', true);

    const allowed = new Set(configs.map((c) => c.toolName));
    this.dbAllowlistCache.set(tenantId, allowed);
    return allowed;
  }

  /** True if `toolName` is allowed for `tenantId` (cache → DB → static fallback). */
  async isToolAllowed(tenantId: string, toolName: string, knex?: Knex) {
    // Check database cache first
    if (this.dbAllowlistCache.has(tenantId)) {
      return this.dbAllowlistCache.get(tenantId)!.has(toolName);
    }

    // Load from database if knex provided
    if (knex) {
      const allowed = await this.loadTenantAllowlist(tenantId, knex);
      return allowed.has(toolName);
    }

    // Fallback to static allowlist
    const allowed = this.allowlist[tenantId] || this.allowlist.default || [];
    return allowed.includes(toolName);
  }

  /**
   * Return the handler for `toolName`.
   * @throws Error when the tool is not registered. Note: does NOT consult
   * the allowlist — callers must check isToolAllowed separately.
   */
  getTool(toolName: string): ToolHandler {
    const tool = this.tools[toolName];
    if (!tool) {
      throw new Error(`Tool ${toolName} is not registered.`);
    }
    return tool;
  }

  /** Names of all registered tools, regardless of allowlisting. */
  getAllToolNames(): string[] {
    return Object.keys(this.tools);
  }
}
|
||||
@@ -7,6 +7,9 @@ import { RbacModule } from './rbac/rbac.module';
|
||||
import { ObjectModule } from './object/object.module';
|
||||
import { AppBuilderModule } from './app-builder/app-builder.module';
|
||||
import { PageLayoutModule } from './page-layout/page-layout.module';
|
||||
import { VoiceModule } from './voice/voice.module';
|
||||
import { AiAssistantModule } from './ai-assistant/ai-assistant.module';
|
||||
import { AiProcessesModule } from './ai-processes/ai-processes.module';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -20,6 +23,9 @@ import { PageLayoutModule } from './page-layout/page-layout.module';
|
||||
ObjectModule,
|
||||
AppBuilderModule,
|
||||
PageLayoutModule,
|
||||
VoiceModule,
|
||||
AiAssistantModule,
|
||||
AiProcessesModule,
|
||||
],
|
||||
})
|
||||
export class AppModule {}
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Ability, AbilityBuilder, AbilityClass, ExtractSubjectType, InferSubjects, createMongoAbility } from '@casl/ability';
|
||||
import { User } from '../models/user.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { FieldDefinition } from '../models/field-definition.model';
|
||||
import { RoleRule } from '../models/role-rule.model';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
import { UserRole } from '../models/user-role.model';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
// Define actions
// The verbs CASL rules are expressed in; 'share' covers granting record shares.
export type Action = 'read' | 'create' | 'update' | 'delete' | 'share';

// Define subjects - can be string (object type key) or model class
export type Subjects = InferSubjects<any> | 'all';

// The app-wide ability type: rules pair an Action with a Subjects value.
export type AppAbility = Ability<[Action, Subjects]>;
|
||||
|
||||
@Injectable()
|
||||
export class AbilityFactory {
|
||||
/**
|
||||
* Build CASL Ability for a user
|
||||
* Rules come from 3 layers:
|
||||
* 1. Global object rules (from object_definitions + object_fields)
|
||||
* 2. Role rules (from role_rules)
|
||||
* 3. Share rules (from record_shares for this user)
|
||||
*/
|
||||
async buildForUser(user: User, knex: Knex): Promise<AppAbility> {
|
||||
const { can, cannot, build } = new AbilityBuilder<AppAbility>(
|
||||
createMongoAbility as any,
|
||||
);
|
||||
|
||||
// 1. Load global object rules
|
||||
await this.addGlobalRules(user, knex, can, cannot);
|
||||
|
||||
// 2. Load role rules
|
||||
await this.addRoleRules(user, knex, can);
|
||||
|
||||
// 3. Load share rules
|
||||
await this.addShareRules(user, knex, can);
|
||||
|
||||
return build({
|
||||
// Optional: detect subject type from instance
|
||||
detectSubjectType: (item) => {
|
||||
if (typeof item === 'string') return item;
|
||||
return item.constructor?.name || 'unknown';
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add global rules from object_definitions and object_fields
|
||||
*/
|
||||
private async addGlobalRules(
|
||||
user: User,
|
||||
knex: Knex,
|
||||
can: any,
|
||||
cannot: any,
|
||||
) {
|
||||
const objectDefs = await knex<ObjectDefinition>('object_definitions').select('*');
|
||||
|
||||
for (const objDef of objectDefs) {
|
||||
const subject = objDef.apiName;
|
||||
|
||||
// Handle public access
|
||||
if (objDef.publicRead) {
|
||||
can('read', subject);
|
||||
}
|
||||
if (objDef.publicCreate) {
|
||||
can('create', subject);
|
||||
}
|
||||
if (objDef.publicUpdate) {
|
||||
can('update', subject);
|
||||
}
|
||||
if (objDef.publicDelete) {
|
||||
can('delete', subject);
|
||||
}
|
||||
|
||||
// Handle owner-based access
|
||||
if (objDef.accessModel === 'owner' || objDef.accessModel === 'mixed') {
|
||||
const ownerCondition = { [objDef.ownerField]: user.id };
|
||||
|
||||
can('read', subject, ownerCondition);
|
||||
can('update', subject, ownerCondition);
|
||||
can('delete', subject, ownerCondition);
|
||||
can('share', subject, ownerCondition); // Owner can share their records
|
||||
}
|
||||
|
||||
// Load field-level permissions for this object
|
||||
const fields = await knex<FieldDefinition>('field_definitions')
|
||||
.where('objectDefinitionId', objDef.id)
|
||||
.select('*');
|
||||
|
||||
// Build field lists
|
||||
const readableFields = fields
|
||||
.filter((f) => f.defaultReadable)
|
||||
.map((f) => f.apiName);
|
||||
const writableFields = fields
|
||||
.filter((f) => f.defaultWritable)
|
||||
.map((f) => f.apiName);
|
||||
|
||||
// Add field-level rules if we have field restrictions
|
||||
if (fields.length > 0) {
|
||||
// For read, limit to readable fields
|
||||
if (readableFields.length > 0) {
|
||||
can('read', subject, readableFields);
|
||||
}
|
||||
// For update/create, limit to writable fields
|
||||
if (writableFields.length > 0) {
|
||||
can(['update', 'create'], subject, writableFields);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add role-based rules from role_rules
|
||||
*/
|
||||
private async addRoleRules(user: User, knex: Knex, can: any) {
|
||||
// Get user's roles
|
||||
const userRoles = await knex<UserRole>('user_roles')
|
||||
.where('userId', user.id)
|
||||
.select('roleId');
|
||||
|
||||
if (userRoles.length === 0) return;
|
||||
|
||||
const roleIds = userRoles.map((ur) => ur.roleId);
|
||||
|
||||
// Get all role rules for these roles
|
||||
const roleRules = await knex<RoleRule>('role_rules')
|
||||
.whereIn('roleId', roleIds)
|
||||
.select('*');
|
||||
|
||||
for (const roleRule of roleRules) {
|
||||
// Parse and add each rule from the JSON
|
||||
const rules = roleRule.rulesJson;
|
||||
if (Array.isArray(rules)) {
|
||||
rules.forEach((rule) => {
|
||||
if (rule.inverted) {
|
||||
// Handle "cannot" rules
|
||||
// CASL format: { action, subject, conditions?, fields?, inverted: true }
|
||||
// We'd need to properly parse this - for now, skip inverted rules in factory
|
||||
} else {
|
||||
// Handle "can" rules
|
||||
const { action, subject, conditions, fields } = rule;
|
||||
|
||||
if (fields && fields.length > 0) {
|
||||
can(action, subject, fields, conditions);
|
||||
} else if (conditions) {
|
||||
can(action, subject, conditions);
|
||||
} else {
|
||||
can(action, subject);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add per-record sharing rules from record_shares
|
||||
*/
|
||||
private async addShareRules(user: User, knex: Knex, can: any) {
|
||||
const now = new Date();
|
||||
|
||||
// Get all active shares for this user (grantee)
|
||||
const shares = await knex<RecordShare>('record_shares')
|
||||
.where('granteeUserId', user.id)
|
||||
.whereNull('revokedAt')
|
||||
.where(function () {
|
||||
this.whereNull('expiresAt').orWhere('expiresAt', '>', now);
|
||||
})
|
||||
.select('*');
|
||||
|
||||
// Also need to join with object_definitions to get the apiName (subject)
|
||||
const sharesWithObjects = await knex('record_shares')
|
||||
.join('object_definitions', 'record_shares.objectDefinitionId', 'object_definitions.id')
|
||||
.where('record_shares.granteeUserId', user.id)
|
||||
.whereNull('record_shares.revokedAt')
|
||||
.where(function () {
|
||||
this.whereNull('record_shares.expiresAt').orWhere('record_shares.expiresAt', '>', now);
|
||||
})
|
||||
.select(
|
||||
'record_shares.*',
|
||||
'object_definitions.apiName as objectApiName',
|
||||
);
|
||||
|
||||
for (const share of sharesWithObjects) {
|
||||
const subject = share.objectApiName;
|
||||
const actions = Array.isArray(share.actions) ? share.actions : JSON.parse(share.actions);
|
||||
const fields = share.fields ? (Array.isArray(share.fields) ? share.fields : JSON.parse(share.fields)) : null;
|
||||
|
||||
// Create condition: record must match the shared recordId
|
||||
const condition = { id: share.recordId };
|
||||
|
||||
for (const action of actions) {
|
||||
if (fields && fields.length > 0) {
|
||||
// Field-scoped share
|
||||
can(action, subject, fields, condition);
|
||||
} else {
|
||||
// Full record share
|
||||
can(action, subject, condition);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,8 +6,6 @@ import { AuthService } from './auth.service';
|
||||
import { AuthController } from './auth.controller';
|
||||
import { JwtStrategy } from './jwt.strategy';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { AbilityFactory } from './ability.factory';
|
||||
import { AbilitiesGuard } from './guards/abilities.guard';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -21,8 +19,8 @@ import { AbilitiesGuard } from './guards/abilities.guard';
|
||||
}),
|
||||
}),
|
||||
],
|
||||
providers: [AuthService, JwtStrategy, AbilityFactory, AbilitiesGuard],
|
||||
providers: [AuthService, JwtStrategy],
|
||||
controllers: [AuthController],
|
||||
exports: [AuthService, AbilityFactory, AbilitiesGuard],
|
||||
exports: [AuthService],
|
||||
})
|
||||
export class AuthModule {}
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import { createParamDecorator, ExecutionContext } from '@nestjs/common';
|
||||
import { AppAbility } from '../ability.factory';
|
||||
|
||||
/**
 * Decorator to inject the current user's ability into a route handler
 * Usage: @CurrentAbility() ability: AppAbility
 *
 * Reads `request.ability`; this is only populated when the ability
 * middleware/guard has run first, otherwise it resolves to undefined.
 */
export const CurrentAbility = createParamDecorator(
  (data: unknown, ctx: ExecutionContext): AppAbility => {
    const request = ctx.switchToHttp().getRequest();
    return request.ability;
  },
);
|
||||
|
||||
/**
 * Decorator to inject the current user into a route handler
 * Usage: @CurrentUser() user: User
 *
 * Reads `request.user` as set by the authentication layer; undefined on
 * unauthenticated requests.
 */
export const CurrentUser = createParamDecorator(
  (data: unknown, ctx: ExecutionContext) => {
    const request = ctx.switchToHttp().getRequest();
    return request.user;
  },
);
|
||||
@@ -1,10 +0,0 @@
|
||||
import { SetMetadata } from '@nestjs/common';
|
||||
import { Action } from '../ability.factory';
|
||||
import { CHECK_ABILITY_KEY, RequiredRule } from '../guards/abilities.guard';
|
||||
|
||||
/**
 * Decorator to check abilities
 * Usage: @CheckAbility({ action: 'read', subject: 'Post' })
 *
 * Attaches the required rules as handler metadata under CHECK_ABILITY_KEY;
 * AbilitiesGuard reads and enforces them at request time.
 */
export const CheckAbility = (...rules: RequiredRule[]) =>
  SetMetadata(CHECK_ABILITY_KEY, rules);
|
||||
@@ -1,51 +0,0 @@
|
||||
import { Injectable, CanActivate, ExecutionContext, ForbiddenException } from '@nestjs/common';
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { Action, AppAbility } from '../ability.factory';
|
||||
|
||||
/** A single action/subject pair that a route requires the caller to have. */
export interface RequiredRule {
  action: Action;
  subject: string;
}

/**
 * Key for metadata
 * (shared between the @CheckAbility decorator and AbilitiesGuard)
 */
export const CHECK_ABILITY_KEY = 'check_ability';
|
||||
|
||||
/**
|
||||
* Guard that checks CASL abilities
|
||||
* Use with @CheckAbility() decorator
|
||||
*/
|
||||
@Injectable()
|
||||
export class AbilitiesGuard implements CanActivate {
|
||||
constructor(private reflector: Reflector) {}
|
||||
|
||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
const rules = this.reflector.get<RequiredRule[]>(
|
||||
CHECK_ABILITY_KEY,
|
||||
context.getHandler(),
|
||||
) || [];
|
||||
|
||||
if (rules.length === 0) {
|
||||
return true; // No rules specified, allow
|
||||
}
|
||||
|
||||
const request = context.switchToHttp().getRequest();
|
||||
const ability: AppAbility = request.ability;
|
||||
|
||||
if (!ability) {
|
||||
throw new ForbiddenException('Ability not found on request');
|
||||
}
|
||||
|
||||
// Check all rules
|
||||
for (const rule of rules) {
|
||||
if (!ability.can(rule.action, rule.subject)) {
|
||||
throw new ForbiddenException(
|
||||
`You don't have permission to ${rule.action} ${rule.subject}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
import { Injectable, NestMiddleware, Inject } from '@nestjs/common';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { AbilityFactory } from '../ability.factory';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
/**
 * Middleware to build and attach CASL ability to request
 * Must run after authentication middleware
 *
 * NOTE(review): rebuilds the ability (several DB queries) on every
 * authenticated request — consider caching if this becomes hot.
 */
@Injectable()
export class AbilityMiddleware implements NestMiddleware {
  constructor(
    private readonly abilityFactory: AbilityFactory,
    @Inject('KnexConnection') private readonly knex: Knex,
  ) {}

  async use(req: Request & { user?: any; ability?: any }, res: Response, next: NextFunction) {
    if (req.user) {
      // Build ability for authenticated user; unauthenticated requests
      // pass through with req.ability left undefined.
      req.ability = await this.abilityFactory.buildForUser(req.user, this.knex);
    }
    next();
  }
}
|
||||
@@ -1,145 +0,0 @@
|
||||
import { QueryBuilder, Model } from 'objection';
|
||||
import { User } from '../models/user.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
/**
|
||||
* Query scoping utilities for authorization
|
||||
* Apply SQL-level filtering to ensure users only see records they have access to
|
||||
*/
|
||||
|
||||
/** Inputs required to scope a query to what `user` may `action` on an object type. */
export interface AuthScopeOptions {
  user: User;
  objectDefinition: ObjectDefinition;
  // Only data-mutating/reading actions — 'create'/'share' are not query-scoped.
  action: 'read' | 'update' | 'delete';
  knex: Knex;
}
|
||||
|
||||
/**
 * Apply authorization scope to a query builder
 * This implements the SQL equivalent of the CASL ability checks
 *
 * Rules:
 * 1. If object is public_{action} => allow all
 * 2. If object is owner/mixed => allow owned OR shared
 *
 * Mutates and returns the same builder. NOTE(review): the share subquery
 * relies on MySQL's JSON_CONTAINS — not portable to other databases.
 */
export function applyAuthScope<M extends Model>(
  query: QueryBuilder<M, M[]>,
  options: AuthScopeOptions,
): QueryBuilder<M, M[]> {
  const { user, objectDefinition, action, knex } = options;

  // If public access for this action, no restrictions
  if (
    (action === 'read' && objectDefinition.publicRead) ||
    (action === 'update' && objectDefinition.publicUpdate) ||
    (action === 'delete' && objectDefinition.publicDelete)
  ) {
    return query;
  }

  // Otherwise, apply owner + share logic
  const ownerField = objectDefinition.ownerField || 'ownerId';
  const tableName = query.modelClass().tableName;

  return query.where((builder) => {
    // Owner condition
    builder.where(`${tableName}.${ownerField}`, user.id);

    // OR shared condition: an active (non-revoked, non-expired) share row
    // for this user/record whose actions JSON array contains `action`.
    builder.orWhereExists((subquery) => {
      subquery
        .from('record_shares')
        .join('object_definitions', 'record_shares.object_definition_id', 'object_definitions.id')
        // ?? is an identifier binding — compares against the outer row's id.
        .whereRaw('record_shares.record_id = ??', [`${tableName}.id`])
        .where('record_shares.grantee_user_id', user.id)
        .where('object_definitions.id', objectDefinition.id)
        .whereNull('record_shares.revoked_at')
        .where(function () {
          this.whereNull('record_shares.expires_at')
            .orWhere('record_shares.expires_at', '>', knex.fn.now());
        })
        .whereRaw("JSON_CONTAINS(record_shares.actions, ?)", [JSON.stringify(action)]);
    });
  });
}
|
||||
|
||||
/**
|
||||
* Apply read scope - most common use case
|
||||
*/
|
||||
export function applyReadScope<M extends Model>(
|
||||
query: QueryBuilder<M, M[]>,
|
||||
user: User,
|
||||
objectDefinition: ObjectDefinition,
|
||||
knex: Knex,
|
||||
): QueryBuilder<M, M[]> {
|
||||
return applyAuthScope(query, { user, objectDefinition, action: 'read', knex });
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply update scope
|
||||
*/
|
||||
export function applyUpdateScope<M extends Model>(
|
||||
query: QueryBuilder<M, M[]>,
|
||||
user: User,
|
||||
objectDefinition: ObjectDefinition,
|
||||
knex: Knex,
|
||||
): QueryBuilder<M, M[]> {
|
||||
return applyAuthScope(query, { user, objectDefinition, action: 'update', knex });
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply delete scope
|
||||
*/
|
||||
export function applyDeleteScope<M extends Model>(
|
||||
query: QueryBuilder<M, M[]>,
|
||||
user: User,
|
||||
objectDefinition: ObjectDefinition,
|
||||
knex: Knex,
|
||||
): QueryBuilder<M, M[]> {
|
||||
return applyAuthScope(query, { user, objectDefinition, action: 'delete', knex });
|
||||
}
|
||||
|
||||
/**
 * Check if user can access a specific record
 * This is for single-record operations
 *
 * NOTE(review): as the inline comments admit, this does NOT check record
 * ownership — for owner/mixed objects without a public flag, the owner of
 * a record will get `false` here unless a share also exists. Callers must
 * perform the owner check themselves (they have the fetched record).
 */
export async function canAccessRecord(
  recordId: string,
  user: User,
  objectDefinition: ObjectDefinition,
  action: 'read' | 'update' | 'delete',
  knex: Knex,
): Promise<boolean> {
  // If public access for this action
  if (
    (action === 'read' && objectDefinition.publicRead) ||
    (action === 'update' && objectDefinition.publicUpdate) ||
    (action === 'delete' && objectDefinition.publicDelete)
  ) {
    return true;
  }

  const ownerField = objectDefinition.ownerField || 'ownerId';

  // Check if user owns the record (we need the table name, which we can't easily get here)
  // This function is meant to be used with a fetched record
  // For now, we'll check shares only

  // Check if there's a valid share
  const now = new Date();
  const share = await knex('record_shares')
    .where({
      objectDefinitionId: objectDefinition.id,
      recordId: recordId,
      granteeUserId: user.id,
    })
    .whereNull('revokedAt')
    .where(function () {
      this.whereNull('expiresAt').orWhere('expiresAt', '>', now);
    })
    // MySQL-specific: actions is a JSON array column.
    .whereRaw("JSON_CONTAINS(actions, ?)", [JSON.stringify(action)])
    .first();

  return !!share;
}
|
||||
@@ -3,13 +3,15 @@ import {
|
||||
FastifyAdapter,
|
||||
NestFastifyApplication,
|
||||
} from '@nestjs/platform-fastify';
|
||||
import { ValidationPipe } from '@nestjs/common';
|
||||
import { ValidationPipe, Logger } from '@nestjs/common';
|
||||
import { AppModule } from './app.module';
|
||||
import { VoiceService } from './voice/voice.service';
|
||||
import { AudioConverterService } from './voice/audio-converter.service';
|
||||
|
||||
async function bootstrap() {
|
||||
const app = await NestFactory.create<NestFastifyApplication>(
|
||||
AppModule,
|
||||
new FastifyAdapter(),
|
||||
new FastifyAdapter({ logger: true }),
|
||||
);
|
||||
|
||||
// Global validation pipe
|
||||
@@ -33,6 +35,145 @@ async function bootstrap() {
|
||||
const port = process.env.PORT || 3000;
|
||||
await app.listen(port, '0.0.0.0');
|
||||
|
||||
// After app is listening, register WebSocket handler
|
||||
const fastifyInstance = app.getHttpAdapter().getInstance();
|
||||
const logger = new Logger('MediaStreamWS');
|
||||
const voiceService = app.get(VoiceService);
|
||||
const audioConverter = app.get(AudioConverterService);
|
||||
|
||||
const WebSocketServer = require('ws').Server;
|
||||
const wss = new WebSocketServer({ noServer: true });
|
||||
|
||||
// Handle WebSocket upgrades at the server level
|
||||
const server = (fastifyInstance.server as any);
|
||||
|
||||
// Track active Media Streams connections: streamSid -> WebSocket
|
||||
const mediaStreams: Map<string, any> = new Map();
|
||||
|
||||
server.on('upgrade', (request: any, socket: any, head: any) => {
|
||||
if (request.url === '/api/voice/media-stream') {
|
||||
logger.log('=== MEDIA STREAM WEBSOCKET UPGRADE REQUEST ===');
|
||||
logger.log(`Path: ${request.url}`);
|
||||
|
||||
wss.handleUpgrade(request, socket, head, (ws: any) => {
|
||||
logger.log('=== MEDIA STREAM WEBSOCKET UPGRADED SUCCESSFULLY ===');
|
||||
handleMediaStreamSocket(ws);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
  // Per-connection handler for a Twilio Media Streams WebSocket.
  // Tracks the stream identity from the 'start' event, forwards inbound
  // audio to the OpenAI Realtime session, and tears everything down on 'stop'.
  // Closes over `logger`, `mediaStreams`, `voiceService`, `audioConverter`
  // from the enclosing bootstrap scope.
  async function handleMediaStreamSocket(ws: any) {
    // Connection state, populated by the 'start' event.
    let streamSid: string | null = null;
    let callSid: string | null = null;
    let tenantDomain: string | null = null;
    let mediaPacketCount = 0;

    ws.on('message', async (message: Buffer) => {
      try {
        const msg = JSON.parse(message.toString());

        switch (msg.event) {
          case 'connected':
            logger.log('=== MEDIA STREAM EVENT: CONNECTED ===');
            logger.log(`Protocol: ${msg.protocol}`);
            logger.log(`Version: ${msg.version}`);
            break;

          case 'start':
            streamSid = msg.streamSid;
            callSid = msg.start.callSid;
            // Tenant is passed via Twilio custom parameters; defaults to 'tenant1'.
            tenantDomain = msg.start.customParameters?.tenantId || 'tenant1';

            logger.log(`=== MEDIA STREAM EVENT: START ===`);
            logger.log(`StreamSid: ${streamSid}`);
            logger.log(`CallSid: ${callSid}`);
            logger.log(`Tenant: ${tenantDomain}`);
            logger.log(`MediaFormat: ${JSON.stringify(msg.start.mediaFormat)}`);

            mediaStreams.set(streamSid, ws);
            logger.log(`Stored WebSocket for streamSid: ${streamSid}. Total active streams: ${mediaStreams.size}`);

            // Initialize OpenAI Realtime connection
            logger.log(`Initializing OpenAI Realtime for call ${callSid}...`);
            try {
              await voiceService.initializeOpenAIRealtime({
                callSid,
                tenantId: tenantDomain,
                userId: msg.start.customParameters?.userId || 'system',
              });
              logger.log(`✓ OpenAI Realtime initialized for call ${callSid}`);
            } catch (error: any) {
              // Non-fatal: media packets will still arrive but forwarding may fail.
              logger.error(`Failed to initialize OpenAI: ${error.message}`);
            }
            break;

          case 'media':
            mediaPacketCount++;
            // Only log every 500 packets to reduce noise
            if (mediaPacketCount % 500 === 0) {
              logger.log(`Received media packet #${mediaPacketCount} for StreamSid: ${streamSid}`);
            }

            if (!callSid || !tenantDomain) {
              logger.warn('Received media before start event');
              break;
            }

            try {
              // Convert Twilio audio (μ-law 8kHz) to OpenAI format (PCM16 24kHz)
              const twilioAudio = msg.media.payload;
              const openaiAudio = audioConverter.twilioToOpenAI(twilioAudio);

              // Send audio to OpenAI Realtime API
              await voiceService.sendAudioToOpenAI(callSid, openaiAudio);
            } catch (error: any) {
              logger.error(`Error processing media: ${error.message}`);
            }
            break;

          case 'stop':
            logger.log(`=== MEDIA STREAM EVENT: STOP ===`);
            logger.log(`StreamSid: ${streamSid}`);
            logger.log(`Total media packets received: ${mediaPacketCount}`);

            if (streamSid) {
              mediaStreams.delete(streamSid);
              logger.log(`Removed WebSocket for streamSid: ${streamSid}`);
            }

            // Clean up OpenAI connection
            if (callSid) {
              try {
                logger.log(`Cleaning up OpenAI connection for call ${callSid}...`);
                await voiceService.cleanupOpenAIConnection(callSid);
                logger.log(`✓ OpenAI connection cleaned up`);
              } catch (error: any) {
                logger.error(`Failed to cleanup OpenAI: ${error.message}`);
              }
            }
            break;

          default:
            logger.debug(`Unknown media stream event: ${msg.event}`);
        }
      } catch (error: any) {
        // Covers JSON parse failures and unexpected handler errors.
        logger.error(`Error processing media stream message: ${error.message}`);
      }
    });

    ws.on('close', () => {
      logger.log(`=== MEDIA STREAM WEBSOCKET CLOSED ===`);
      // Abrupt close (no 'stop' event): drop the stream mapping.
      // NOTE(review): the OpenAI connection is NOT cleaned up here — only on
      // 'stop'. Confirm whether cleanup should also happen on abrupt close.
      if (streamSid) {
        mediaStreams.delete(streamSid);
      }
    });

    ws.on('error', (error: Error) => {
      logger.error(`=== MEDIA STREAM WEBSOCKET ERROR ===`);
      logger.error(`Error message: ${error.message}`);
    });
  }
|
||||
|
||||
console.log(`🚀 Application is running on: http://localhost:${port}/api`);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import type { Knex } from 'knex';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
export interface CustomMigrationRecord {
|
||||
id: string;
|
||||
|
||||
63
backend/src/models/ai-chat.model.ts
Normal file
63
backend/src/models/ai-chat.model.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { snakeCaseMappers } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
/** A chat conversation owned by one user; parent of AiChatMessage rows. */
export class AiChatSession extends BaseModel {
  static tableName = 'ai_chat_sessions';
  static columnNameMappers = snakeCaseMappers();

  id!: string;
  userId!: string;
  createdAt!: Date;

  $beforeInsert() {
    // Assign defaults only when not already provided by the caller.
    this.id = this.id || randomUUID();
    this.createdAt = this.createdAt || new Date();
  }

  // Intentionally empty — presumably suppresses BaseModel's update-time
  // behavior (e.g. updatedAt stamping) for this table. Confirm against BaseModel.
  $beforeUpdate() {}

  static get relationMappings() {
    return {
      messages: {
        relation: BaseModel.HasManyRelation,
        modelClass: AiChatMessage,
        join: {
          from: 'ai_chat_sessions.id',
          to: 'ai_chat_messages.session_id',
        },
      },
    };
  }
}
|
||||
|
||||
/** One message within an AiChatSession (role + content, append-only usage pattern). */
export class AiChatMessage extends BaseModel {
  static tableName = 'ai_chat_messages';
  static columnNameMappers = snakeCaseMappers();

  id!: string;
  sessionId!: string;
  // Speaker role (e.g. user/assistant); not constrained at the model level.
  role!: string;
  content!: string;
  createdAt!: Date;

  $beforeInsert() {
    // Assign defaults only when not already provided by the caller.
    this.id = this.id || randomUUID();
    this.createdAt = this.createdAt || new Date();
  }

  // Intentionally empty — see AiChatSession; keeps updates free of BaseModel hooks.
  $beforeUpdate() {}

  static get relationMappings() {
    return {
      session: {
        relation: BaseModel.BelongsToOneRelation,
        modelClass: AiChatSession,
        join: {
          from: 'ai_chat_messages.session_id',
          to: 'ai_chat_sessions.id',
        },
      },
    };
  }
}
|
||||
164
backend/src/models/ai-process.model.ts
Normal file
164
backend/src/models/ai-process.model.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { QueryContext, snakeCaseMappers } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
/**
 * An AI process definition. Immutable versions live in AiProcessVersion;
 * `latestVersion` tracks the newest one. Executions are AiProcessRun rows.
 */
export class AiProcess extends BaseModel {
  static tableName = 'ai_processes';
  static columnNameMappers = snakeCaseMappers();

  id!: string;
  name!: string;
  description?: string;
  latestVersion!: number;
  createdBy!: string;
  createdAt!: Date;
  updatedAt!: Date;

  $beforeInsert(queryContext: QueryContext) {
    this.id = this.id || randomUUID();
    // Unlike the chat models, this delegates timestamp handling to BaseModel.
    super.$beforeInsert(queryContext);
  }

  static get relationMappings() {
    return {
      versions: {
        relation: BaseModel.HasManyRelation,
        modelClass: AiProcessVersion,
        join: {
          from: 'ai_processes.id',
          to: 'ai_process_versions.process_id',
        },
      },
      runs: {
        relation: BaseModel.HasManyRelation,
        modelClass: AiProcessRun,
        join: {
          from: 'ai_processes.id',
          to: 'ai_process_runs.process_id',
        },
      },
    };
  }
}
|
||||
|
||||
export class AiProcessVersion extends BaseModel {
|
||||
static tableName = 'ai_process_versions';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['graphJson', 'compiledJson'];
|
||||
|
||||
id!: string;
|
||||
processId!: string;
|
||||
version!: number;
|
||||
graphJson!: Record<string, unknown>;
|
||||
compiledJson!: Record<string, unknown>;
|
||||
createdBy!: string;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
process: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcess,
|
||||
join: {
|
||||
from: 'ai_process_versions.process_id',
|
||||
to: 'ai_processes.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiProcessRun extends BaseModel {
|
||||
static tableName = 'ai_process_runs';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['inputJson', 'outputJson', 'errorJson', 'stateJson'];
|
||||
|
||||
id!: string;
|
||||
processId!: string;
|
||||
version!: number;
|
||||
status!: string;
|
||||
inputJson!: Record<string, unknown>;
|
||||
outputJson?: Record<string, unknown> | null;
|
||||
errorJson?: Record<string, unknown> | null;
|
||||
stateJson?: Record<string, unknown>;
|
||||
currentNodeId?: string | null;
|
||||
startedAt?: Date;
|
||||
endedAt?: Date | null;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.startedAt = this.startedAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
process: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcess,
|
||||
join: {
|
||||
from: 'ai_process_runs.process_id',
|
||||
to: 'ai_processes.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiAuditEvent extends BaseModel {
|
||||
static tableName = 'ai_audit_events';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['payloadJson'];
|
||||
|
||||
id!: string;
|
||||
runId!: string;
|
||||
eventType!: string;
|
||||
payloadJson!: Record<string, unknown>;
|
||||
createdAt!: Date;
|
||||
|
||||
$beforeInsert() {
|
||||
this.id = this.id || randomUUID();
|
||||
this.createdAt = this.createdAt || new Date();
|
||||
}
|
||||
|
||||
$beforeUpdate() {}
|
||||
|
||||
static get relationMappings() {
|
||||
return {
|
||||
run: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: AiProcessRun,
|
||||
join: {
|
||||
from: 'ai_audit_events.run_id',
|
||||
to: 'ai_process_runs.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class AiToolConfig extends BaseModel {
|
||||
static tableName = 'ai_tool_configs';
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
static jsonAttributes = ['configJson'];
|
||||
|
||||
id!: string;
|
||||
toolName!: string;
|
||||
enabled!: boolean;
|
||||
configJson?: Record<string, unknown>;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
$beforeInsert(queryContext: QueryContext) {
|
||||
this.id = this.id || randomUUID();
|
||||
super.$beforeInsert(queryContext);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,38 @@
|
||||
import { Model, ModelOptions, QueryContext, snakeCaseMappers } from 'objection';
|
||||
import { Model, ModelOptions, QueryContext } from 'objection';
|
||||
|
||||
export class BaseModel extends Model {
|
||||
static columnNameMappers = snakeCaseMappers();
|
||||
/**
|
||||
* Use a minimal column mapper: keep property names as-is, but handle
|
||||
* timestamp fields that are stored as created_at/updated_at in the DB.
|
||||
*/
|
||||
static columnNameMappers = {
|
||||
parse(dbRow: Record<string, any>) {
|
||||
const mapped: Record<string, any> = {};
|
||||
for (const [key, value] of Object.entries(dbRow || {})) {
|
||||
if (key === 'created_at') {
|
||||
mapped.createdAt = value;
|
||||
} else if (key === 'updated_at') {
|
||||
mapped.updatedAt = value;
|
||||
} else {
|
||||
mapped[key] = value;
|
||||
}
|
||||
}
|
||||
return mapped;
|
||||
},
|
||||
format(model: Record<string, any>) {
|
||||
const mapped: Record<string, any> = {};
|
||||
for (const [key, value] of Object.entries(model || {})) {
|
||||
if (key === 'createdAt') {
|
||||
mapped.created_at = value;
|
||||
} else if (key === 'updatedAt') {
|
||||
mapped.updated_at = value;
|
||||
} else {
|
||||
mapped[key] = value;
|
||||
}
|
||||
}
|
||||
return mapped;
|
||||
},
|
||||
};
|
||||
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
|
||||
33
backend/src/models/contact-detail.model.ts
Normal file
33
backend/src/models/contact-detail.model.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class ContactDetail extends BaseModel {
|
||||
static tableName = 'contact_details';
|
||||
|
||||
id!: string;
|
||||
relatedObjectType!: 'Account' | 'Contact';
|
||||
relatedObjectId!: string;
|
||||
detailType!: string;
|
||||
label?: string;
|
||||
value!: string;
|
||||
isPrimary!: boolean;
|
||||
|
||||
// Provide optional relations for each supported parent type.
|
||||
static relationMappings = {
|
||||
account: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'account.model',
|
||||
join: {
|
||||
from: 'contact_details.relatedObjectId',
|
||||
to: 'accounts.id',
|
||||
},
|
||||
},
|
||||
contact: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'contact.model',
|
||||
join: {
|
||||
from: 'contact_details.relatedObjectId',
|
||||
to: 'contacts.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
30
backend/src/models/contact.model.ts
Normal file
30
backend/src/models/contact.model.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class Contact extends BaseModel {
|
||||
static tableName = 'contacts';
|
||||
|
||||
id!: string;
|
||||
firstName!: string;
|
||||
lastName!: string;
|
||||
accountId!: string;
|
||||
ownerId?: string;
|
||||
|
||||
static relationMappings = {
|
||||
account: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'account.model',
|
||||
join: {
|
||||
from: 'contacts.accountId',
|
||||
to: 'accounts.id',
|
||||
},
|
||||
},
|
||||
owner: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: 'user.model',
|
||||
join: {
|
||||
from: 'contacts.ownerId',
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -30,6 +30,8 @@ export interface UIMetadata {
|
||||
step?: number; // For number
|
||||
accept?: string; // For file/image
|
||||
relationDisplayField?: string; // Which field to display for relations
|
||||
relationObjects?: string[]; // For polymorphic relations
|
||||
relationTypeField?: string; // Field API name storing the selected relation type
|
||||
|
||||
// Formatting
|
||||
format?: string; // Date format, number format, etc.
|
||||
@@ -64,9 +66,6 @@ export class FieldDefinition extends BaseModel {
|
||||
isCustom!: boolean;
|
||||
displayOrder!: number;
|
||||
uiMetadata?: UIMetadata;
|
||||
// Field-level permissions
|
||||
defaultReadable!: boolean;
|
||||
defaultWritable!: boolean;
|
||||
|
||||
static relationMappings = {
|
||||
objectDefinition: {
|
||||
@@ -77,5 +76,13 @@ export class FieldDefinition extends BaseModel {
|
||||
to: 'object_definitions.id',
|
||||
},
|
||||
},
|
||||
rolePermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: () => require('./role-field-permission.model').RoleFieldPermission,
|
||||
join: {
|
||||
from: 'field_definitions.id',
|
||||
to: 'role_field_permissions.fieldDefinitionId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -10,15 +10,11 @@ export class ObjectDefinition extends BaseModel {
|
||||
description?: string;
|
||||
isSystem: boolean;
|
||||
isCustom: boolean;
|
||||
// Authorization fields
|
||||
accessModel: 'public' | 'owner' | 'mixed';
|
||||
publicRead: boolean;
|
||||
publicCreate: boolean;
|
||||
publicUpdate: boolean;
|
||||
publicDelete: boolean;
|
||||
ownerField: string;
|
||||
orgWideDefault: 'private' | 'public_read' | 'public_read_write';
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
fields?: any[];
|
||||
rolePermissions?: any[];
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
@@ -32,19 +28,14 @@ export class ObjectDefinition extends BaseModel {
|
||||
description: { type: 'string' },
|
||||
isSystem: { type: 'boolean' },
|
||||
isCustom: { type: 'boolean' },
|
||||
accessModel: { type: 'string', enum: ['public', 'owner', 'mixed'] },
|
||||
publicRead: { type: 'boolean' },
|
||||
publicCreate: { type: 'boolean' },
|
||||
publicUpdate: { type: 'boolean' },
|
||||
publicDelete: { type: 'boolean' },
|
||||
ownerField: { type: 'string' },
|
||||
orgWideDefault: { type: 'string', enum: ['private', 'public_read', 'public_read_write'] },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { FieldDefinition } = require('./field-definition.model');
|
||||
const { RecordShare } = require('./record-share.model');
|
||||
const { RoleObjectPermission } = require('./role-object-permission.model');
|
||||
|
||||
return {
|
||||
fields: {
|
||||
@@ -55,12 +46,12 @@ export class ObjectDefinition extends BaseModel {
|
||||
to: 'field_definitions.objectDefinitionId',
|
||||
},
|
||||
},
|
||||
recordShares: {
|
||||
rolePermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RecordShare,
|
||||
modelClass: RoleObjectPermission,
|
||||
join: {
|
||||
from: 'object_definitions.id',
|
||||
to: 'record_shares.objectDefinitionId',
|
||||
to: 'role_object_permissions.objectDefinitionId',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,39 +1,80 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export interface RecordShareAccessLevel {
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
}
|
||||
|
||||
export class RecordShare extends BaseModel {
|
||||
static tableName = 'record_shares';
|
||||
|
||||
// Don't use snake_case mapping since DB columns are already camelCase
|
||||
static get columnNameMappers() {
|
||||
return {
|
||||
parse(obj: any) {
|
||||
return obj;
|
||||
},
|
||||
format(obj: any) {
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Don't auto-set timestamps - let DB defaults handle them
|
||||
$beforeInsert() {
|
||||
// Don't call super - skip BaseModel's timestamp logic
|
||||
}
|
||||
|
||||
$beforeUpdate() {
|
||||
// Don't call super - skip BaseModel's timestamp logic
|
||||
}
|
||||
|
||||
id!: string;
|
||||
objectDefinitionId!: string;
|
||||
recordId!: string;
|
||||
granteeUserId!: string;
|
||||
grantedByUserId!: string;
|
||||
actions!: any; // JSON field - will be string[] when parsed
|
||||
fields?: any; // JSON field - will be string[] when parsed
|
||||
accessLevel!: RecordShareAccessLevel;
|
||||
expiresAt?: Date;
|
||||
revokedAt?: Date;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['objectDefinitionId', 'recordId', 'granteeUserId', 'grantedByUserId', 'actions'],
|
||||
required: ['objectDefinitionId', 'recordId', 'granteeUserId', 'grantedByUserId', 'accessLevel'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
objectDefinitionId: { type: 'string' },
|
||||
recordId: { type: 'string' },
|
||||
granteeUserId: { type: 'string' },
|
||||
grantedByUserId: { type: 'string' },
|
||||
actions: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
accessLevel: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
canDelete: { type: 'boolean' },
|
||||
},
|
||||
fields: {
|
||||
type: ['array', 'null'],
|
||||
items: { type: 'string' },
|
||||
},
|
||||
expiresAt: { type: ['string', 'null'], format: 'date-time' },
|
||||
revokedAt: { type: ['string', 'null'], format: 'date-time' },
|
||||
expiresAt: {
|
||||
anyOf: [
|
||||
{ type: 'string', format: 'date-time' },
|
||||
{ type: 'null' },
|
||||
{ type: 'object' } // Allow Date objects
|
||||
]
|
||||
},
|
||||
revokedAt: {
|
||||
anyOf: [
|
||||
{ type: 'string', format: 'date-time' },
|
||||
{ type: 'null' },
|
||||
{ type: 'object' } // Allow Date objects
|
||||
]
|
||||
},
|
||||
createdAt: { type: ['string', 'object'], format: 'date-time' },
|
||||
updatedAt: { type: ['string', 'object'], format: 'date-time' },
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -69,11 +110,4 @@ export class RecordShare extends BaseModel {
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Check if share is currently valid
|
||||
isValid(): boolean {
|
||||
if (this.revokedAt) return false;
|
||||
if (this.expiresAt && new Date(this.expiresAt) < new Date()) return false;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
51
backend/src/models/role-field-permission.model.ts
Normal file
51
backend/src/models/role-field-permission.model.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class RoleFieldPermission extends BaseModel {
|
||||
static tableName = 'role_field_permissions';
|
||||
|
||||
id!: string;
|
||||
roleId!: string;
|
||||
fieldDefinitionId!: string;
|
||||
canRead!: boolean;
|
||||
canEdit!: boolean;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['roleId', 'fieldDefinitionId'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
roleId: { type: 'string' },
|
||||
fieldDefinitionId: { type: 'string' },
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { Role } = require('./role.model');
|
||||
const { FieldDefinition } = require('./field-definition.model');
|
||||
|
||||
return {
|
||||
role: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: Role,
|
||||
join: {
|
||||
from: 'role_field_permissions.roleId',
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
fieldDefinition: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: FieldDefinition,
|
||||
join: {
|
||||
from: 'role_field_permissions.fieldDefinitionId',
|
||||
to: 'field_definitions.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
59
backend/src/models/role-object-permission.model.ts
Normal file
59
backend/src/models/role-object-permission.model.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class RoleObjectPermission extends BaseModel {
|
||||
static tableName = 'role_object_permissions';
|
||||
|
||||
id!: string;
|
||||
roleId!: string;
|
||||
objectDefinitionId!: string;
|
||||
canCreate!: boolean;
|
||||
canRead!: boolean;
|
||||
canEdit!: boolean;
|
||||
canDelete!: boolean;
|
||||
canViewAll!: boolean;
|
||||
canModifyAll!: boolean;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['roleId', 'objectDefinitionId'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
roleId: { type: 'string' },
|
||||
objectDefinitionId: { type: 'string' },
|
||||
canCreate: { type: 'boolean' },
|
||||
canRead: { type: 'boolean' },
|
||||
canEdit: { type: 'boolean' },
|
||||
canDelete: { type: 'boolean' },
|
||||
canViewAll: { type: 'boolean' },
|
||||
canModifyAll: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { Role } = require('./role.model');
|
||||
const { ObjectDefinition } = require('./object-definition.model');
|
||||
|
||||
return {
|
||||
role: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: Role,
|
||||
join: {
|
||||
from: 'role_object_permissions.roleId',
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
objectDefinition: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: ObjectDefinition,
|
||||
join: {
|
||||
from: 'role_object_permissions.objectDefinitionId',
|
||||
to: 'object_definitions.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
export class RoleRule extends BaseModel {
|
||||
static tableName = 'role_rules';
|
||||
|
||||
id: string;
|
||||
roleId: string;
|
||||
rulesJson: any[]; // Array of CASL rules
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
|
||||
static get jsonSchema() {
|
||||
return {
|
||||
type: 'object',
|
||||
required: ['roleId', 'rulesJson'],
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
roleId: { type: 'string' },
|
||||
rulesJson: { type: 'array' },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
static get relationMappings() {
|
||||
const { Role } = require('./role.model');
|
||||
|
||||
return {
|
||||
role: {
|
||||
relation: BaseModel.BelongsToOneRelation,
|
||||
modelClass: Role,
|
||||
join: {
|
||||
from: 'role_rules.roleId',
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -27,7 +27,8 @@ export class Role extends BaseModel {
|
||||
const { RolePermission } = require('./role-permission.model');
|
||||
const { Permission } = require('./permission.model');
|
||||
const { User } = require('./user.model');
|
||||
const { RoleRule } = require('./role-rule.model');
|
||||
const { RoleObjectPermission } = require('./role-object-permission.model');
|
||||
const { RoleFieldPermission } = require('./role-field-permission.model');
|
||||
|
||||
return {
|
||||
rolePermissions: {
|
||||
@@ -62,12 +63,20 @@ export class Role extends BaseModel {
|
||||
to: 'users.id',
|
||||
},
|
||||
},
|
||||
roleRules: {
|
||||
objectPermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RoleRule,
|
||||
modelClass: RoleObjectPermission,
|
||||
join: {
|
||||
from: 'roles.id',
|
||||
to: 'role_rules.roleId',
|
||||
to: 'role_object_permissions.roleId',
|
||||
},
|
||||
},
|
||||
fieldPermissions: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RoleFieldPermission,
|
||||
join: {
|
||||
from: 'roles.id',
|
||||
to: 'role_field_permissions.roleId',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -30,7 +30,6 @@ export class User extends BaseModel {
|
||||
static get relationMappings() {
|
||||
const { UserRole } = require('./user-role.model');
|
||||
const { Role } = require('./role.model');
|
||||
const { RecordShare } = require('./record-share.model');
|
||||
|
||||
return {
|
||||
userRoles: {
|
||||
@@ -53,22 +52,6 @@ export class User extends BaseModel {
|
||||
to: 'roles.id',
|
||||
},
|
||||
},
|
||||
sharesGranted: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RecordShare,
|
||||
join: {
|
||||
from: 'users.id',
|
||||
to: 'record_shares.grantedByUserId',
|
||||
},
|
||||
},
|
||||
sharesReceived: {
|
||||
relation: BaseModel.HasManyRelation,
|
||||
modelClass: RecordShare,
|
||||
join: {
|
||||
from: 'users.id',
|
||||
to: 'record_shares.granteeUserId',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,9 @@ export interface FieldConfigDTO {
|
||||
step?: number;
|
||||
accept?: string;
|
||||
relationObject?: string;
|
||||
relationObjects?: string[];
|
||||
relationDisplayField?: string;
|
||||
relationTypeField?: string;
|
||||
format?: string;
|
||||
prefix?: string;
|
||||
suffix?: string;
|
||||
@@ -43,6 +45,14 @@ export interface ObjectDefinitionDTO {
|
||||
description?: string;
|
||||
isSystem: boolean;
|
||||
fields: FieldConfigDTO[];
|
||||
relatedLists?: Array<{
|
||||
title: string;
|
||||
relationName: string;
|
||||
objectApiName: string;
|
||||
fields: FieldConfigDTO[];
|
||||
canCreate?: boolean;
|
||||
createRoute?: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@@ -98,10 +108,12 @@ export class FieldMapperService {
|
||||
step: uiMetadata.step,
|
||||
accept: uiMetadata.accept,
|
||||
relationObject: field.referenceObject,
|
||||
relationObjects: uiMetadata.relationObjects,
|
||||
// For lookup fields, provide default display field if not specified
|
||||
relationDisplayField: isLookupField
|
||||
? (uiMetadata.relationDisplayField || 'name')
|
||||
: uiMetadata.relationDisplayField,
|
||||
relationTypeField: uiMetadata.relationTypeField,
|
||||
|
||||
// Formatting
|
||||
format: uiMetadata.format,
|
||||
@@ -206,6 +218,17 @@ export class FieldMapperService {
|
||||
.filter((f: any) => f.isActive !== false)
|
||||
.sort((a: any, b: any) => (a.displayOrder || 0) - (b.displayOrder || 0))
|
||||
.map((f: any) => this.mapFieldToDTO(f)),
|
||||
relatedLists: (objectDef.relatedLists || []).map((list: any) => ({
|
||||
title: list.title,
|
||||
relationName: list.relationName,
|
||||
objectApiName: list.objectApiName,
|
||||
fields: (list.fields || [])
|
||||
.filter((f: any) => f.isActive !== false)
|
||||
.map((f: any) => this.mapFieldToDTO(f))
|
||||
.filter((f: any) => f.showOnList !== false),
|
||||
canCreate: list.canCreate,
|
||||
createRoute: list.createRoute,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Model } from 'objection';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
|
||||
* Base model for all dynamic and system models
|
||||
@@ -10,26 +11,23 @@ export class BaseModel extends Model {
|
||||
tenantId?: string;
|
||||
ownerId?: string;
|
||||
name?: string;
|
||||
created_at?: Date;
|
||||
updated_at?: Date;
|
||||
created_at?: string;
|
||||
updated_at?: string;
|
||||
|
||||
// Hook to set system-managed fields
|
||||
$beforeInsert() {
|
||||
// created_at and updated_at are handled by the database
|
||||
// ownerId should be set by the controller/service
|
||||
async $beforeInsert() {
|
||||
if (!this.id) {
|
||||
this.id = randomUUID();
|
||||
}
|
||||
|
||||
$beforeUpdate() {
|
||||
// updated_at is handled by the database
|
||||
if (!this.created_at) {
|
||||
this.created_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get the API name for this object
|
||||
* Override in subclasses
|
||||
*/
|
||||
static get objectApiName(): string {
|
||||
return 'BaseModel';
|
||||
if (!this.updated_at) {
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
|
||||
async $beforeUpdate() {
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { randomUUID } from 'crypto';
|
||||
import { ModelClass, JSONSchema, RelationMappings, Model } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
|
||||
@@ -28,6 +27,14 @@ export interface ObjectMetadata {
|
||||
}
|
||||
|
||||
export class DynamicModelFactory {
|
||||
/**
|
||||
* Get relation name from lookup field API name
|
||||
* Converts "ownerId" -> "owner", "customFieldId" -> "customfield"
|
||||
*/
|
||||
static getRelationName(lookupFieldApiName: string): string {
|
||||
return lookupFieldApiName.replace(/Id$/, '').toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a dynamic model class from object metadata
|
||||
* @param meta Object metadata
|
||||
@@ -49,8 +56,7 @@ export class DynamicModelFactory {
|
||||
updated_at: { type: 'string', format: 'date-time' },
|
||||
};
|
||||
|
||||
// Don't require system-managed fields (id, tenantId, ownerId, timestamps)
|
||||
// These are auto-set by hooks or database
|
||||
// Don't require id or tenantId - they'll be set automatically
|
||||
const required: string[] = [];
|
||||
|
||||
// Add custom fields
|
||||
@@ -70,20 +76,13 @@ export class DynamicModelFactory {
|
||||
// Store lookup fields metadata for later use
|
||||
const lookupFieldsInfo = lookupFields.map(f => ({
|
||||
apiName: f.apiName,
|
||||
relationName: f.apiName.replace(/Id$/, '').toLowerCase(),
|
||||
relationName: DynamicModelFactory.getRelationName(f.apiName),
|
||||
referenceObject: f.referenceObject,
|
||||
targetTable: this.getTableName(f.referenceObject),
|
||||
}));
|
||||
|
||||
// Create the dynamic model class extending Model directly
|
||||
class DynamicModel extends Model {
|
||||
id?: string;
|
||||
tenantId?: string;
|
||||
ownerId?: string;
|
||||
name?: string;
|
||||
created_at?: string;
|
||||
updated_at?: string;
|
||||
|
||||
// Create the dynamic model class extending BaseModel
|
||||
class DynamicModel extends BaseModel {
|
||||
static tableName = tableName;
|
||||
|
||||
static objectApiName = apiName;
|
||||
@@ -120,6 +119,47 @@ export class DynamicModelFactory {
|
||||
};
|
||||
}
|
||||
|
||||
// Add additional relation mappings (e.g., hasMany)
|
||||
for (const relation of relations) {
|
||||
if (mappings[relation.name]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let modelClass: any = relation.targetObjectApiName;
|
||||
if (getModel) {
|
||||
const resolvedModel = getModel(relation.targetObjectApiName);
|
||||
if (resolvedModel) {
|
||||
modelClass = resolvedModel;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const targetTable = DynamicModelFactory.getTableName(relation.targetObjectApiName);
|
||||
|
||||
if (relation.type === 'belongsTo') {
|
||||
mappings[relation.name] = {
|
||||
relation: Model.BelongsToOneRelation,
|
||||
modelClass,
|
||||
join: {
|
||||
from: `${tableName}.${relation.fromColumn}`,
|
||||
to: `${targetTable}.${relation.toColumn}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (relation.type === 'hasMany') {
|
||||
mappings[relation.name] = {
|
||||
relation: Model.HasManyRelation,
|
||||
modelClass,
|
||||
join: {
|
||||
from: `${tableName}.${relation.fromColumn}`,
|
||||
to: `${targetTable}.${relation.toColumn}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return mappings;
|
||||
}
|
||||
|
||||
@@ -130,23 +170,6 @@ export class DynamicModelFactory {
|
||||
properties,
|
||||
};
|
||||
}
|
||||
|
||||
async $beforeInsert() {
|
||||
if (!this.id) {
|
||||
this.id = randomUUID();
|
||||
}
|
||||
if (!this.created_at) {
|
||||
this.created_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
if (!this.updated_at) {
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
|
||||
async $beforeUpdate(opt: any, queryContext: any) {
|
||||
await super.$beforeUpdate(opt, queryContext);
|
||||
this.updated_at = new Date().toISOString().slice(0, 19).replace('T', ' ');
|
||||
}
|
||||
}
|
||||
|
||||
return DynamicModel as any;
|
||||
@@ -156,6 +179,7 @@ export class DynamicModelFactory {
|
||||
* Convert a field definition to JSON schema property
|
||||
*/
|
||||
private static fieldToJsonSchema(field: FieldDefinition): Record<string, any> {
|
||||
const baseSchema = () => {
|
||||
switch (field.type.toUpperCase()) {
|
||||
case 'TEXT':
|
||||
case 'STRING':
|
||||
@@ -203,6 +227,18 @@ export class DynamicModelFactory {
|
||||
default:
|
||||
return { type: 'string' };
|
||||
}
|
||||
};
|
||||
|
||||
const schema = baseSchema();
|
||||
|
||||
// Allow null for non-required fields so optional strings/numbers don't fail validation
|
||||
if (!field.isRequired) {
|
||||
return {
|
||||
anyOf: [schema, { type: 'null' }],
|
||||
};
|
||||
}
|
||||
|
||||
return schema;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -214,6 +250,9 @@ export class DynamicModelFactory {
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
if (snakeCase.endsWith('y')) {
|
||||
return `${snakeCase.slice(0, -1)}ies`;
|
||||
}
|
||||
return snakeCase.endsWith('s') ? snakeCase : `${snakeCase}s`;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,13 +16,17 @@ export class ModelRegistry {
|
||||
*/
|
||||
registerModel(apiName: string, modelClass: ModelClass<BaseModel>): void {
|
||||
this.registry.set(apiName, modelClass);
|
||||
const lowerKey = apiName.toLowerCase();
|
||||
if (lowerKey !== apiName && !this.registry.has(lowerKey)) {
|
||||
this.registry.set(lowerKey, modelClass);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a model from the registry
|
||||
*/
|
||||
getModel(apiName: string): ModelClass<BaseModel> {
|
||||
const model = this.registry.get(apiName);
|
||||
const model = this.registry.get(apiName) || this.registry.get(apiName.toLowerCase());
|
||||
if (!model) {
|
||||
throw new Error(`Model for ${apiName} not found in registry`);
|
||||
}
|
||||
@@ -33,7 +37,7 @@ export class ModelRegistry {
|
||||
* Check if a model exists in the registry
|
||||
*/
|
||||
hasModel(apiName: string): boolean {
|
||||
return this.registry.has(apiName);
|
||||
return this.registry.has(apiName) || this.registry.has(apiName.toLowerCase());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -46,7 +50,8 @@ export class ModelRegistry {
|
||||
// Returns undefined if model not found (for models not yet registered)
|
||||
const model = DynamicModelFactory.createModel(
|
||||
metadata,
|
||||
(apiName: string) => this.registry.get(apiName),
|
||||
(apiName: string) =>
|
||||
this.registry.get(apiName) || this.registry.get(apiName.toLowerCase()),
|
||||
);
|
||||
this.registerModel(metadata.apiName, model);
|
||||
return model;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import type { Knex } from 'knex';
|
||||
import { Knex } from 'knex';
|
||||
import { ModelClass } from 'objection';
|
||||
import { BaseModel } from './base.model';
|
||||
import { ModelRegistry } from './model.registry';
|
||||
@@ -171,6 +171,25 @@ export class ModelService {
|
||||
}
|
||||
}
|
||||
|
||||
if (objectMetadata.relations) {
|
||||
for (const relation of objectMetadata.relations) {
|
||||
if (relation.targetObjectApiName) {
|
||||
try {
|
||||
await this.ensureModelWithDependencies(
|
||||
tenantId,
|
||||
relation.targetObjectApiName,
|
||||
fetchMetadata,
|
||||
visited,
|
||||
);
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
`Skipping registration of related model ${relation.targetObjectApiName}: ${error.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now create and register this model (all dependencies are ready)
|
||||
await this.createModelForObject(tenantId, objectMetadata);
|
||||
this.logger.log(`Registered model for ${objectApiName} in tenant ${tenantId}`);
|
||||
|
||||
@@ -6,11 +6,13 @@ import { SchemaManagementService } from './schema-management.service';
|
||||
import { FieldMapperService } from './field-mapper.service';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
import { MigrationModule } from '../migration/migration.module';
|
||||
import { RbacModule } from '../rbac/rbac.module';
|
||||
import { ModelRegistry } from './models/model.registry';
|
||||
import { ModelService } from './models/model.service';
|
||||
import { MeilisearchModule } from '../search/meilisearch.module';
|
||||
|
||||
@Module({
|
||||
imports: [TenantModule, MigrationModule],
|
||||
imports: [TenantModule, MigrationModule, RbacModule, MeilisearchModule],
|
||||
providers: [
|
||||
ObjectService,
|
||||
SchemaManagementService,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -95,4 +95,20 @@ export class RuntimeObjectController {
|
||||
user.userId,
|
||||
);
|
||||
}
|
||||
|
||||
@Post(':objectApiName/records/bulk-delete')
|
||||
async deleteRecords(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() body: { recordIds?: string[]; ids?: string[] },
|
||||
@CurrentUser() user: any,
|
||||
) {
|
||||
const recordIds: string[] = body?.recordIds || body?.ids || [];
|
||||
return this.objectService.deleteRecords(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
recordIds,
|
||||
user.userId,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import type { Knex } from 'knex';
|
||||
import { Knex } from 'knex';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { FieldDefinition } from '../models/field-definition.model';
|
||||
|
||||
@@ -15,7 +15,11 @@ export class SchemaManagementService {
|
||||
objectDefinition: ObjectDefinition,
|
||||
fields: FieldDefinition[],
|
||||
) {
|
||||
const tableName = this.getTableName(objectDefinition.apiName);
|
||||
const tableName = this.getTableName(
|
||||
objectDefinition.apiName,
|
||||
objectDefinition.label,
|
||||
objectDefinition.pluralLabel,
|
||||
);
|
||||
|
||||
// Check if table already exists
|
||||
const exists = await knex.schema.hasTable(tableName);
|
||||
@@ -44,8 +48,10 @@ export class SchemaManagementService {
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
field: FieldDefinition,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
this.addFieldColumn(table, field);
|
||||
@@ -61,8 +67,10 @@ export class SchemaManagementService {
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
fieldApiName: string,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
table.dropColumn(fieldApiName);
|
||||
@@ -71,11 +79,44 @@ export class SchemaManagementService {
|
||||
this.logger.log(`Removed field ${fieldApiName} from table ${tableName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Alter a field in an existing object table
|
||||
* Handles safe updates like changing NOT NULL or constraints
|
||||
* Warns about potentially destructive operations
|
||||
*/
|
||||
async alterFieldInTable(
|
||||
knex: Knex,
|
||||
objectApiName: string,
|
||||
fieldApiName: string,
|
||||
field: FieldDefinition,
|
||||
objectLabel?: string,
|
||||
pluralLabel?: string,
|
||||
options?: {
|
||||
skipTypeChange?: boolean; // Skip if type change would lose data
|
||||
},
|
||||
) {
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
const skipTypeChange = options?.skipTypeChange ?? true;
|
||||
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
// Drop the existing column and recreate with new definition
|
||||
// Note: This approach works for metadata changes, but type changes may need data migration
|
||||
table.dropColumn(fieldApiName);
|
||||
});
|
||||
|
||||
// Recreate the column with new definition
|
||||
await knex.schema.alterTable(tableName, (table) => {
|
||||
this.addFieldColumn(table, field);
|
||||
});
|
||||
|
||||
this.logger.log(`Altered field ${fieldApiName} in table ${tableName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop an object table
|
||||
*/
|
||||
async dropObjectTable(knex: Knex, objectApiName: string) {
|
||||
const tableName = this.getTableName(objectApiName);
|
||||
async dropObjectTable(knex: Knex, objectApiName: string, objectLabel?: string, pluralLabel?: string) {
|
||||
const tableName = this.getTableName(objectApiName, objectLabel, pluralLabel);
|
||||
|
||||
await knex.schema.dropTableIfExists(tableName);
|
||||
|
||||
@@ -94,15 +135,30 @@ export class SchemaManagementService {
|
||||
let column: Knex.ColumnBuilder;
|
||||
|
||||
switch (field.type) {
|
||||
// Text types
|
||||
case 'String':
|
||||
case 'TEXT':
|
||||
case 'EMAIL':
|
||||
case 'PHONE':
|
||||
case 'URL':
|
||||
column = table.string(columnName, field.length || 255);
|
||||
break;
|
||||
|
||||
case 'Text':
|
||||
case 'LONG_TEXT':
|
||||
column = table.text(columnName);
|
||||
break;
|
||||
|
||||
case 'PICKLIST':
|
||||
case 'MULTI_PICKLIST':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// Numeric types
|
||||
case 'Number':
|
||||
case 'NUMBER':
|
||||
case 'CURRENCY':
|
||||
case 'PERCENT':
|
||||
if (field.scale && field.scale > 0) {
|
||||
column = table.decimal(
|
||||
columnName,
|
||||
@@ -115,18 +171,28 @@ export class SchemaManagementService {
|
||||
break;
|
||||
|
||||
case 'Boolean':
|
||||
case 'BOOLEAN':
|
||||
column = table.boolean(columnName).defaultTo(false);
|
||||
break;
|
||||
|
||||
// Date types
|
||||
case 'Date':
|
||||
case 'DATE':
|
||||
column = table.date(columnName);
|
||||
break;
|
||||
|
||||
case 'DateTime':
|
||||
case 'DATE_TIME':
|
||||
column = table.datetime(columnName);
|
||||
break;
|
||||
|
||||
case 'TIME':
|
||||
column = table.time(columnName);
|
||||
break;
|
||||
|
||||
// Relationship types
|
||||
case 'Reference':
|
||||
case 'LOOKUP':
|
||||
column = table.uuid(columnName);
|
||||
if (field.referenceObject) {
|
||||
const refTableName = this.getTableName(field.referenceObject);
|
||||
@@ -134,19 +200,30 @@ export class SchemaManagementService {
|
||||
}
|
||||
break;
|
||||
|
||||
// Email (legacy)
|
||||
case 'Email':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// Phone (legacy)
|
||||
case 'Phone':
|
||||
column = table.string(columnName, 50);
|
||||
break;
|
||||
|
||||
// Url (legacy)
|
||||
case 'Url':
|
||||
column = table.string(columnName, 255);
|
||||
break;
|
||||
|
||||
// File types
|
||||
case 'FILE':
|
||||
case 'IMAGE':
|
||||
column = table.text(columnName); // Store file path or URL
|
||||
break;
|
||||
|
||||
// JSON
|
||||
case 'Json':
|
||||
case 'JSON':
|
||||
column = table.json(columnName);
|
||||
break;
|
||||
|
||||
@@ -174,16 +251,35 @@ export class SchemaManagementService {
|
||||
/**
|
||||
* Convert object API name to table name (convert to snake_case, pluralize)
|
||||
*/
|
||||
private getTableName(apiName: string): string {
|
||||
// Convert PascalCase to snake_case
|
||||
const snakeCase = apiName
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
private getTableName(apiName: string, objectLabel?: string, pluralLabel?: string): string {
|
||||
const toSnakePlural = (source: string): string => {
|
||||
const cleaned = source.replace(/[\s-]+/g, '_');
|
||||
const snake = cleaned
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1_$2')
|
||||
.replace(/__+/g, '_')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
// Simple pluralization (append 's' if not already plural)
|
||||
// In production, use a proper pluralization library
|
||||
return snakeCase.endsWith('s') ? snakeCase : `${snakeCase}s`;
|
||||
if (snake.endsWith('y')) return `${snake.slice(0, -1)}ies`;
|
||||
if (snake.endsWith('s')) return snake;
|
||||
return `${snake}s`;
|
||||
};
|
||||
|
||||
const fromApi = toSnakePlural(apiName);
|
||||
const fromLabel = objectLabel ? toSnakePlural(objectLabel) : null;
|
||||
const fromPlural = pluralLabel ? toSnakePlural(pluralLabel) : null;
|
||||
|
||||
if (fromLabel && fromLabel.includes('_') && !fromApi.includes('_')) {
|
||||
return fromLabel;
|
||||
}
|
||||
if (fromPlural && fromPlural.includes('_') && !fromApi.includes('_')) {
|
||||
return fromPlural;
|
||||
}
|
||||
|
||||
if (fromLabel && fromLabel !== fromApi) return fromLabel;
|
||||
if (fromPlural && fromPlural !== fromApi) return fromPlural;
|
||||
|
||||
return fromApi;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -2,19 +2,18 @@ import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Put,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
Inject,
|
||||
} from '@nestjs/common';
|
||||
import { ObjectService } from './object.service';
|
||||
import { FieldMapperService } from './field-mapper.service';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { FieldDefinition } from '../models/field-definition.model';
|
||||
import { Knex } from 'knex';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
|
||||
@Controller('setup/objects')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@@ -22,7 +21,7 @@ export class SetupObjectController {
|
||||
constructor(
|
||||
private objectService: ObjectService,
|
||||
private fieldMapperService: FieldMapperService,
|
||||
@Inject('KnexConnection') private readonly knex: Knex,
|
||||
private tenantDbService: TenantDatabaseService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
@@ -74,121 +73,84 @@ export class SetupObjectController {
|
||||
return this.fieldMapperService.mapFieldToDTO(field);
|
||||
}
|
||||
|
||||
// Access & Permissions endpoints
|
||||
|
||||
/**
|
||||
* Get object access configuration
|
||||
*/
|
||||
@Get(':objectApiName/access')
|
||||
async getAccess(
|
||||
@Put(':objectApiName/fields/:fieldApiName')
|
||||
async updateFieldDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('fieldApiName') fieldApiName: string,
|
||||
@Body() data: any,
|
||||
) {
|
||||
const objectDef = await ObjectDefinition.query(this.knex)
|
||||
.findOne({ apiName: objectApiName })
|
||||
.withGraphFetched('fields');
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object definition not found');
|
||||
const field = await this.objectService.updateFieldDefinition(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
fieldApiName,
|
||||
data,
|
||||
);
|
||||
return this.fieldMapperService.mapFieldToDTO(field);
|
||||
}
|
||||
|
||||
return {
|
||||
accessModel: objectDef.accessModel,
|
||||
publicRead: objectDef.publicRead,
|
||||
publicCreate: objectDef.publicCreate,
|
||||
publicUpdate: objectDef.publicUpdate,
|
||||
publicDelete: objectDef.publicDelete,
|
||||
ownerField: objectDef.ownerField,
|
||||
fields: objectDef['fields'] || [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Update object access configuration
|
||||
*/
|
||||
@Put(':objectApiName/access')
|
||||
async updateAccess(
|
||||
@Delete(':objectApiName/fields/:fieldApiName')
|
||||
async deleteFieldDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() dto: any,
|
||||
@Param('fieldApiName') fieldApiName: string,
|
||||
) {
|
||||
|
||||
console.log('dto', JSON.stringify(dto));
|
||||
|
||||
const objectDef = await ObjectDefinition.query(this.knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object definition not found');
|
||||
return this.objectService.deleteFieldDefinition(
|
||||
tenantId,
|
||||
objectApiName,
|
||||
fieldApiName,
|
||||
);
|
||||
}
|
||||
|
||||
return ObjectDefinition.query(this.knex).patchAndFetchById(objectDef.id, dto);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or update field-level permissions
|
||||
*/
|
||||
@Post(':objectApiName/fields/:fieldKey/permissions')
|
||||
async setFieldPermissions(
|
||||
@Patch(':objectApiName')
|
||||
async updateObjectDefinition(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('fieldKey') fieldKey: string,
|
||||
@Body() dto: any,
|
||||
@Body() data: any,
|
||||
) {
|
||||
const objectDef = await ObjectDefinition.query(this.knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object definition not found');
|
||||
return this.objectService.updateObjectDefinition(tenantId, objectApiName, data);
|
||||
}
|
||||
|
||||
// Find the field definition
|
||||
const field = await FieldDefinition.query(this.knex)
|
||||
.findOne({
|
||||
objectDefinitionId: objectDef.id,
|
||||
apiName: fieldKey,
|
||||
});
|
||||
|
||||
if (!field) {
|
||||
throw new Error('Field definition not found');
|
||||
@Get(':objectId/field-permissions')
|
||||
async getFieldPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectId') objectId: string,
|
||||
) {
|
||||
return this.objectService.getFieldPermissions(tenantId, objectId);
|
||||
}
|
||||
|
||||
// Update field permissions
|
||||
return FieldDefinition.query(this.knex).patchAndFetchById(field.id, {
|
||||
defaultReadable: dto.defaultReadable ?? field.defaultReadable,
|
||||
defaultWritable: dto.defaultWritable ?? field.defaultWritable,
|
||||
});
|
||||
@Put(':objectId/field-permissions')
|
||||
async updateFieldPermission(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectId') objectId: string,
|
||||
@Body() data: { roleId: string; fieldDefinitionId: string; canRead: boolean; canEdit: boolean },
|
||||
) {
|
||||
return this.objectService.updateFieldPermission(tenantId, data.roleId, data.fieldDefinitionId, data.canRead, data.canEdit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk set field permissions for an object
|
||||
*/
|
||||
@Put(':objectApiName/field-permissions')
|
||||
async bulkSetFieldPermissions(
|
||||
@Get(':objectApiName/permissions/:roleId')
|
||||
async getObjectPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() fields: { fieldKey: string; defaultReadable: boolean; defaultWritable: boolean }[],
|
||||
@Param('roleId') roleId: string,
|
||||
) {
|
||||
const objectDef = await ObjectDefinition.query(this.knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object definition not found');
|
||||
return this.objectService.getObjectPermissions(tenantId, objectApiName, roleId);
|
||||
}
|
||||
|
||||
// Update each field in the field_definitions table
|
||||
for (const fieldUpdate of fields) {
|
||||
await FieldDefinition.query(this.knex)
|
||||
.where({
|
||||
objectDefinitionId: objectDef.id,
|
||||
apiName: fieldUpdate.fieldKey,
|
||||
})
|
||||
.patch({
|
||||
defaultReadable: fieldUpdate.defaultReadable,
|
||||
defaultWritable: fieldUpdate.defaultWritable,
|
||||
});
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
@Put(':objectApiName/permissions')
|
||||
async updateObjectPermissions(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Body() data: {
|
||||
roleId: string;
|
||||
canCreate: boolean;
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
canViewAll: boolean;
|
||||
canModifyAll: boolean;
|
||||
},
|
||||
) {
|
||||
return this.objectService.updateObjectPermissions(tenantId, objectApiName, data);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ export class CreatePageLayoutDto {
|
||||
w: number;
|
||||
h: number;
|
||||
}>;
|
||||
relatedLists?: string[];
|
||||
};
|
||||
|
||||
@IsString()
|
||||
@@ -46,6 +47,7 @@ export class UpdatePageLayoutDto {
|
||||
w: number;
|
||||
h: number;
|
||||
}>;
|
||||
relatedLists?: string[];
|
||||
};
|
||||
|
||||
@IsString()
|
||||
|
||||
199
backend/src/rbac/ability.factory.ts
Normal file
199
backend/src/rbac/ability.factory.ts
Normal file
@@ -0,0 +1,199 @@
|
||||
import { AbilityBuilder, PureAbility, AbilityClass } from '@casl/ability';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { User } from '../models/user.model';
|
||||
import { RoleObjectPermission } from '../models/role-object-permission.model';
|
||||
import { RoleFieldPermission } from '../models/role-field-permission.model';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
|
||||
// Define action types
|
||||
export type Action = 'create' | 'read' | 'update' | 'delete' | 'view_all' | 'modify_all';
|
||||
|
||||
// Define subject types - can be string (object API name) or actual object with fields
|
||||
export type Subject = string | { objectApiName: string; ownerId?: string; id?: string; [key: string]: any };
|
||||
|
||||
// Define field actions
|
||||
export type FieldAction = 'read' | 'edit';
|
||||
|
||||
export type AppAbility = PureAbility<[Action, Subject], { field?: string }>;
|
||||
|
||||
@Injectable()
|
||||
export class AbilityFactory {
|
||||
/**
|
||||
* Build CASL ability for a user based on their roles and permissions
|
||||
* This aggregates permissions from all roles the user has
|
||||
*/
|
||||
async defineAbilityFor(
|
||||
user: User & { roles?: Array<{ objectPermissions?: RoleObjectPermission[]; fieldPermissions?: RoleFieldPermission[] }> },
|
||||
recordShares?: RecordShare[],
|
||||
): Promise<AppAbility> {
|
||||
const { can, cannot, build } = new AbilityBuilder<AppAbility>(PureAbility as AbilityClass<AppAbility>);
|
||||
|
||||
if (!user.roles || user.roles.length === 0) {
|
||||
// No roles = no permissions
|
||||
return build();
|
||||
}
|
||||
|
||||
// Aggregate object permissions from all roles
|
||||
const objectPermissionsMap = new Map<string, {
|
||||
canCreate: boolean;
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
canDelete: boolean;
|
||||
canViewAll: boolean;
|
||||
canModifyAll: boolean;
|
||||
}>();
|
||||
|
||||
// Aggregate field permissions from all roles
|
||||
const fieldPermissionsMap = new Map<string, {
|
||||
canRead: boolean;
|
||||
canEdit: boolean;
|
||||
}>();
|
||||
|
||||
// Process all roles
|
||||
for (const role of user.roles) {
|
||||
// Aggregate object permissions
|
||||
if (role.objectPermissions) {
|
||||
for (const perm of role.objectPermissions) {
|
||||
const existing = objectPermissionsMap.get(perm.objectDefinitionId) || {
|
||||
canCreate: false,
|
||||
canRead: false,
|
||||
canEdit: false,
|
||||
canDelete: false,
|
||||
canViewAll: false,
|
||||
canModifyAll: false,
|
||||
};
|
||||
|
||||
// Union of permissions (if any role grants it, user has it)
|
||||
objectPermissionsMap.set(perm.objectDefinitionId, {
|
||||
canCreate: existing.canCreate || perm.canCreate,
|
||||
canRead: existing.canRead || perm.canRead,
|
||||
canEdit: existing.canEdit || perm.canEdit,
|
||||
canDelete: existing.canDelete || perm.canDelete,
|
||||
canViewAll: existing.canViewAll || perm.canViewAll,
|
||||
canModifyAll: existing.canModifyAll || perm.canModifyAll,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Aggregate field permissions
|
||||
if (role.fieldPermissions) {
|
||||
for (const perm of role.fieldPermissions) {
|
||||
const existing = fieldPermissionsMap.get(perm.fieldDefinitionId) || {
|
||||
canRead: false,
|
||||
canEdit: false,
|
||||
};
|
||||
|
||||
fieldPermissionsMap.set(perm.fieldDefinitionId, {
|
||||
canRead: existing.canRead || perm.canRead,
|
||||
canEdit: existing.canEdit || perm.canEdit,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Convert aggregated permissions to CASL rules
|
||||
for (const [objectId, perms] of objectPermissionsMap) {
|
||||
// Create permission
|
||||
if (perms.canCreate) {
|
||||
can('create', objectId);
|
||||
}
|
||||
|
||||
// Read permission
|
||||
if (perms.canRead) {
|
||||
can('read', objectId);
|
||||
}
|
||||
|
||||
// View all permission (can see all records regardless of ownership)
|
||||
if (perms.canViewAll) {
|
||||
can('view_all', objectId);
|
||||
}
|
||||
|
||||
// Edit permission
|
||||
if (perms.canEdit) {
|
||||
can('update', objectId);
|
||||
}
|
||||
|
||||
// Modify all permission (can edit all records regardless of ownership)
|
||||
if (perms.canModifyAll) {
|
||||
can('modify_all', objectId);
|
||||
}
|
||||
|
||||
// Delete permission
|
||||
if (perms.canDelete) {
|
||||
can('delete', objectId);
|
||||
}
|
||||
}
|
||||
|
||||
// Add record sharing permissions
|
||||
if (recordShares) {
|
||||
for (const share of recordShares) {
|
||||
// Only add if share is active (not expired, not revoked)
|
||||
const now = new Date();
|
||||
const isExpired = share.expiresAt && share.expiresAt < now;
|
||||
const isRevoked = share.revokedAt !== null;
|
||||
|
||||
if (!isExpired && !isRevoked) {
|
||||
// Note: Record-level sharing will be checked in authorization service
|
||||
// CASL abilities are primarily for object-level permissions
|
||||
// Individual record access is validated in applyScopeToQuery
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can access a specific field
|
||||
* Returns true if user has permission or if no restriction exists
|
||||
*/
|
||||
canAccessField(
|
||||
fieldDefinitionId: string,
|
||||
action: FieldAction,
|
||||
user: User & { roles?: Array<{ fieldPermissions?: RoleFieldPermission[] }> },
|
||||
): boolean {
|
||||
if (!user.roles || user.roles.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Collect all field permissions from all roles
|
||||
const allFieldPermissions: RoleFieldPermission[] = [];
|
||||
for (const role of user.roles) {
|
||||
if (role.fieldPermissions) {
|
||||
allFieldPermissions.push(...role.fieldPermissions);
|
||||
}
|
||||
}
|
||||
|
||||
// If there are NO field permissions configured at all, allow by default
|
||||
if (allFieldPermissions.length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// If field permissions exist, check for explicit grants (union of all roles)
|
||||
for (const role of user.roles) {
|
||||
if (role.fieldPermissions) {
|
||||
const fieldPerm = role.fieldPermissions.find(fp => fp.fieldDefinitionId === fieldDefinitionId);
|
||||
if (fieldPerm) {
|
||||
if (action === 'read' && fieldPerm.canRead) return true;
|
||||
if (action === 'edit' && fieldPerm.canEdit) return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No explicit rule for this field but other field permissions exist.
|
||||
// Default to allow so new fields don't get silently stripped and fail validation.
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter fields based on user permissions
|
||||
* Returns array of field IDs the user can access with the specified action
|
||||
*/
|
||||
filterFields(
|
||||
fieldDefinitionIds: string[],
|
||||
action: FieldAction,
|
||||
user: User & { roles?: Array<{ fieldPermissions?: RoleFieldPermission[] }> },
|
||||
): string[] {
|
||||
return fieldDefinitionIds.filter(fieldId => this.canAccessField(fieldId, action, user));
|
||||
}
|
||||
}
|
||||
282
backend/src/rbac/authorization.service.ts
Normal file
282
backend/src/rbac/authorization.service.ts
Normal file
@@ -0,0 +1,282 @@
|
||||
import { Injectable, ForbiddenException } from '@nestjs/common';
|
||||
import { Knex } from 'knex';
|
||||
import { User } from '../models/user.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { FieldDefinition } from '../models/field-definition.model';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
import { AbilityFactory, AppAbility, Action } from './ability.factory';
|
||||
import { DynamicModelFactory } from '../object/models/dynamic-model.factory';
|
||||
import { subject } from '@casl/ability';
|
||||
|
||||
@Injectable()
export class AuthorizationService {
  constructor(private abilityFactory: AbilityFactory) {}

  /**
   * Apply authorization scope to a query based on OWD and user permissions.
   * Determines which records the user can see; mutates `query` in place.
   *
   * Denies by appending a `1 = 0` predicate; allows everything by leaving the
   * query untouched; otherwise delegates to applyPrivateScope (owner + shares).
   *
   * @param query  Knex or Objection query builder (typed `any` on purpose).
   * @param objectDef  Object definition; permissions are keyed by its ID.
   * @param user  User with roles (and their permissions) pre-loaded.
   * @param action  The CASL action being scoped (e.g. 'read').
   * @param knex  Tenant connection, used for shares lookup and schema checks.
   */
  async applyScopeToQuery<T = any>(
    query: any, // Accept both Knex and Objection query builders
    objectDef: ObjectDefinition,
    user: User & { roles?: any[] },
    action: Action,
    knex: Knex,
  ): Promise<void> {
    // Get user's ability (record shares are fetched so downstream scope
    // filtering can include explicitly shared records)
    const recordShares = await this.getActiveRecordShares(objectDef.id, user.id, knex);
    const ability = await this.abilityFactory.defineAbilityFor(user, recordShares);

    // Check if user has the base permission for this action.
    // Use object ID, not API name, since permissions are stored by object ID.
    if (!ability.can(action, objectDef.id)) {
      // No permission at all - return empty result
      query.where(knex.raw('1 = 0'));
      return;
    }

    // Check special permissions
    const hasViewAll = ability.can('view_all', objectDef.id);
    const hasModifyAll = ability.can('modify_all', objectDef.id);

    // If user has view_all or modify_all, they can see all records
    if (hasViewAll || hasModifyAll) {
      // No filtering needed
      return;
    }

    // Apply OWD (Org-Wide Default) restrictions
    switch (objectDef.orgWideDefault) {
      case 'public_read_write':
        // Everyone can see all records
        return;

      case 'public_read':
        // Everyone can see all records (write operations checked separately)
        return;

      case 'private':
      default:
        // Only owner and explicitly shared records
        await this.applyPrivateScope(query, objectDef, user, recordShares, knex);
        return;
    }
  }

  /**
   * Apply private scope: restrict to records the user owns OR records
   * explicitly shared with them. If neither applies, deny everything.
   */
  private async applyPrivateScope<T = any>(
    query: any, // Accept both Knex and Objection query builders
    objectDef: ObjectDefinition,
    user: User,
    recordShares: RecordShare[],
    knex: Knex,
  ): Promise<void> {
    const tableName = this.getTableName(objectDef.apiName);

    // Check if table has ownerId column
    const hasOwner = await knex.schema.hasColumn(tableName, 'ownerId');

    if (!hasOwner && recordShares.length === 0) {
      // No ownership and no shares - user can't see anything
      query.where(knex.raw('1 = 0'));
      return;
    }

    // Build conditions: ownerId = user OR record shared with user
    query.where((builder) => {
      if (hasOwner) {
        builder.orWhere(`${tableName}.ownerId`, user.id);
      }

      if (recordShares.length > 0) {
        const sharedRecordIds = recordShares.map(share => share.recordId);
        builder.orWhereIn(`${tableName}.id`, sharedRecordIds);
      }
    });
  }

  /**
   * Check if user can perform `action` on one specific record.
   *
   * Order of checks: base object permission → view_all/modify_all overrides
   * → OWD → ownership → explicit record shares. Returns false if none grant.
   */
  async canPerformAction(
    action: Action,
    objectDef: ObjectDefinition,
    record: any,
    user: User & { roles?: any[] },
    knex: Knex,
  ): Promise<boolean> {
    const recordShares = await this.getActiveRecordShares(objectDef.id, user.id, knex);
    const ability = await this.abilityFactory.defineAbilityFor(user, recordShares);

    // Check base permission - use object ID not API name
    if (!ability.can(action, objectDef.id)) {
      return false;
    }

    // Check special permissions - use object ID not API name
    const hasViewAll = ability.can('view_all', objectDef.id);
    const hasModifyAll = ability.can('modify_all', objectDef.id);

    // canViewAll only grants read access to all records
    if (action === 'read' && hasViewAll) {
      return true;
    }

    // canModifyAll grants edit/delete access to all records
    if ((action === 'update' || action === 'delete') && hasModifyAll) {
      return true;
    }

    // Check OWD
    switch (objectDef.orgWideDefault) {
      case 'public_read_write':
        return true;

      case 'public_read':
        if (action === 'read') return true;
        // For write actions, check ownership
        return record.ownerId === user.id;

      case 'private':
      default:
        // Check ownership
        if (record.ownerId === user.id) return true;

        // Check if record is shared with user
        const share = recordShares.find(s => s.recordId === record.id);
        if (share) {
          if (action === 'read' && share.accessLevel.canRead) return true;
          if (action === 'update' && share.accessLevel.canEdit) return true;
          if (action === 'delete' && share.accessLevel.canDelete) return true;
        }

        return false;
    }
  }

  /**
   * Filter data based on field-level permissions.
   * Removes fields the user cannot read; `id` is always kept (needed for
   * navigation and record identification). For LOOKUP fields the eagerly
   * loaded relation object (e.g. ownerId -> owner) is kept alongside the key.
   */
  async filterReadableFields(
    data: any,
    fields: FieldDefinition[],
    user: User & { roles?: any[] },
  ): Promise<any> {
    const filtered: any = {};

    // Always include id - it's required for navigation and record identification
    if (data.id !== undefined) {
      filtered.id = data.id;
    }

    for (const field of fields) {
      if (this.abilityFactory.canAccessField(field.id, 'read', user)) {
        if (data[field.apiName] !== undefined) {
          filtered[field.apiName] = data[field.apiName];
        }

        // For lookup fields, also include the related object (e.g., ownerId -> owner)
        if (field.type === 'LOOKUP') {
          const relationName = DynamicModelFactory.getRelationName(field.apiName);
          if (data[relationName] !== undefined) {
            filtered[relationName] = data[relationName];
          }
        }
      }
    }

    return filtered;
  }

  /**
   * Filter data based on field-level permissions.
   * Removes fields the user cannot edit. Note: unlike filterReadableFields,
   * `id` is not force-included here — only editable declared fields pass.
   */
  async filterEditableFields(
    data: any,
    fields: FieldDefinition[],
    user: User & { roles?: any[] },
  ): Promise<any> {
    const filtered: any = {};

    for (const field of fields) {
      if (this.abilityFactory.canAccessField(field.id, 'edit', user)) {
        if (data[field.apiName] !== undefined) {
          filtered[field.apiName] = data[field.apiName];
        }
      }
    }

    return filtered;
  }

  /**
   * Get active record shares for a user on an object: not revoked, and
   * either never expiring or expiring in the future.
   */
  private async getActiveRecordShares(
    objectDefinitionId: string,
    userId: string,
    knex: Knex,
  ): Promise<RecordShare[]> {
    const now = new Date();

    return await RecordShare.query(knex)
      .where('objectDefinitionId', objectDefinitionId)
      .where('granteeUserId', userId)
      .whereNull('revokedAt')
      .where((builder) => {
        builder.whereNull('expiresAt').orWhere('expiresAt', '>', now);
      });
  }

  /**
   * Check if user has permission to create records on this object.
   * Record shares are irrelevant for creation, hence the empty array.
   */
  async canCreate(
    objectDef: ObjectDefinition,
    user: User & { roles?: any[] },
  ): Promise<boolean> {
    const ability = await this.abilityFactory.defineAbilityFor(user, []);
    return ability.can('create', objectDef.id);
  }

  /**
   * Throw ForbiddenException if user cannot perform `action` on `record`.
   */
  async assertCanPerformAction(
    action: Action,
    objectDef: ObjectDefinition,
    record: any,
    user: User & { roles?: any[] },
    knex: Knex,
  ): Promise<void> {
    const can = await this.canPerformAction(action, objectDef, record, user, knex);
    if (!can) {
      throw new ForbiddenException(`You do not have permission to ${action} this record`);
    }
  }

  /**
   * Get table name from API name: CamelCase -> snake_case, then pluralize.
   *
   * NOTE(review): this duplicates SchemaManagementService.getTableName, which
   * (elsewhere in this change-set) appears to also consider object labels when
   * deriving the table name. If the two derivations diverge, the scope queries
   * built here would target the wrong table — confirm and consolidate into a
   * single shared helper.
   */
  private getTableName(apiName: string): string {
    // Convert CamelCase to snake_case and pluralize
    const snakeCase = apiName
      .replace(/([A-Z])/g, '_$1')
      .toLowerCase()
      .replace(/^_/, '');

    // Simple pluralization
    if (snakeCase.endsWith('y')) {
      return snakeCase.slice(0, -1) + 'ies';
    } else if (snakeCase.endsWith('s')) {
      return snakeCase;
    } else {
      return snakeCase + 's';
    }
  }
}
|
||||
19
backend/src/rbac/dto/create-record-share.dto.ts
Normal file
19
backend/src/rbac/dto/create-record-share.dto.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { IsString, IsBoolean, IsOptional, IsDateString } from 'class-validator';
|
||||
|
||||
/**
 * Payload for sharing a single record with another user.
 * Consumed by RecordSharingController
 * (POST runtime/objects/:objectApiName/records/:recordId/shares).
 */
export class CreateRecordShareDto {
  // User receiving access to the record.
  @IsString()
  granteeUserId: string;

  // Per-share access flags, stored as the share's accessLevel.
  @IsBoolean()
  canRead: boolean;

  @IsBoolean()
  canEdit: boolean;

  @IsBoolean()
  canDelete: boolean;

  // Optional ISO-8601 expiry; omitted means the share never expires.
  @IsOptional()
  @IsDateString()
  expiresAt?: string;
}
|
||||
@@ -1,14 +1,16 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { RbacService } from './rbac.service';
|
||||
import { ShareController } from './share.controller';
|
||||
import { RoleController, RoleRuleController } from './role.controller';
|
||||
import { UserController } from './user.controller';
|
||||
import { AbilityFactory } from './ability.factory';
|
||||
import { AuthorizationService } from './authorization.service';
|
||||
import { SetupRolesController } from './setup-roles.controller';
|
||||
import { SetupUsersController } from './setup-users.controller';
|
||||
import { RecordSharingController } from './record-sharing.controller';
|
||||
import { TenantModule } from '../tenant/tenant.module';
|
||||
|
||||
@Module({
|
||||
imports: [TenantModule],
|
||||
providers: [RbacService],
|
||||
controllers: [ShareController, RoleController, RoleRuleController, UserController],
|
||||
exports: [RbacService],
|
||||
controllers: [SetupRolesController, SetupUsersController, RecordSharingController],
|
||||
providers: [RbacService, AbilityFactory, AuthorizationService],
|
||||
exports: [RbacService, AbilityFactory, AuthorizationService],
|
||||
})
|
||||
export class RbacModule {}
|
||||
|
||||
350
backend/src/rbac/record-sharing.controller.ts
Normal file
350
backend/src/rbac/record-sharing.controller.ts
Normal file
@@ -0,0 +1,350 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
ForbiddenException,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { RecordShare } from '../models/record-share.model';
|
||||
import { ObjectDefinition } from '../models/object-definition.model';
|
||||
import { User } from '../models/user.model';
|
||||
import { AuthorizationService } from './authorization.service';
|
||||
import { CreateRecordShareDto } from './dto/create-record-share.dto';
|
||||
|
||||
@Controller('runtime/objects/:objectApiName/records/:recordId/shares')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class RecordSharingController {
|
||||
constructor(
|
||||
private tenantDbService: TenantDatabaseService,
|
||||
private authService: AuthorizationService,
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
async getRecordShares(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Only owner can view shares
|
||||
if (record.ownerId !== currentUser.userId) {
|
||||
// Check if user has modify all permission
|
||||
const user: any = await User.query(knex)
|
||||
.findById(currentUser.userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
throw new ForbiddenException('User not found');
|
||||
}
|
||||
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (!hasModifyAll) {
|
||||
throw new ForbiddenException('Only the record owner or users with Modify All permission can view shares');
|
||||
}
|
||||
}
|
||||
|
||||
// Get all active shares for this record
|
||||
const shares = await RecordShare.query(knex)
|
||||
.where({ objectDefinitionId: objectDef.id, recordId })
|
||||
.whereNull('revokedAt')
|
||||
.where(builder => {
|
||||
builder.whereNull('expiresAt').orWhere('expiresAt', '>', new Date());
|
||||
})
|
||||
.withGraphFetched('[granteeUser]')
|
||||
.orderBy('createdAt', 'desc');
|
||||
|
||||
return shares;
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createRecordShare(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
@Body() data: CreateRecordShareDto,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Check if user can share - either owner or has modify permissions
|
||||
const canShare = await this.canUserShareRecord(
|
||||
currentUser.userId,
|
||||
record,
|
||||
objectDef,
|
||||
knex,
|
||||
);
|
||||
|
||||
if (!canShare) {
|
||||
throw new ForbiddenException('You do not have permission to share this record');
|
||||
}
|
||||
|
||||
// Cannot share with self
|
||||
if (data.granteeUserId === currentUser.userId) {
|
||||
throw new Error('Cannot share record with yourself');
|
||||
}
|
||||
|
||||
// Check if share already exists
|
||||
const existingShare = await RecordShare.query(knex)
|
||||
.where({
|
||||
objectDefinitionId: objectDef.id,
|
||||
recordId,
|
||||
granteeUserId: data.granteeUserId,
|
||||
})
|
||||
.whereNull('revokedAt')
|
||||
.first();
|
||||
|
||||
if (existingShare) {
|
||||
// Update existing share
|
||||
const updated = await RecordShare.query(knex)
|
||||
.patchAndFetchById(existingShare.id, {
|
||||
accessLevel: {
|
||||
canRead: data.canRead,
|
||||
canEdit: data.canEdit,
|
||||
canDelete: data.canDelete,
|
||||
},
|
||||
// Convert ISO string to MySQL datetime format
|
||||
expiresAt: data.expiresAt
|
||||
? knex.raw('?', [new Date(data.expiresAt).toISOString().slice(0, 19).replace('T', ' ')])
|
||||
: null,
|
||||
} as any);
|
||||
|
||||
return RecordShare.query(knex)
|
||||
.findById(updated.id)
|
||||
.withGraphFetched('[granteeUser]');
|
||||
}
|
||||
|
||||
// Create new share
|
||||
const share = await RecordShare.query(knex).insertAndFetch({
|
||||
objectDefinitionId: objectDef.id,
|
||||
recordId,
|
||||
granteeUserId: data.granteeUserId,
|
||||
grantedByUserId: currentUser.userId,
|
||||
accessLevel: {
|
||||
canRead: data.canRead,
|
||||
canEdit: data.canEdit,
|
||||
canDelete: data.canDelete,
|
||||
},
|
||||
// Convert ISO string to MySQL datetime format: YYYY-MM-DD HH:MM:SS
|
||||
expiresAt: data.expiresAt
|
||||
? knex.raw('?', [new Date(data.expiresAt).toISOString().slice(0, 19).replace('T', ' ')])
|
||||
: null,
|
||||
} as any);
|
||||
|
||||
return RecordShare.query(knex)
|
||||
.findById(share.id)
|
||||
.withGraphFetched('[granteeUser]');
|
||||
}
|
||||
|
||||
@Delete(':shareId')
|
||||
async deleteRecordShare(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
@Param('shareId') shareId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await ObjectDefinition.query(knex)
|
||||
.findOne({ apiName: objectApiName });
|
||||
|
||||
if (!objectDef) {
|
||||
throw new Error('Object not found');
|
||||
}
|
||||
|
||||
// Get the record to check ownership
|
||||
const tableName = this.getTableName(
|
||||
objectDef.apiName,
|
||||
objectDef.label,
|
||||
objectDef.pluralLabel,
|
||||
);
|
||||
const record = await knex(tableName)
|
||||
.where({ id: recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new Error('Record not found');
|
||||
}
|
||||
|
||||
// Only owner can revoke shares
|
||||
if (record.ownerId !== currentUser.userId) {
|
||||
// Check if user has modify all permission
|
||||
const user: any = await User.query(knex)
|
||||
.findById(currentUser.userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
throw new ForbiddenException('User not found');
|
||||
}
|
||||
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (!hasModifyAll) {
|
||||
throw new ForbiddenException('Only the record owner or users with Modify All permission can revoke shares');
|
||||
}
|
||||
}
|
||||
|
||||
// Revoke the share (soft delete)
|
||||
await RecordShare.query(knex)
|
||||
.patchAndFetchById(shareId, {
|
||||
revokedAt: knex.fn.now() as any,
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
private async canUserShareRecord(
|
||||
userId: string,
|
||||
record: any,
|
||||
objectDef: ObjectDefinition,
|
||||
knex: any,
|
||||
): Promise<boolean> {
|
||||
// Owner can always share
|
||||
if (record.ownerId === userId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if user has modify all or edit permissions
|
||||
const user: any = await User.query(knex)
|
||||
.findById(userId)
|
||||
.withGraphFetched('roles.objectPermissions');
|
||||
|
||||
if (!user) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for canModifyAll permission
|
||||
const hasModifyAll = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canModifyAll
|
||||
)
|
||||
);
|
||||
|
||||
if (hasModifyAll) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for canEdit permission (user needs edit to share)
|
||||
const hasEdit = user.roles?.some(role =>
|
||||
role.objectPermissions?.some(
|
||||
perm => perm.objectDefinitionId === objectDef.id && perm.canEdit
|
||||
)
|
||||
);
|
||||
|
||||
// If user has edit permission, check if they can actually edit this record
|
||||
// by using the authorization service
|
||||
if (hasEdit) {
|
||||
try {
|
||||
await this.authService.assertCanPerformAction(
|
||||
'update',
|
||||
objectDef,
|
||||
record,
|
||||
user,
|
||||
knex,
|
||||
);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private getTableName(apiName: string, objectLabel?: string, pluralLabel?: string): string {
|
||||
const toSnakePlural = (source: string): string => {
|
||||
const cleaned = source.replace(/[\s-]+/g, '_');
|
||||
const snake = cleaned
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1_$2')
|
||||
.replace(/__+/g, '_')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
if (snake.endsWith('y')) return `${snake.slice(0, -1)}ies`;
|
||||
if (snake.endsWith('s')) return snake;
|
||||
return `${snake}s`;
|
||||
};
|
||||
|
||||
const fromApi = toSnakePlural(apiName);
|
||||
const fromLabel = objectLabel ? toSnakePlural(objectLabel) : null;
|
||||
const fromPlural = pluralLabel ? toSnakePlural(pluralLabel) : null;
|
||||
|
||||
if (fromLabel && fromLabel.includes('_') && !fromApi.includes('_')) {
|
||||
return fromLabel;
|
||||
}
|
||||
if (fromPlural && fromPlural.includes('_') && !fromApi.includes('_')) {
|
||||
return fromPlural;
|
||||
}
|
||||
|
||||
if (fromLabel && fromLabel !== fromApi) return fromLabel;
|
||||
if (fromPlural && fromPlural !== fromApi) return fromPlural;
|
||||
|
||||
return fromApi;
|
||||
}
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Put,
|
||||
Delete,
|
||||
Body,
|
||||
Param,
|
||||
UseGuards,
|
||||
Inject,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { Role } from '../models/role.model';
|
||||
import { RoleRule } from '../models/role-rule.model';
|
||||
import { Knex } from 'knex';
|
||||
|
||||
/** Payload for creating a role; guardName defaults to 'api' in the controller. */
export class CreateRoleDto {
  name: string;
  guardName?: string;
  description?: string;
}
|
||||
|
||||
/** Partial update payload for a role; omitted fields are left unchanged. */
export class UpdateRoleDto {
  name?: string;
  description?: string;
}
|
||||
|
||||
/** Payload for replacing a role's CASL rule set (see RoleRuleController.createOrUpdate). */
export class CreateRoleRuleDto {
  roleId: string;
  rulesJson: any[]; // Array of CASL rules
}
|
||||
|
||||
/** Payload for updating an existing rule row's CASL rules by id. */
export class UpdateRoleRuleDto {
  rulesJson: any[];
}
|
||||
|
||||
@Controller('roles')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class RoleController {
|
||||
constructor(@Inject('KnexConnection') private readonly knex: Knex) {}
|
||||
|
||||
/**
|
||||
* List all roles
|
||||
*/
|
||||
@Get()
|
||||
async list() {
|
||||
return Role.query(this.knex).withGraphFetched('[roleRules]');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single role by ID
|
||||
*/
|
||||
@Get(':id')
|
||||
async get(@Param('id') id: string) {
|
||||
return Role.query(this.knex)
|
||||
.findById(id)
|
||||
.withGraphFetched('[roleRules, permissions]');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new role
|
||||
*/
|
||||
@Post()
|
||||
async create(@Body() createDto: CreateRoleDto) {
|
||||
return Role.query(this.knex).insert({
|
||||
name: createDto.name,
|
||||
guardName: createDto.guardName || 'api',
|
||||
description: createDto.description,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a role
|
||||
*/
|
||||
@Put(':id')
|
||||
async update(@Param('id') id: string, @Body() updateDto: UpdateRoleDto) {
|
||||
return Role.query(this.knex).patchAndFetchById(id, updateDto);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a role
|
||||
*/
|
||||
@Delete(':id')
|
||||
async delete(@Param('id') id: string) {
|
||||
await Role.query(this.knex).deleteById(id);
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
|
||||
@Controller('role-rules')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class RoleRuleController {
|
||||
constructor(@Inject('KnexConnection') private readonly knex: Knex) {}
|
||||
|
||||
/**
|
||||
* Get rules for a role
|
||||
*/
|
||||
@Get('role/:roleId')
|
||||
async getForRole(@Param('roleId') roleId: string) {
|
||||
return RoleRule.query(this.knex).where('roleId', roleId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or update role rules
|
||||
* This will replace existing rules for the role
|
||||
*/
|
||||
@Post()
|
||||
async createOrUpdate(@Body() dto: CreateRoleRuleDto) {
|
||||
// Delete existing rules for this role
|
||||
await RoleRule.query(this.knex).where('roleId', dto.roleId).delete();
|
||||
|
||||
// Insert new rules
|
||||
return RoleRule.query(this.knex).insert({
|
||||
roleId: dto.roleId,
|
||||
rulesJson: dto.rulesJson,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update role rules by ID
|
||||
*/
|
||||
@Put(':id')
|
||||
async update(@Param('id') id: string, @Body() dto: UpdateRoleRuleDto) {
|
||||
return RoleRule.query(this.knex).patchAndFetchById(id, {
|
||||
rulesJson: dto.rulesJson,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete role rules
|
||||
*/
|
||||
@Delete(':id')
|
||||
async delete(@Param('id') id: string) {
|
||||
await RoleRule.query(this.knex).deleteById(id);
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
141
backend/src/rbac/setup-roles.controller.ts
Normal file
141
backend/src/rbac/setup-roles.controller.ts
Normal file
@@ -0,0 +1,141 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { Role } from '../models/role.model';
|
||||
|
||||
@Controller('setup/roles')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class SetupRolesController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
@Get()
|
||||
async getRoles(@TenantId() tenantId: string) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
return await Role.query(knex).select('*').orderBy('name', 'asc');
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
async getRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
return await Role.query(knex).findById(id).withGraphFetched('users');
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() data: { name: string; description?: string; guardName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const role = await Role.query(knex).insert({
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
guardName: data.guardName || 'tenant',
|
||||
});
|
||||
|
||||
return role;
|
||||
}
|
||||
|
||||
@Patch(':id')
|
||||
async updateRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
@Body() data: { name?: string; description?: string; guardName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const updateData: any = {};
|
||||
|
||||
if (data.name) updateData.name = data.name;
|
||||
if (data.description !== undefined) updateData.description = data.description;
|
||||
if (data.guardName) updateData.guardName = data.guardName;
|
||||
|
||||
const role = await Role.query(knex).patchAndFetchById(id, updateData);
|
||||
return role;
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
async deleteRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Delete role user assignments first
|
||||
await knex('user_roles').where({ roleId: id }).delete();
|
||||
|
||||
// Delete role permissions
|
||||
await knex('role_permissions').where({ roleId: id }).delete();
|
||||
await knex('role_object_permissions').where({ roleId: id }).delete();
|
||||
|
||||
// Delete the role
|
||||
await Role.query(knex).deleteById(id);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post(':roleId/users')
|
||||
async addUserToRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
@Body() data: { userId: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Check if assignment already exists
|
||||
const existing = await knex('user_roles')
|
||||
.where({ userId: data.userId, roleId })
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
return { success: true, message: 'User already assigned' };
|
||||
}
|
||||
|
||||
await knex('user_roles').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
userId: data.userId,
|
||||
roleId,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Delete(':roleId/users/:userId')
|
||||
async removeUserFromRole(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
@Param('userId') userId: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
await knex('user_roles')
|
||||
.where({ userId, roleId })
|
||||
.delete();
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
146
backend/src/rbac/setup-users.controller.ts
Normal file
146
backend/src/rbac/setup-users.controller.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { User } from '../models/user.model';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
|
||||
@Controller('setup/users')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class SetupUsersController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
@Get()
|
||||
async getUsers(@TenantId() tenantId: string) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
return await User.query(knex).withGraphFetched('roles');
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
async getUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
return await User.query(knex).findById(id).withGraphFetched('roles');
|
||||
}
|
||||
|
||||
@Post()
|
||||
async createUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Body() data: { email: string; password: string; firstName?: string; lastName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Hash password
|
||||
const hashedPassword = await bcrypt.hash(data.password, 10);
|
||||
|
||||
const user = await User.query(knex).insert({
|
||||
email: data.email,
|
||||
password: hashedPassword,
|
||||
firstName: data.firstName,
|
||||
lastName: data.lastName,
|
||||
isActive: true,
|
||||
});
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
@Patch(':id')
|
||||
async updateUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
@Body() data: { email?: string; password?: string; firstName?: string; lastName?: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const updateData: any = {};
|
||||
|
||||
if (data.email) updateData.email = data.email;
|
||||
if (data.firstName !== undefined) updateData.firstName = data.firstName;
|
||||
if (data.lastName !== undefined) updateData.lastName = data.lastName;
|
||||
|
||||
// Hash password if provided
|
||||
if (data.password) {
|
||||
updateData.password = await bcrypt.hash(data.password, 10);
|
||||
}
|
||||
|
||||
const user = await User.query(knex).patchAndFetchById(id, updateData);
|
||||
return user;
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
async deleteUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Delete user role assignments first
|
||||
await knex('user_roles').where({ userId: id }).delete();
|
||||
|
||||
// Delete the user
|
||||
await User.query(knex).deleteById(id);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post(':userId/roles')
|
||||
async addRoleToUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('userId') userId: string,
|
||||
@Body() data: { roleId: string },
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Check if assignment already exists
|
||||
const existing = await knex('user_roles')
|
||||
.where({ userId, roleId: data.roleId })
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
return { success: true, message: 'Role already assigned' };
|
||||
}
|
||||
|
||||
await knex('user_roles').insert({
|
||||
id: knex.raw('(UUID())'),
|
||||
userId,
|
||||
roleId: data.roleId,
|
||||
created_at: knex.fn.now(),
|
||||
updated_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Delete(':userId/roles/:roleId')
|
||||
async removeRoleFromUser(
|
||||
@TenantId() tenantId: string,
|
||||
@Param('userId') userId: string,
|
||||
@Param('roleId') roleId: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
await knex('user_roles')
|
||||
.where({ userId, roleId })
|
||||
.delete();
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
@@ -1,243 +0,0 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Body,
|
||||
Param,
|
||||
Query,
|
||||
UseGuards,
|
||||
ForbiddenException,
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { IsString, IsArray, IsOptional, IsDateString } from 'class-validator';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
|
||||
/**
 * Payload for creating a record share via ShareController (POST rbac/shares).
 * Unlike CreateRecordShareDto, access is expressed as an action list plus
 * optional field scoping rather than boolean flags.
 */
export class CreateShareDto {
  // API name of the object type the record belongs to.
  @IsString()
  objectApiName: string;

  // Id of the record being shared.
  @IsString()
  recordId: string;

  // User receiving access.
  @IsString()
  granteeUserId: string;

  @IsArray()
  @IsString({ each: true })
  actions: string[]; // ["read"], ["read", "update"], etc.

  @IsOptional()
  @IsArray()
  @IsString({ each: true })
  fields?: string[]; // Optional field scoping

  // Optional ISO-8601 expiry; omitted means the share never expires.
  @IsOptional()
  @IsDateString()
  expiresAt?: string;
}
|
||||
|
||||
/** Partial update payload for an existing share; omitted fields are unchanged. */
export class UpdateShareDto {
  @IsOptional()
  @IsArray()
  @IsString({ each: true })
  actions?: string[];

  @IsOptional()
  @IsArray()
  @IsString({ each: true })
  fields?: string[];

  // ISO-8601 expiry timestamp.
  @IsOptional()
  @IsDateString()
  expiresAt?: string;
}
|
||||
|
||||
@Controller('rbac/shares')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class ShareController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
/**
|
||||
* Create a new share
|
||||
* Only the owner (or users with share permission) can share a record
|
||||
*/
|
||||
@Post()
|
||||
async create(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
@Body() createDto: CreateShareDto,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition by apiName
|
||||
const objectDef = await knex('object_definitions')
|
||||
.where({ apiName: createDto.objectApiName })
|
||||
.first();
|
||||
|
||||
if (!objectDef) {
|
||||
throw new NotFoundException('Object definition not found');
|
||||
}
|
||||
|
||||
// Get the table name for the object
|
||||
const tableName = this.getTableName(createDto.objectApiName);
|
||||
|
||||
// Verify the user owns the record
|
||||
const record = await knex(tableName)
|
||||
.where({ id: createDto.recordId })
|
||||
.first();
|
||||
|
||||
if (!record) {
|
||||
throw new NotFoundException('Record not found');
|
||||
}
|
||||
|
||||
if (record.ownerId !== currentUser.userId) {
|
||||
throw new ForbiddenException('Only the record owner can share it');
|
||||
}
|
||||
|
||||
// Create the share
|
||||
const shareId = require('crypto').randomUUID();
|
||||
await knex('record_shares').insert({
|
||||
id: shareId,
|
||||
object_definition_id: objectDef.id,
|
||||
record_id: createDto.recordId,
|
||||
grantee_user_id: createDto.granteeUserId,
|
||||
granted_by_user_id: currentUser.userId,
|
||||
actions: JSON.stringify(createDto.actions),
|
||||
fields: createDto.fields ? JSON.stringify(createDto.fields) : null,
|
||||
expires_at: createDto.expiresAt,
|
||||
created_at: knex.fn.now(),
|
||||
});
|
||||
|
||||
const share = await knex('record_shares').where({ id: shareId }).first();
|
||||
|
||||
return {
|
||||
...share,
|
||||
actions: typeof share.actions === 'string' ? JSON.parse(share.actions) : share.actions,
|
||||
fields: share.fields ? (typeof share.fields === 'string' ? JSON.parse(share.fields) : share.fields) : null,
|
||||
};
|
||||
}
|
||||
|
||||
private getTableName(objectApiName: string): string {
|
||||
const snakeCase = objectApiName
|
||||
.replace(/([A-Z])/g, '_$1')
|
||||
.toLowerCase()
|
||||
.replace(/^_/, '');
|
||||
|
||||
if (snakeCase.endsWith('y')) {
|
||||
return snakeCase.slice(0, -1) + 'ies';
|
||||
} else if (snakeCase.endsWith('s')) {
|
||||
return snakeCase;
|
||||
} else {
|
||||
return snakeCase + 's';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List shares for a specific record
|
||||
* Only owner or users with access can see shares
|
||||
*/
|
||||
@Get(':objectApiName/:recordId')
|
||||
async listForRecord(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
@Param('objectApiName') objectApiName: string,
|
||||
@Param('recordId') recordId: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get object definition
|
||||
const objectDef = await knex('object_definitions')
|
||||
.where({ apiName: objectApiName })
|
||||
.first();
|
||||
|
||||
if (!objectDef) {
|
||||
throw new NotFoundException('Object definition not found');
|
||||
}
|
||||
|
||||
// Get shares for this record
|
||||
const shares = await knex('record_shares')
|
||||
.where({
|
||||
object_definition_id: objectDef.id,
|
||||
record_id: recordId,
|
||||
})
|
||||
.whereNull('revoked_at')
|
||||
.select('*');
|
||||
|
||||
// Fetch user details for each share
|
||||
const sharesWithUsers = await Promise.all(
|
||||
shares.map(async (share: any) => {
|
||||
const granteeUser = await knex('users')
|
||||
.where({ id: share.grantee_user_id })
|
||||
.select('id', 'email', 'firstName', 'lastName', 'name')
|
||||
.first();
|
||||
|
||||
const grantedByUser = await knex('users')
|
||||
.where({ id: share.granted_by_user_id })
|
||||
.select('id', 'email', 'firstName', 'lastName', 'name')
|
||||
.first();
|
||||
|
||||
return {
|
||||
id: share.id,
|
||||
recordId: share.record_id,
|
||||
actions: typeof share.actions === 'string' ? JSON.parse(share.actions) : share.actions,
|
||||
fields: share.fields ? (typeof share.fields === 'string' ? JSON.parse(share.fields) : share.fields) : null,
|
||||
expiresAt: share.expires_at,
|
||||
createdAt: share.created_at,
|
||||
granteeUser: {
|
||||
id: granteeUser.id,
|
||||
email: granteeUser.email,
|
||||
name: granteeUser.firstName && granteeUser.lastName
|
||||
? `${granteeUser.firstName} ${granteeUser.lastName}`
|
||||
: granteeUser.name || granteeUser.email,
|
||||
},
|
||||
grantedByUser: {
|
||||
id: grantedByUser.id,
|
||||
email: grantedByUser.email,
|
||||
name: grantedByUser.firstName && grantedByUser.lastName
|
||||
? `${grantedByUser.firstName} ${grantedByUser.lastName}`
|
||||
: grantedByUser.name || grantedByUser.email,
|
||||
},
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return sharesWithUsers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke a share (soft delete)
|
||||
*/
|
||||
@Delete(':id')
|
||||
async revoke(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
@Param('id') id: string,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
const share = await knex('record_shares').where({ id }).first();
|
||||
|
||||
if (!share) {
|
||||
throw new NotFoundException('Share not found');
|
||||
}
|
||||
|
||||
// Only the grantor can revoke
|
||||
if (share.granted_by_user_id !== currentUser.userId) {
|
||||
throw new ForbiddenException('Unauthorized');
|
||||
}
|
||||
|
||||
await knex('record_shares')
|
||||
.where({ id })
|
||||
.update({ revoked_at: knex.fn.now() });
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
import { Controller, Get, UseGuards } from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantId } from '../tenant/tenant.decorator';
|
||||
import { CurrentUser } from '../auth/current-user.decorator';
|
||||
import { TenantDatabaseService } from '../tenant/tenant-database.service';
|
||||
import { User } from '../models/user.model';
|
||||
|
||||
@Controller('rbac/users')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class UserController {
|
||||
constructor(private tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
@Get()
|
||||
async getUsers(
|
||||
@TenantId() tenantId: string,
|
||||
@CurrentUser() currentUser: any,
|
||||
) {
|
||||
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
|
||||
const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
|
||||
|
||||
// Get all active users from tenant database (excluding current user)
|
||||
let query = User.query(knex)
|
||||
.select('id', 'email', 'firstName', 'lastName')
|
||||
.where('isActive', true);
|
||||
|
||||
// Exclude current user if we have their ID
|
||||
if (currentUser?.userId) {
|
||||
query = query.whereNot('id', currentUser.userId);
|
||||
}
|
||||
|
||||
const users = await query;
|
||||
|
||||
return users.map((user) => ({
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
name: user.firstName && user.lastName
|
||||
? `${user.firstName} ${user.lastName}`
|
||||
: user.email,
|
||||
}));
|
||||
}
|
||||
}
|
||||
8
backend/src/search/meilisearch.module.ts
Normal file
8
backend/src/search/meilisearch.module.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { MeilisearchService } from './meilisearch.service';
|
||||
|
||||
@Module({
|
||||
providers: [MeilisearchService],
|
||||
exports: [MeilisearchService],
|
||||
})
|
||||
export class MeilisearchModule {}
|
||||
244
backend/src/search/meilisearch.service.ts
Normal file
244
backend/src/search/meilisearch.service.ts
Normal file
@@ -0,0 +1,244 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import * as http from 'http';
|
||||
import * as https from 'https';
|
||||
|
||||
type MeiliConfig = {
|
||||
host: string;
|
||||
apiKey?: string;
|
||||
indexPrefix: string;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class MeilisearchService {
|
||||
private readonly logger = new Logger(MeilisearchService.name);
|
||||
|
||||
isEnabled(): boolean {
|
||||
return Boolean(this.getConfig());
|
||||
}
|
||||
|
||||
async searchRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
query: string,
|
||||
displayField?: string,
|
||||
): Promise<{ id: string; hit: any } | null> {
|
||||
const config = this.getConfig();
|
||||
if (!config) return null;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/search`;
|
||||
|
||||
console.log('querying Meilisearch index:', { indexName, query, displayField });
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, {
|
||||
q: query,
|
||||
limit: 5,
|
||||
}, this.buildHeaders(config));
|
||||
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch query failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const hits = Array.isArray(response.body?.hits) ? response.body.hits : [];
|
||||
if (hits.length === 0) return null;
|
||||
|
||||
if (displayField) {
|
||||
const loweredQuery = query.toLowerCase();
|
||||
const exactMatch = hits.find((hit: any) => {
|
||||
const value = hit?.[displayField];
|
||||
return value && String(value).toLowerCase() === loweredQuery;
|
||||
});
|
||||
if (exactMatch?.id) {
|
||||
return { id: exactMatch.id, hit: exactMatch };
|
||||
}
|
||||
}
|
||||
|
||||
const match = hits[0];
|
||||
if (match?.id) {
|
||||
return { id: match.id, hit: match };
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch lookup failed: ${error.message}`);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async searchRecords(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
query: string,
|
||||
options?: { limit?: number; offset?: number },
|
||||
): Promise<{ hits: any[]; total: number }> {
|
||||
const config = this.getConfig();
|
||||
if (!config) return { hits: [], total: 0 };
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/search`;
|
||||
const limit = Number.isFinite(Number(options?.limit)) ? Number(options?.limit) : 20;
|
||||
const offset = Number.isFinite(Number(options?.offset)) ? Number(options?.offset) : 0;
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, {
|
||||
q: query,
|
||||
limit,
|
||||
offset,
|
||||
}, this.buildHeaders(config));
|
||||
|
||||
console.log('Meilisearch response body:', response.body);
|
||||
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch query failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
return { hits: [], total: 0 };
|
||||
}
|
||||
|
||||
const hits = Array.isArray(response.body?.hits) ? response.body.hits : [];
|
||||
const total =
|
||||
response.body?.estimatedTotalHits ??
|
||||
response.body?.nbHits ??
|
||||
hits.length;
|
||||
return { hits, total };
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch query failed: ${error.message}`);
|
||||
return { hits: [], total: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
async upsertRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
record: Record<string, any>,
|
||||
fieldsToIndex: string[],
|
||||
): Promise<void> {
|
||||
const config = this.getConfig();
|
||||
if (!config || !record?.id) return;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/documents?primaryKey=id`;
|
||||
const document = this.pickRecordFields(record, fieldsToIndex);
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('POST', url, [document], this.buildHeaders(config));
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch upsert failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch upsert failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
async deleteRecord(
|
||||
tenantId: string,
|
||||
objectApiName: string,
|
||||
recordId: string,
|
||||
): Promise<void> {
|
||||
const config = this.getConfig();
|
||||
if (!config || !recordId) return;
|
||||
|
||||
const indexName = this.buildIndexName(config, tenantId, objectApiName);
|
||||
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/documents/${encodeURIComponent(recordId)}`;
|
||||
|
||||
try {
|
||||
const response = await this.requestJson('DELETE', url, undefined, this.buildHeaders(config));
|
||||
if (!this.isSuccessStatus(response.status)) {
|
||||
this.logger.warn(
|
||||
`Meilisearch delete failed for index ${indexName}: ${response.status}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Meilisearch delete failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
private getConfig(): MeiliConfig | null {
|
||||
const host = process.env.MEILI_HOST || process.env.MEILISEARCH_HOST;
|
||||
if (!host) return null;
|
||||
const trimmedHost = host.replace(/\/+$/, '');
|
||||
const apiKey = process.env.MEILI_API_KEY || process.env.MEILISEARCH_API_KEY;
|
||||
const indexPrefix = process.env.MEILI_INDEX_PREFIX || 'tenant_';
|
||||
return { host: trimmedHost, apiKey, indexPrefix };
|
||||
}
|
||||
|
||||
private buildIndexName(config: MeiliConfig, tenantId: string, objectApiName: string): string {
|
||||
return `${config.indexPrefix}${tenantId}_${objectApiName}`.toLowerCase();
|
||||
}
|
||||
|
||||
private buildHeaders(config: MeiliConfig): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
if (config.apiKey) {
|
||||
headers['X-Meili-API-Key'] = config.apiKey;
|
||||
headers.Authorization = `Bearer ${config.apiKey}`;
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
private pickRecordFields(record: Record<string, any>, fields: string[]): Record<string, any> {
|
||||
const document: Record<string, any> = { id: record.id };
|
||||
for (const field of fields) {
|
||||
if (record[field] !== undefined) {
|
||||
document[field] = record[field];
|
||||
}
|
||||
}
|
||||
return document;
|
||||
}
|
||||
|
||||
private isSuccessStatus(status: number): boolean {
|
||||
return status >= 200 && status < 300;
|
||||
}
|
||||
|
||||
private requestJson(
|
||||
method: 'POST' | 'DELETE',
|
||||
url: string,
|
||||
payload: any,
|
||||
headers: Record<string, string>,
|
||||
): Promise<{ status: number; body: any }> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parsedUrl = new URL(url);
|
||||
const client = parsedUrl.protocol === 'https:' ? https : http;
|
||||
const request = client.request(
|
||||
{
|
||||
method,
|
||||
hostname: parsedUrl.hostname,
|
||||
port: parsedUrl.port,
|
||||
path: `${parsedUrl.pathname}${parsedUrl.search}`,
|
||||
headers,
|
||||
},
|
||||
(response) => {
|
||||
let data = '';
|
||||
response.on('data', (chunk) => {
|
||||
data += chunk;
|
||||
});
|
||||
response.on('end', () => {
|
||||
if (!data) {
|
||||
resolve({ status: response.statusCode || 0, body: null });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const body = JSON.parse(data);
|
||||
resolve({ status: response.statusCode || 0, body });
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
request.on('error', reject);
|
||||
if (payload !== undefined) {
|
||||
request.write(JSON.stringify(payload));
|
||||
}
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,14 @@
|
||||
import Knex from 'knex';
|
||||
import type { Knex as KnexType } from 'knex';
|
||||
import { Model } from 'objection';
|
||||
import { CentralTenant, CentralDomain, CentralUser } from '../models/central.model';
|
||||
|
||||
let centralKnex: KnexType | null = null;
|
||||
let centralKnex: Knex.Knex | null = null;
|
||||
|
||||
/**
|
||||
* Get or create a Knex instance for the central database
|
||||
* This is used for Objection models that work with central entities
|
||||
*/
|
||||
export function getCentralKnex(): KnexType {
|
||||
export function getCentralKnex(): Knex.Knex {
|
||||
if (!centralKnex) {
|
||||
const centralDbUrl = process.env.CENTRAL_DATABASE_URL;
|
||||
|
||||
|
||||
@@ -110,8 +110,9 @@ export class TenantDatabaseService {
|
||||
* @deprecated Use getTenantKnexByDomain or getTenantKnexById instead
|
||||
*/
|
||||
async getTenantKnex(tenantIdOrSlug: string): Promise<Knex> {
|
||||
// Assume it's a domain if it contains a dot
|
||||
return this.getTenantKnexByDomain(tenantIdOrSlug);
|
||||
// Resolve tenant ID first, then get connection by ID
|
||||
const tenantId = await this.resolveTenantId(tenantIdOrSlug);
|
||||
return this.getTenantKnexById(tenantId);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -242,4 +243,26 @@ export class TenantDatabaseService {
|
||||
decrypted += decipher.final('utf8');
|
||||
return decrypted;
|
||||
}
|
||||
|
||||
/**
 * Encrypt integrations config JSON object.
 * @param config - Plain object containing integration credentials
 * @returns Encrypted JSON string, or null when no config was provided
 */
encryptIntegrationsConfig(config: any): string | null {
  // Return type widened to `string | null`: the original declared `string`
  // but returned null for falsy input, which is unsound under strictNullChecks.
  if (!config) return null;
  const jsonString = JSON.stringify(config);
  return this.encryptPassword(jsonString);
}
|
||||
|
||||
/**
 * Decrypt integrations config JSON string back into a plain object.
 * @param encryptedConfig - Encrypted JSON string (falsy input yields null)
 * @returns Plain object with integration credentials, or null when empty
 */
decryptIntegrationsConfig(encryptedConfig: string): any {
  if (!encryptedConfig) return null;
  // NOTE(review): JSON.parse will throw on a corrupted/garbled ciphertext
  // payload — presumably acceptable here; confirm callers handle it.
  return JSON.parse(this.decryptPassword(encryptedConfig));
}
|
||||
}
|
||||
|
||||
@@ -176,7 +176,7 @@ export class TenantProvisioningService {
|
||||
* Seed default data for new tenant
|
||||
*/
|
||||
private async seedDefaultData(tenantId: string) {
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnex(tenantId);
|
||||
const tenantKnex = await this.tenantDbService.getTenantKnexById(tenantId);
|
||||
|
||||
try {
|
||||
// Create default roles
|
||||
|
||||
155
backend/src/tenant/tenant.controller.ts
Normal file
155
backend/src/tenant/tenant.controller.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Put,
|
||||
Body,
|
||||
UseGuards,
|
||||
Req,
|
||||
} from '@nestjs/common';
|
||||
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
|
||||
import { TenantDatabaseService } from './tenant-database.service';
|
||||
import { getCentralPrisma } from '../prisma/central-prisma.service';
|
||||
import { TenantId } from './tenant.decorator';
|
||||
|
||||
@Controller('tenant')
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class TenantController {
|
||||
constructor(private readonly tenantDbService: TenantDatabaseService) {}
|
||||
|
||||
/**
|
||||
* Get integrations configuration for the current tenant
|
||||
*/
|
||||
@Get('integrations')
|
||||
async getIntegrationsConfig(@TenantId() domain: string) {
|
||||
const centralPrisma = getCentralPrisma();
|
||||
|
||||
// Look up tenant by domain
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
include: { tenant: { select: { id: true, integrationsConfig: true } } },
|
||||
});
|
||||
|
||||
if (!domainRecord?.tenant || !domainRecord.tenant.integrationsConfig) {
|
||||
return { data: null };
|
||||
}
|
||||
|
||||
// Decrypt the config
|
||||
const config = this.tenantDbService.decryptIntegrationsConfig(
|
||||
domainRecord.tenant.integrationsConfig as any,
|
||||
);
|
||||
|
||||
// Return config with sensitive fields masked
|
||||
const maskedConfig = this.maskSensitiveFields(config);
|
||||
|
||||
return { data: maskedConfig };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update integrations configuration for the current tenant
|
||||
*/
|
||||
@Put('integrations')
|
||||
async updateIntegrationsConfig(
|
||||
@TenantId() domain: string,
|
||||
@Body() body: { integrationsConfig: any },
|
||||
) {
|
||||
const { integrationsConfig } = body;
|
||||
|
||||
if (!domain) {
|
||||
throw new Error('Domain is missing from request');
|
||||
}
|
||||
|
||||
// Look up tenant by domain
|
||||
const centralPrisma = getCentralPrisma();
|
||||
const domainRecord = await centralPrisma.domain.findUnique({
|
||||
where: { domain },
|
||||
include: { tenant: { select: { id: true, integrationsConfig: true } } },
|
||||
});
|
||||
|
||||
if (!domainRecord?.tenant) {
|
||||
throw new Error(`Tenant with domain ${domain} not found`);
|
||||
}
|
||||
|
||||
// Merge with existing config to preserve masked values
|
||||
let finalConfig = integrationsConfig;
|
||||
if (domainRecord.tenant.integrationsConfig) {
|
||||
const existingConfig = this.tenantDbService.decryptIntegrationsConfig(
|
||||
domainRecord.tenant.integrationsConfig as any,
|
||||
);
|
||||
|
||||
// Replace masked values with actual values from existing config
|
||||
finalConfig = this.unmaskConfig(integrationsConfig, existingConfig);
|
||||
}
|
||||
|
||||
// Encrypt the config
|
||||
const encryptedConfig = this.tenantDbService.encryptIntegrationsConfig(
|
||||
finalConfig,
|
||||
);
|
||||
|
||||
// Update in database
|
||||
await centralPrisma.tenant.update({
|
||||
where: { id: domainRecord.tenant.id },
|
||||
data: {
|
||||
integrationsConfig: encryptedConfig as any,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Integrations configuration updated successfully',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Unmask config by replacing masked values with actual values from existing config
|
||||
*/
|
||||
private unmaskConfig(newConfig: any, existingConfig: any): any {
|
||||
const result = { ...newConfig };
|
||||
|
||||
// Unmask Twilio credentials
|
||||
if (result.twilio && existingConfig.twilio) {
|
||||
if (result.twilio.authToken === '••••••••' && existingConfig.twilio.authToken) {
|
||||
result.twilio.authToken = existingConfig.twilio.authToken;
|
||||
}
|
||||
if (result.twilio.apiSecret === '••••••••' && existingConfig.twilio.apiSecret) {
|
||||
result.twilio.apiSecret = existingConfig.twilio.apiSecret;
|
||||
}
|
||||
}
|
||||
|
||||
// Unmask OpenAI credentials
|
||||
if (result.openai && existingConfig.openai) {
|
||||
if (result.openai.apiKey === '••••••••' && existingConfig.openai.apiKey) {
|
||||
result.openai.apiKey = existingConfig.openai.apiKey;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mask sensitive fields for API responses
|
||||
*/
|
||||
private maskSensitiveFields(config: any): any {
|
||||
if (!config) return null;
|
||||
|
||||
const masked = { ...config };
|
||||
|
||||
// Mask Twilio credentials
|
||||
if (masked.twilio) {
|
||||
masked.twilio = {
|
||||
...masked.twilio,
|
||||
authToken: masked.twilio.authToken ? '••••••••' : '',
|
||||
apiSecret: masked.twilio.apiSecret ? '••••••••' : '',
|
||||
};
|
||||
}
|
||||
|
||||
// Mask OpenAI credentials
|
||||
if (masked.openai) {
|
||||
masked.openai = {
|
||||
...masked.openai,
|
||||
apiKey: masked.openai.apiKey ? '••••••••' : '',
|
||||
};
|
||||
}
|
||||
|
||||
return masked;
|
||||
}
|
||||
}
|
||||
@@ -1,43 +1,21 @@
|
||||
import { Module, NestModule, MiddlewareConsumer, Scope } from '@nestjs/common';
|
||||
import { REQUEST } from '@nestjs/core';
|
||||
import { Module, NestModule, MiddlewareConsumer } from '@nestjs/common';
|
||||
import { TenantMiddleware } from './tenant.middleware';
|
||||
import { TenantDatabaseService } from './tenant-database.service';
|
||||
import { TenantProvisioningService } from './tenant-provisioning.service';
|
||||
import { TenantProvisioningController } from './tenant-provisioning.controller';
|
||||
import { CentralAdminController } from './central-admin.controller';
|
||||
import { TenantController } from './tenant.controller';
|
||||
import { PrismaModule } from '../prisma/prisma.module';
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule],
|
||||
controllers: [TenantProvisioningController, CentralAdminController],
|
||||
controllers: [TenantProvisioningController, CentralAdminController, TenantController],
|
||||
providers: [
|
||||
TenantDatabaseService,
|
||||
TenantProvisioningService,
|
||||
TenantMiddleware,
|
||||
{
|
||||
provide: 'KnexConnection',
|
||||
scope: Scope.REQUEST,
|
||||
inject: [REQUEST, TenantDatabaseService],
|
||||
useFactory: async (request: any, tenantDbService: TenantDatabaseService) => {
|
||||
// Try to get subdomain first (for domain-based routing)
|
||||
const subdomain = request.raw?.subdomain || request.subdomain;
|
||||
const tenantId = request.raw?.tenantId || request.tenantId;
|
||||
|
||||
if (!subdomain && !tenantId) {
|
||||
throw new Error('Neither subdomain nor tenant ID found in request');
|
||||
}
|
||||
|
||||
// Prefer subdomain lookup (more reliable for domain-based routing)
|
||||
if (subdomain) {
|
||||
return await tenantDbService.getTenantKnexByDomain(subdomain);
|
||||
}
|
||||
|
||||
// Fallback to tenant ID lookup
|
||||
return await tenantDbService.getTenantKnexById(tenantId);
|
||||
},
|
||||
},
|
||||
],
|
||||
exports: [TenantDatabaseService, TenantProvisioningService, 'KnexConnection'],
|
||||
exports: [TenantDatabaseService, TenantProvisioningService],
|
||||
})
|
||||
export class TenantModule implements NestModule {
|
||||
configure(consumer: MiddlewareConsumer) {
|
||||
|
||||
214
backend/src/voice/audio-converter.service.ts
Normal file
214
backend/src/voice/audio-converter.service.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
/**
|
||||
* Audio format converter for Twilio <-> OpenAI audio streaming
|
||||
*
|
||||
* Twilio Media Streams format:
|
||||
* - Codec: μ-law (G.711)
|
||||
* - Sample rate: 8kHz
|
||||
* - Encoding: base64
|
||||
* - Chunk size: 20ms (160 bytes)
|
||||
*
|
||||
* OpenAI Realtime API format:
|
||||
* - Codec: PCM16
|
||||
* - Sample rate: 24kHz
|
||||
* - Encoding: base64
|
||||
* - Mono channel
|
||||
*/
|
||||
@Injectable()
|
||||
export class AudioConverterService {
|
||||
private readonly logger = new Logger(AudioConverterService.name);
|
||||
|
||||
// μ-law decode lookup table
|
||||
private readonly MULAW_DECODE_TABLE = this.buildMuLawDecodeTable();
|
||||
|
||||
// μ-law encode lookup table
|
||||
private readonly MULAW_ENCODE_TABLE = this.buildMuLawEncodeTable();
|
||||
|
||||
/**
|
||||
* Build μ-law to linear PCM16 decode table
|
||||
*/
|
||||
private buildMuLawDecodeTable(): Int16Array {
|
||||
const table = new Int16Array(256);
|
||||
for (let i = 0; i < 256; i++) {
|
||||
const mulaw = ~i;
|
||||
const exponent = (mulaw >> 4) & 0x07;
|
||||
const mantissa = mulaw & 0x0f;
|
||||
let sample = (mantissa << 3) + 0x84;
|
||||
sample <<= exponent;
|
||||
sample -= 0x84;
|
||||
if ((mulaw & 0x80) === 0) {
|
||||
sample = -sample;
|
||||
}
|
||||
table[i] = sample;
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build linear PCM16 to μ-law encode table
|
||||
*/
|
||||
private buildMuLawEncodeTable(): Uint8Array {
|
||||
const table = new Uint8Array(65536);
|
||||
for (let i = 0; i < 65536; i++) {
|
||||
const sample = (i - 32768);
|
||||
const sign = sample < 0 ? 0x80 : 0x00;
|
||||
const magnitude = Math.abs(sample);
|
||||
|
||||
// Add bias
|
||||
let biased = magnitude + 0x84;
|
||||
|
||||
// Find exponent
|
||||
let exponent = 7;
|
||||
for (let exp = 0; exp < 8; exp++) {
|
||||
if (biased <= (0xff << exp)) {
|
||||
exponent = exp;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract mantissa
|
||||
const mantissa = (biased >> (exponent + 3)) & 0x0f;
|
||||
|
||||
// Combine sign, exponent, mantissa
|
||||
const mulaw = ~(sign | (exponent << 4) | mantissa);
|
||||
table[i] = mulaw & 0xff;
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode μ-law audio to linear PCM16
|
||||
* @param mulawData - Buffer containing μ-law encoded audio
|
||||
* @returns Buffer containing PCM16 audio (16-bit little-endian)
|
||||
*/
|
||||
decodeMuLaw(mulawData: Buffer): Buffer {
|
||||
const pcm16 = Buffer.allocUnsafe(mulawData.length * 2);
|
||||
|
||||
for (let i = 0; i < mulawData.length; i++) {
|
||||
const sample = this.MULAW_DECODE_TABLE[mulawData[i]];
|
||||
pcm16.writeInt16LE(sample, i * 2);
|
||||
}
|
||||
|
||||
return pcm16;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode linear PCM16 to μ-law
|
||||
* @param pcm16Data - Buffer containing PCM16 audio (16-bit little-endian)
|
||||
* @returns Buffer containing μ-law encoded audio
|
||||
*/
|
||||
encodeMuLaw(pcm16Data: Buffer): Buffer {
|
||||
const mulaw = Buffer.allocUnsafe(pcm16Data.length / 2);
|
||||
|
||||
for (let i = 0; i < pcm16Data.length; i += 2) {
|
||||
const sample = pcm16Data.readInt16LE(i);
|
||||
const index = (sample + 32768) & 0xffff;
|
||||
mulaw[i / 2] = this.MULAW_ENCODE_TABLE[index];
|
||||
}
|
||||
|
||||
return mulaw;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resample audio from 8kHz to 24kHz (linear interpolation)
|
||||
* @param pcm16Data - Buffer containing 8kHz PCM16 audio
|
||||
* @returns Buffer containing 24kHz PCM16 audio
|
||||
*/
|
||||
resample8kTo24k(pcm16Data: Buffer): Buffer {
|
||||
const inputSamples = pcm16Data.length / 2;
|
||||
const outputSamples = Math.floor(inputSamples * 3); // 8k * 3 = 24k
|
||||
const output = Buffer.allocUnsafe(outputSamples * 2);
|
||||
|
||||
for (let i = 0; i < outputSamples; i++) {
|
||||
const srcIndex = i / 3;
|
||||
const srcIndexFloor = Math.floor(srcIndex);
|
||||
const srcIndexCeil = Math.min(srcIndexFloor + 1, inputSamples - 1);
|
||||
const fraction = srcIndex - srcIndexFloor;
|
||||
|
||||
const sample1 = pcm16Data.readInt16LE(srcIndexFloor * 2);
|
||||
const sample2 = pcm16Data.readInt16LE(srcIndexCeil * 2);
|
||||
|
||||
// Linear interpolation
|
||||
const interpolated = Math.round(sample1 + (sample2 - sample1) * fraction);
|
||||
output.writeInt16LE(interpolated, i * 2);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resample audio from 24kHz to 8kHz (decimation with averaging)
|
||||
* @param pcm16Data - Buffer containing 24kHz PCM16 audio
|
||||
* @returns Buffer containing 8kHz PCM16 audio
|
||||
*/
|
||||
resample24kTo8k(pcm16Data: Buffer): Buffer {
|
||||
const inputSamples = pcm16Data.length / 2;
|
||||
const outputSamples = Math.floor(inputSamples / 3); // 24k / 3 = 8k
|
||||
const output = Buffer.allocUnsafe(outputSamples * 2);
|
||||
|
||||
for (let i = 0; i < outputSamples; i++) {
|
||||
// Average 3 samples for anti-aliasing
|
||||
const idx1 = Math.min(i * 3, inputSamples - 1);
|
||||
const idx2 = Math.min(i * 3 + 1, inputSamples - 1);
|
||||
const idx3 = Math.min(i * 3 + 2, inputSamples - 1);
|
||||
|
||||
const sample1 = pcm16Data.readInt16LE(idx1 * 2);
|
||||
const sample2 = pcm16Data.readInt16LE(idx2 * 2);
|
||||
const sample3 = pcm16Data.readInt16LE(idx3 * 2);
|
||||
|
||||
const averaged = Math.round((sample1 + sample2 + sample3) / 3);
|
||||
output.writeInt16LE(averaged, i * 2);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Twilio μ-law 8kHz to OpenAI PCM16 24kHz
|
||||
* @param twilioBase64 - Base64-encoded μ-law audio from Twilio
|
||||
* @returns Base64-encoded PCM16 24kHz audio for OpenAI
|
||||
*/
|
||||
twilioToOpenAI(twilioBase64: string): string {
|
||||
try {
|
||||
// Decode base64
|
||||
const mulawBuffer = Buffer.from(twilioBase64, 'base64');
|
||||
|
||||
// μ-law -> PCM16
|
||||
const pcm16_8k = this.decodeMuLaw(mulawBuffer);
|
||||
|
||||
// 8kHz -> 24kHz
|
||||
const pcm16_24k = this.resample8kTo24k(pcm16_8k);
|
||||
|
||||
// Encode to base64
|
||||
return pcm16_24k.toString('base64');
|
||||
} catch (error) {
|
||||
this.logger.error('Error converting Twilio to OpenAI audio', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert OpenAI PCM16 24kHz to Twilio μ-law 8kHz
|
||||
* @param openaiBase64 - Base64-encoded PCM16 24kHz audio from OpenAI
|
||||
* @returns Base64-encoded μ-law 8kHz audio for Twilio
|
||||
*/
|
||||
openAIToTwilio(openaiBase64: string): string {
|
||||
try {
|
||||
// Decode base64
|
||||
const pcm16_24k = Buffer.from(openaiBase64, 'base64');
|
||||
|
||||
// 24kHz -> 8kHz
|
||||
const pcm16_8k = this.resample24kTo8k(pcm16_24k);
|
||||
|
||||
// PCM16 -> μ-law
|
||||
const mulawBuffer = this.encodeMuLaw(pcm16_8k);
|
||||
|
||||
// Encode to base64
|
||||
return mulawBuffer.toString('base64');
|
||||
} catch (error) {
|
||||
this.logger.error('Error converting OpenAI to Twilio audio', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user