Compare commits

..

9 Commits

Author SHA1 Message Date
phyroslam
96989d0ec3 Enhance semantic links with LLM classification and richer UI labels 2026-04-12 21:37:07 -07:00
Francisco Gaona
12a82372f4 WIP - Semantic linking working asynchronously 2026-04-12 11:24:03 +02:00
Francisco Gaona
efa57c4ba8 WIP - semantic linking seems to be working fine 2026-04-12 11:09:43 +02:00
Francisco Gaona
3f9be316ce WIP - semantinc linking working with other fields 2026-04-12 10:48:37 +02:00
Francisco Gaona
385a842ab8 WIP - semantic linking working with just name 2026-04-12 09:26:23 +02:00
Francisco Gaona
320f8c4266 WIP - some progress with semantic linking but still needs a lot of work 2026-04-11 23:30:25 +02:00
Francisco Gaona
12b0a0881e WIP - initial UI for comments and semantic links 2026-04-11 22:14:24 +02:00
Francisco Gaona
dc18b08a3a WIP - enable embedings 2026-04-11 21:14:34 +02:00
phyroslam
df183230d8 Add phased knowledge layer: comments and semantic linking pipeline 2026-04-11 11:40:01 -07:00
24 changed files with 2175 additions and 3 deletions

View File

@@ -0,0 +1,93 @@
/**
 * Creates the knowledge-layer tables: record comments, semantic documents,
 * their embedding chunks, and suggested semantic links between records.
 *
 * Primary keys default to the database's UUID() function — assumes a MySQL
 * target (UUID() syntax); TODO confirm against the deployment database.
 *
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = async function (knex) {
  // Free-form user comments, attached to any record by (object api name, record id).
  await knex.schema.createTable('comments', (table) => {
    table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
    table.string('parent_object_api_name').notNullable();
    table.uuid('parent_record_id').notNullable();
    table.uuid('author_user_id').notNullable();
    table.text('content').notNullable();
    // created_at / updated_at with database-side defaults.
    table.timestamps(true, true);
    // Deleting a user removes their comments as well.
    table.foreign('author_user_id').references('id').inTable('users').onDelete('CASCADE');
    table.index(['parent_object_api_name', 'parent_record_id'], 'comments_parent_idx');
    table.index(['author_user_id'], 'comments_author_idx');
  });
  // One narrative/projection document per (entity_type, entity_id) pair.
  await knex.schema.createTable('semantic_documents', (table) => {
    table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
    table.string('entity_type').notNullable();
    table.uuid('entity_id').notNullable();
    table.string('title').nullable();
    table.text('narrative').nullable();
    table.json('metadata').nullable();
    table.json('source_summary').nullable();
    table.timestamps(true, true);
    // Enforces at most one semantic document per record.
    table.unique(['entity_type', 'entity_id'], {
      indexName: 'semantic_documents_entity_unique',
    });
    table.index(['entity_type'], 'semantic_documents_type_idx');
  });
  // Embedding-sized text fragments of a semantic document; ordered by chunk_index.
  await knex.schema.createTable('semantic_chunks', (table) => {
    table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
    table.uuid('semantic_document_id').notNullable();
    table.integer('chunk_index').notNullable();
    // Where the chunk text came from (e.g. the base record vs. a comment).
    table.string('source_kind').notNullable().defaultTo('base_record');
    table.uuid('source_ref_id').nullable();
    table.text('text').notNullable();
    table.json('metadata').nullable();
    table.timestamps(true, true);
    // Chunks disappear with their parent document.
    table.foreign('semantic_document_id').references('id').inTable('semantic_documents').onDelete('CASCADE');
    table.unique(['semantic_document_id', 'chunk_index'], {
      indexName: 'semantic_chunks_doc_index_unique',
    });
    table.index(['semantic_document_id'], 'semantic_chunks_document_idx');
    table.index(['source_kind'], 'semantic_chunks_source_kind_idx');
  });
  // Suggested / reviewed links between two records, with provenance and confidence.
  await knex.schema.createTable('semantic_links', (table) => {
    table.uuid('id').primary().defaultTo(knex.raw('(UUID())'));
    table.string('source_entity_type', 100).notNullable();
    table.uuid('source_entity_id').notNullable();
    table.string('target_entity_type', 100).notNullable();
    table.uuid('target_entity_id').notNullable();
    table.string('link_type', 100).notNullable().defaultTo('related_to');
    table.string('status').notNullable().defaultTo('suggested');
    table.string('origin').notNullable().defaultTo('semantic');
    // 0..1 confidence score; DECIMAL(5,4) allows 4 fractional digits.
    table.decimal('confidence', 5, 4).notNullable().defaultTo(0);
    table.text('reason').nullable();
    table.json('evidence').nullable();
    table.uuid('suggested_by_user_id').nullable();
    table.uuid('reviewed_by_user_id').nullable();
    table.timestamp('reviewed_at').nullable();
    table.timestamps(true, true);
    // Keep links when the suggesting/reviewing user is deleted.
    table.foreign('suggested_by_user_id').references('id').inTable('users').onDelete('SET NULL');
    table.foreign('reviewed_by_user_id').references('id').inTable('users').onDelete('SET NULL');
    // One link per directed pair and link type; pairs are normalized app-side.
    table.unique(
      ['source_entity_type', 'source_entity_id', 'target_entity_type', 'target_entity_id', 'link_type'],
      { indexName: 'semantic_links_unique_pair_type' },
    );
    table.index(['source_entity_type', 'source_entity_id'], 'semantic_links_source_idx');
    table.index(['target_entity_type', 'target_entity_id'], 'semantic_links_target_idx');
    table.index(['status'], 'semantic_links_status_idx');
  });
};
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
exports.down = async function (knex) {
await knex.schema.dropTableIfExists('semantic_links');
await knex.schema.dropTableIfExists('semantic_chunks');
await knex.schema.dropTableIfExists('semantic_documents');
await knex.schema.dropTableIfExists('comments');
};

View File

@@ -1,5 +1,6 @@
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { BullModule } from '@nestjs/bullmq';
import { PrismaModule } from './prisma/prisma.module';
import { TenantModule } from './tenant/tenant.module';
import { AuthModule } from './auth/auth.module';
@@ -10,12 +11,19 @@ import { PageLayoutModule } from './page-layout/page-layout.module';
import { VoiceModule } from './voice/voice.module';
import { AiAssistantModule } from './ai-assistant/ai-assistant.module';
import { SavedListViewModule } from './saved-list-view/saved-list-view.module';
import { KnowledgeModule } from './knowledge/knowledge.module';
@Module({
imports: [
ConfigModule.forRoot({
isGlobal: true,
}),
BullModule.forRoot({
connection: {
host: process.env.REDIS_HOST || 'platform-redis',
port: parseInt(process.env.REDIS_PORT || '6379', 10),
},
}),
PrismaModule,
TenantModule,
AuthModule,
@@ -26,6 +34,7 @@ import { SavedListViewModule } from './saved-list-view/saved-list-view.module';
VoiceModule,
AiAssistantModule,
SavedListViewModule,
KnowledgeModule,
],
})
export class AppModule {}

View File

@@ -0,0 +1,89 @@
/** Raw inputs from which a semantic projection is built. */
export type SemanticProjectionInput = {
  objectApiName: string;
  record: Record<string, any>;
  objectDefinition?: any;
  comments: Array<{ id: string; content: string; author_user_id: string; created_at?: string }>;
};

/** Normalized representation of one record for semantic indexing. */
export type SemanticProjection = {
  entityType: string;
  entityId: string;
  title: string;
  narrative: string;
  /** Plain text used for embedding — no 'key: value' labels, no comments (chunker handles those separately). */
  embeddingNarrative: string;
  metadata: Record<string, any>;
  sourceSummary: {
    includedFieldCount: number;
    includedCommentCount: number;
    includesComments: boolean;
  };
};

/** Strategy interface so specific object types can supply custom projections. */
export interface SemanticProjectionAdapter {
  supports(objectApiName: string): boolean;
  buildProjection(input: SemanticProjectionInput): SemanticProjection;
}

// Housekeeping/ownership fields that carry no semantic content.
const EXCLUDED_FIELDS = new Set([
  'id',
  'created_at',
  'updated_at',
  'ownerId',
  'owner_id',
  'tenantId',
  'tenant_id',
]);

/**
 * Catch-all projection adapter used when no object-specific adapter matches.
 * Flattens scalar fields into a labeled narrative and appends comments.
 */
export class DefaultSemanticProjectionAdapter implements SemanticProjectionAdapter {
  /** Accepts every object type; always the last-resort adapter. */
  supports(): boolean {
    return true;
  }

  /**
   * Builds title, narrative and embedding text for a record.
   *
   * Fix over the original: the record is normalized once up front, so a
   * null/undefined record can no longer throw at `input.record.id` (the
   * original guarded only the field scan with `|| {}`).
   */
  buildProjection(input: SemanticProjectionInput): SemanticProjection {
    const record = input.record ?? {};
    const comments = input.comments ?? [];

    // Keep only scalar, non-empty, non-housekeeping fields.
    const fieldEntries = Object.entries(record).filter(([key, value]) => {
      if (EXCLUDED_FIELDS.has(key)) return false;
      if (value === null || value === undefined || value === '') return false;
      return ['string', 'number', 'boolean'].includes(typeof value);
    });

    // Prefer common display fields; fall back to "<ObjectApiName> <id>".
    const title =
      record.name ||
      record.title ||
      record.subject ||
      `${input.objectApiName} ${record.id || ''}`.trim();

    const fieldNarrative = fieldEntries
      .map(([key, value]) => `${key}: ${String(value)}`)
      .join('\n');
    const commentNarrative = comments
      .map((comment, index) => `Comment ${index + 1}: ${comment.content}`)
      .join('\n');
    const narrative = [fieldNarrative, commentNarrative].filter(Boolean).join('\n\n');

    // Plain values only — no 'key:' prefixes. Comments are handled separately by the chunker.
    const embeddingNarrative = fieldEntries
      .map(([, value]) => String(value))
      .join('\n');

    return {
      entityType: input.objectApiName,
      entityId: record.id,
      title,
      narrative,
      embeddingNarrative,
      metadata: {
        objectApiName: input.objectApiName,
        hasComments: comments.length > 0,
      },
      sourceSummary: {
        includedFieldCount: fieldEntries.length,
        includedCommentCount: comments.length,
        includesComments: comments.length > 0,
      },
    };
  }
}

View File

@@ -0,0 +1,24 @@
import { IsNotEmpty, IsOptional, IsString, MaxLength, MinLength } from 'class-validator';
/** Request body for POST /knowledge/comments. */
export class CreateCommentDto {
  // API name of the object the comment belongs to (e.g. 'Contact').
  @IsString()
  @IsNotEmpty()
  parentObjectApiName: string;

  // Id of the record the comment is attached to.
  @IsString()
  @IsNotEmpty()
  parentRecordId: string;

  // Comment text; bounded to keep narratives/embeddings manageable.
  @IsString()
  @MinLength(1)
  @MaxLength(10000)
  content: string;
}
/** Request body for PATCH /knowledge/comments/:id — only the text may change. */
export class UpdateCommentDto {
  @IsOptional()
  @IsString()
  @MinLength(1)
  @MaxLength(10000)
  content?: string;
}

View File

@@ -0,0 +1,52 @@
import { IsIn, IsNumber, IsObject, IsOptional, IsString, Max, Min } from 'class-validator';
// Review lifecycle of a semantic link suggestion.
export const SEMANTIC_LINK_STATUSES = ['suggested', 'approved', 'rejected', 'dismissed'] as const;
// Provenance of a link: created by a human, similarity search, an LLM, etc.
export const SEMANTIC_LINK_ORIGINS = ['manual', 'semantic', 'llm', 'hybrid', 'rule_based'] as const;

/** Body for PATCH /knowledge/semantic/links/:id/review. */
export class ReviewSemanticLinkDto {
  @IsString()
  @IsIn(SEMANTIC_LINK_STATUSES)
  status: (typeof SEMANTIC_LINK_STATUSES)[number];
}

/** Body for creating-or-updating a semantic link between two records. */
export class UpsertSemanticLinkDto {
  @IsString()
  sourceEntityType: string;

  @IsString()
  sourceEntityId: string;

  @IsString()
  targetEntityType: string;

  @IsString()
  targetEntityId: string;

  // Defaults to 'related_to' server-side when omitted.
  @IsOptional()
  @IsString()
  linkType?: string;

  @IsOptional()
  @IsString()
  @IsIn(SEMANTIC_LINK_STATUSES)
  status?: (typeof SEMANTIC_LINK_STATUSES)[number];

  @IsOptional()
  @IsString()
  @IsIn(SEMANTIC_LINK_ORIGINS)
  origin?: (typeof SEMANTIC_LINK_ORIGINS)[number];

  // Similarity/LLM confidence in [0, 1].
  @IsOptional()
  @IsNumber()
  @Min(0)
  @Max(1)
  confidence?: number;

  @IsOptional()
  @IsString()
  reason?: string;

  // Free-form supporting evidence (matched chunks, model output, …).
  @IsOptional()
  @IsObject()
  evidence?: Record<string, any>;
}

View File

@@ -0,0 +1,124 @@
import {
Body,
Controller,
Delete,
Get,
Param,
Patch,
Post,
Query,
UseGuards,
} from '@nestjs/common';
import { JwtAuthGuard } from '../auth/jwt-auth.guard';
import { CurrentUser } from '../auth/current-user.decorator';
import { TenantId } from '../tenant/tenant.decorator';
import { CreateCommentDto, UpdateCommentDto } from './dto/comment.dto';
import { ReviewSemanticLinkDto } from './dto/semantic-link.dto';
import { CommentService } from './services/comment.service';
import { SemanticOrchestratorService } from './services/semantic-orchestrator.service';
import { SemanticLinkService } from './services/semantic-link.service';
import { TenantDatabaseService } from '../tenant/tenant-database.service';
/**
 * HTTP API of the knowledge layer: comment CRUD, semantic refresh/reindex
 * triggers, and semantic link listing/review. All routes require a JWT.
 */
@Controller('knowledge')
@UseGuards(JwtAuthGuard)
export class KnowledgeController {
  /** Fallback page size for batch reindexing when no valid limit is given. */
  private static readonly DEFAULT_REINDEX_LIMIT = 250;

  constructor(
    private readonly commentService: CommentService,
    private readonly semanticOrchestratorService: SemanticOrchestratorService,
    private readonly semanticLinkService: SemanticLinkService,
    private readonly tenantDbService: TenantDatabaseService,
  ) {}

  /** Lists comments attached to one record. */
  @Get('comments/:objectApiName/:recordId')
  async getComments(
    @TenantId() tenantId: string,
    @Param('objectApiName') objectApiName: string,
    @Param('recordId') recordId: string,
  ) {
    return this.commentService.listComments(tenantId, objectApiName, recordId);
  }

  /** Creates a comment authored by the current user. */
  @Post('comments')
  async createComment(
    @TenantId() tenantId: string,
    @Body() dto: CreateCommentDto,
    @CurrentUser() user: any,
  ) {
    return this.commentService.createComment(tenantId, dto, user.userId);
  }

  /** Edits a comment; the service enforces author-only editing. */
  @Patch('comments/:id')
  async updateComment(
    @TenantId() tenantId: string,
    @Param('id') id: string,
    @Body() dto: UpdateCommentDto,
    @CurrentUser() user: any,
  ) {
    return this.commentService.updateComment(tenantId, id, dto, user.userId);
  }

  /** Deletes a comment; the service enforces author-only deletion. */
  @Delete('comments/:id')
  async deleteComment(
    @TenantId() tenantId: string,
    @Param('id') id: string,
    @CurrentUser() user: any,
  ) {
    return this.commentService.deleteComment(tenantId, id, user.userId);
  }

  /** Rebuilds the semantic document/chunks/links for one record synchronously. */
  @Post('semantic/refresh/:objectApiName/:recordId')
  async refreshSemantic(
    @TenantId() tenantId: string,
    @Param('objectApiName') objectApiName: string,
    @Param('recordId') recordId: string,
    @CurrentUser() user: any,
  ) {
    return this.semanticOrchestratorService.refreshRecord(
      tenantId,
      objectApiName,
      recordId,
      user.userId,
      'manual_refresh',
    );
  }

  /** Reindexes up to `limit` records of one object type. */
  @Post('semantic/reindex/:objectApiName')
  async reindexObject(
    @TenantId() tenantId: string,
    @Param('objectApiName') objectApiName: string,
    @CurrentUser() user: any,
    @Query('limit') limit?: string,
  ) {
    // Fix: the original `Number.isFinite(Number(limit))` check accepted
    // `?limit=` (Number('') === 0 → reindex nothing), negative values, and
    // fractions. Only a positive integer is a usable row limit; anything
    // else falls back to the default.
    const numericLimit = Number(limit);
    const parsedLimit =
      Number.isInteger(numericLimit) && numericLimit > 0
        ? numericLimit
        : KnowledgeController.DEFAULT_REINDEX_LIMIT;
    return this.semanticOrchestratorService.reindexObject(
      tenantId,
      objectApiName,
      user.userId,
      parsedLimit,
    );
  }

  /** Lists semantic links touching one record, optionally filtered by status. */
  @Get('semantic/links/:objectApiName/:recordId')
  async listLinks(
    @TenantId() tenantId: string,
    @Param('objectApiName') objectApiName: string,
    @Param('recordId') recordId: string,
    @Query('status') status?: string,
  ) {
    const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
    const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
    return this.semanticLinkService.listForRecord(knex, objectApiName, recordId, status);
  }

  /** Records the current user's review decision on a suggested link. */
  @Patch('semantic/links/:id/review')
  async reviewLink(
    @TenantId() tenantId: string,
    @Param('id') id: string,
    @Body() dto: ReviewSemanticLinkDto,
    @CurrentUser() user: any,
  ) {
    const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
    const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
    return this.semanticLinkService.reviewLink(knex, id, dto.status, user.userId);
  }
}

View File

@@ -0,0 +1,31 @@
import { Module } from '@nestjs/common';
import { BullModule } from '@nestjs/bullmq';
import { KnowledgeController } from './knowledge.controller';
import { CommentService } from './services/comment.service';
import { SemanticOrchestratorService } from './services/semantic-orchestrator.service';
import { SemanticChunkerService } from './services/semantic-chunker.service';
import { SemanticLinkService } from './services/semantic-link.service';
import { SemanticRefreshQueueService } from './services/semantic-refresh-queue.service';
import { SemanticRefreshProcessor } from './semantic-refresh.processor';
import { TenantModule } from '../tenant/tenant.module';
import { MeilisearchModule } from '../search/meilisearch.module';
import { SEMANTIC_REFRESH_QUEUE } from './semantic-refresh.constants';
/**
 * Wires the knowledge layer: comment CRUD, semantic projection/chunking/link
 * services, and the BullMQ queue + worker that refresh records asynchronously.
 */
@Module({
  imports: [
    TenantModule,
    MeilisearchModule,
    // Registers the queue consumed by SemanticRefreshProcessor.
    BullModule.registerQueue({ name: SEMANTIC_REFRESH_QUEUE }),
  ],
  controllers: [KnowledgeController],
  providers: [
    CommentService,
    SemanticOrchestratorService,
    SemanticChunkerService,
    SemanticLinkService,
    SemanticRefreshQueueService,
    SemanticRefreshProcessor,
  ],
  // Exported so other modules (e.g. record CRUD) can trigger refreshes.
  exports: [SemanticOrchestratorService, SemanticRefreshQueueService],
})
export class KnowledgeModule {}

View File

@@ -0,0 +1,3 @@
// BullMQ queue carrying asynchronous semantic refresh work.
export const SEMANTIC_REFRESH_QUEUE = 'semantic-refresh';
// Job name for a single-record refresh on that queue.
export const SEMANTIC_REFRESH_JOB = 'refresh-record';

View File

@@ -0,0 +1,45 @@
import { Processor, WorkerHost } from '@nestjs/bullmq';
import { Logger } from '@nestjs/common';
import { Job } from 'bullmq';
import { SemanticOrchestratorService } from './services/semantic-orchestrator.service';
import { SEMANTIC_REFRESH_QUEUE } from './semantic-refresh.constants';
/** Payload of a semantic refresh job. */
export type SemanticRefreshJobData = {
  tenantId: string;
  objectApiName: string;
  recordId: string;
  userId?: string;
  // What caused the refresh (e.g. 'comment_created', 'manual_refresh').
  trigger: string;
};

/**
 * BullMQ worker for the semantic refresh queue. Delegates each job to
 * SemanticOrchestratorService and rethrows failures so BullMQ retries.
 */
@Processor(SEMANTIC_REFRESH_QUEUE)
export class SemanticRefreshProcessor extends WorkerHost {
  private readonly logger = new Logger(SemanticRefreshProcessor.name);

  constructor(
    private readonly semanticOrchestratorService: SemanticOrchestratorService,
  ) {
    super();
  }

  /** Processes one refresh job; throws on failure so BullMQ can retry. */
  async process(job: Job<SemanticRefreshJobData>): Promise<void> {
    const { tenantId, objectApiName, recordId, userId, trigger } = job.data;
    this.logger.log(
      `Processing semantic refresh: ${objectApiName}:${recordId} trigger=${trigger}`,
    );
    try {
      await this.semanticOrchestratorService.refreshRecord(
        tenantId,
        objectApiName,
        recordId,
        userId,
        trigger,
      );
    } catch (error) {
      // Fix: `catch` variables are `unknown` under strict TS, and non-Error
      // values can be thrown — narrow before reading `.message`.
      const message = error instanceof Error ? error.message : String(error);
      this.logger.error(
        `Semantic refresh failed: ${objectApiName}:${recordId} trigger=${trigger} error=${message}`,
      );
      throw error; // Let BullMQ handle retries
    }
  }
}

View File

@@ -0,0 +1,115 @@
import { ForbiddenException, Injectable, Logger, NotFoundException } from '@nestjs/common';
import { randomUUID } from 'crypto';
import { TenantDatabaseService } from '../../tenant/tenant-database.service';
import { CreateCommentDto, UpdateCommentDto } from '../dto/comment.dto';
import { SemanticRefreshQueueService } from './semantic-refresh-queue.service';
@Injectable()
export class CommentService {
constructor(
private readonly tenantDbService: TenantDatabaseService,
private readonly semanticRefreshQueue: SemanticRefreshQueueService,
) {}
async listComments(tenantId: string, parentObjectApiName: string, parentRecordId: string) {
const knex = await this.getKnex(tenantId);
return knex('comments')
.where({
parent_object_api_name: parentObjectApiName,
parent_record_id: parentRecordId,
})
.orderBy('created_at', 'desc');
}
async createComment(tenantId: string, dto: CreateCommentDto, userId: string) {
const knex = await this.getKnex(tenantId);
const [created] = await knex('comments')
.insert({
parent_object_api_name: dto.parentObjectApiName,
parent_record_id: dto.parentRecordId,
author_user_id: userId,
content: dto.content,
created_at: knex.fn.now(),
updated_at: knex.fn.now(),
})
.returning('*');
console.log(
`[Knowledge] Comment created: ${dto.parentObjectApiName}:${dto.parentRecordId} by ${userId}`,
);
await this.semanticRefreshQueue.enqueue(
tenantId,
dto.parentObjectApiName,
dto.parentRecordId,
userId,
'comment_created',
);
return created;
}
async updateComment(tenantId: string, commentId: string, dto: UpdateCommentDto, userId: string) {
const knex = await this.getKnex(tenantId);
const existing = await knex('comments').where({ id: commentId }).first();
if (!existing) {
throw new NotFoundException('Comment not found');
}
if (existing.author_user_id !== userId) {
throw new ForbiddenException('Only the author can edit this comment');
}
await knex('comments')
.where({ id: commentId })
.update({
...(dto.content ? { content: dto.content } : {}),
updated_at: knex.fn.now(),
});
console.log(
`[Knowledge] Comment updated: ${existing.parent_object_api_name}:${existing.parent_record_id} by ${userId}`,
);
await this.semanticRefreshQueue.enqueue(
tenantId,
existing.parent_object_api_name,
existing.parent_record_id,
userId,
'comment_updated',
);
return knex('comments').where({ id: commentId }).first();
}
async deleteComment(tenantId: string, commentId: string, userId: string) {
const knex = await this.getKnex(tenantId);
const existing = await knex('comments').where({ id: commentId }).first();
if (!existing) {
throw new NotFoundException('Comment not found');
}
if (existing.author_user_id !== userId) {
throw new ForbiddenException('Only the author can delete this comment');
}
await knex('comments').where({ id: commentId }).delete();
console.log(
`[Knowledge] Comment deleted: ${existing.parent_object_api_name}:${existing.parent_record_id} by ${userId}`,
);
await this.semanticRefreshQueue.enqueue(
tenantId,
existing.parent_object_api_name,
existing.parent_record_id,
userId,
'comment_deleted',
);
return { success: true };
}
private async getKnex(tenantId: string) {
const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
return this.tenantDbService.getTenantKnexById(resolvedTenantId);
}
}

View File

@@ -0,0 +1,20 @@
import { SemanticChunkerService } from './semantic-chunker.service';
describe('SemanticChunkerService', () => {
  let service: SemanticChunkerService;

  beforeEach(() => {
    // Pure service, no dependencies — constructed directly without a Nest testing module.
    service = new SemanticChunkerService();
  });

  it('creates chunks from base narrative and comments', () => {
    // Two paragraphs (split on the blank line) + one comment = 3 chunks,
    // with base chunks emitted before comment chunks.
    const chunks = service.chunkText('Intro paragraph\n\nSecond paragraph', [
      { id: 'c-1', content: 'Comment body' },
    ]);
    expect(chunks).toHaveLength(3);
    expect(chunks[0].sourceKind).toBe('base_record');
    expect(chunks[2].sourceKind).toBe('comment');
    expect(chunks[2].sourceRefId).toBe('c-1');
  });
});

View File

@@ -0,0 +1,71 @@
import { Injectable } from '@nestjs/common';
/** One embedding-ready fragment of a record's semantic text. */
export type SemanticChunk = {
  // Global position across base-record and comment chunks.
  chunkIndex: number;
  sourceKind: 'base_record' | 'comment' | 'mixed';
  // Comment id when sourceKind === 'comment'; null for base-record chunks.
  sourceRefId: string | null;
  text: string;
  metadata: Record<string, any>;
};

/** Maximum characters per chunk when the caller does not override it. */
const DEFAULT_MAX_CHUNK_LENGTH = 500;

/**
 * Splits a record narrative and its comments into embedding-sized chunks.
 * Base-record chunks always precede comment chunks; chunkIndex is global.
 */
@Injectable()
export class SemanticChunkerService {
  /**
   * Chunks the base narrative and each comment.
   *
   * Generalization: the previously hard-coded 500-character cap is now an
   * optional `maxChunkLength` parameter (default unchanged), so callers can
   * tune chunk size per embedding model without a code change.
   */
  chunkText(
    baseNarrative: string,
    comments: Array<{ id: string; content: string }>,
    maxChunkLength: number = DEFAULT_MAX_CHUNK_LENGTH,
  ): SemanticChunk[] {
    const chunks: SemanticChunk[] = [];
    const baseParts = this.splitText(baseNarrative, maxChunkLength);
    for (const [index, text] of baseParts.entries()) {
      chunks.push({
        chunkIndex: chunks.length,
        sourceKind: 'base_record',
        sourceRefId: null,
        text,
        metadata: { section: 'base', localIndex: index },
      });
    }
    for (const comment of comments || []) {
      const commentParts = this.splitText(comment.content, maxChunkLength);
      for (const [index, text] of commentParts.entries()) {
        chunks.push({
          chunkIndex: chunks.length,
          sourceKind: 'comment',
          sourceRefId: comment.id,
          text,
          metadata: { section: 'comment', localIndex: index, commentId: comment.id },
        });
      }
    }
    return chunks;
  }

  /**
   * Splits text on blank lines, then hard-wraps any paragraph longer than
   * `maxChunkLength`. Note the hard wrap may cut mid-word; acceptable for
   * embedding input.
   */
  private splitText(text: string, maxChunkLength: number): string[] {
    const normalized = (text || '').trim();
    if (!normalized) return [];
    const paragraphs = normalized
      .split(/\n{2,}/)
      .map((part) => part.trim())
      .filter(Boolean);
    const chunks: string[] = [];
    for (const paragraph of paragraphs) {
      if (paragraph.length <= maxChunkLength) {
        chunks.push(paragraph);
        continue;
      }
      let cursor = 0;
      while (cursor < paragraph.length) {
        chunks.push(paragraph.slice(cursor, cursor + maxChunkLength).trim());
        cursor += maxChunkLength;
      }
    }
    // Trimming a slice can yield an empty string; drop those.
    return chunks.filter(Boolean);
  }
}

View File

@@ -0,0 +1,20 @@
import { SemanticLinkService } from './semantic-link.service';
describe('SemanticLinkService', () => {
let service: SemanticLinkService;
beforeEach(() => {
service = new SemanticLinkService();
});
it('normalizes undirected pairs in deterministic order', () => {
const normalized = service.normalizeUndirectedPair('Contact', 'b-id', 'Account', 'a-id');
expect(normalized).toEqual({
sourceEntityType: 'Account',
sourceEntityId: 'a-id',
targetEntityType: 'Contact',
targetEntityId: 'b-id',
});
});
});

View File

@@ -0,0 +1,186 @@
import { Injectable, NotFoundException } from '@nestjs/common';
/** Input for creating-or-merging a semantic link suggestion. */
export type SemanticLinkUpsertInput = {
  sourceEntityType: string;
  sourceEntityId: string;
  targetEntityType: string;
  targetEntityId: string;
  linkType?: string;
  status?: string;
  origin?: string;
  confidence?: number;
  reason?: string;
  evidence?: Record<string, any>;
  suggestedByUserId?: string | null;
};

/**
 * Persistence and enrichment for semantic links. Links are stored
 * undirected: both endpoints are normalized into a canonical order before
 * writing, so (A,B) and (B,A) map to the same row.
 *
 * NOTE(review): all methods take `knex: any` for the tenant connection;
 * they run against whichever tenant database the caller resolved.
 */
@Injectable()
export class SemanticLinkService {
  /**
   * Returns the pair ordered so that the lexicographically smaller
   * "type:id" key is the source. Deterministic for any argument order.
   */
  normalizeUndirectedPair(
    sourceEntityType: string,
    sourceEntityId: string,
    targetEntityType: string,
    targetEntityId: string,
  ) {
    const sourceKey = `${sourceEntityType}:${sourceEntityId}`;
    const targetKey = `${targetEntityType}:${targetEntityId}`;
    if (sourceKey <= targetKey) {
      return {
        sourceEntityType,
        sourceEntityId,
        targetEntityType,
        targetEntityId,
      };
    }
    return {
      sourceEntityType: targetEntityType,
      sourceEntityId: targetEntityId,
      targetEntityType: sourceEntityType,
      targetEntityId: sourceEntityId,
    };
  }

  /**
   * Inserts a link or merges into the existing row for the same normalized
   * pair + link_type (matching the migration's unique index).
   *
   * Merge semantics (MySQL-specific raw SQL — `IF(...)` and `VALUES(...)`):
   * - status: an 'approved' row is never downgraded by a new suggestion;
   * - confidence: keeps the maximum of old and new;
   * - origin/reason/evidence: overwritten by the new suggestion.
   * NOTE(review): `VALUES()` in ON DUPLICATE KEY UPDATE is deprecated in
   * MySQL 8.0.20+ — confirm the target server version still supports it.
   */
  async upsertSuggestedLink(knex: any, input: SemanticLinkUpsertInput) {
    const normalized = this.normalizeUndirectedPair(
      input.sourceEntityType,
      input.sourceEntityId,
      input.targetEntityType,
      input.targetEntityId,
    );
    const payload = {
      source_entity_type: normalized.sourceEntityType,
      source_entity_id: normalized.sourceEntityId,
      target_entity_type: normalized.targetEntityType,
      target_entity_id: normalized.targetEntityId,
      link_type: input.linkType || 'related_to',
      status: input.status || 'suggested',
      origin: input.origin || 'semantic',
      confidence: input.confidence ?? 0,
      reason: input.reason || null,
      evidence: input.evidence ? JSON.stringify(input.evidence) : null,
      suggested_by_user_id: input.suggestedByUserId || null,
      updated_at: knex.fn.now(),
      created_at: knex.fn.now(),
    };
    await knex('semantic_links')
      .insert(payload)
      .onConflict([
        'source_entity_type',
        'source_entity_id',
        'target_entity_type',
        'target_entity_id',
        'link_type',
      ])
      .merge({
        status: knex.raw("IF(status = 'approved', status, VALUES(status))"),
        origin: payload.origin,
        confidence: knex.raw('GREATEST(confidence, VALUES(confidence))'),
        reason: payload.reason,
        evidence: payload.evidence,
        updated_at: knex.fn.now(),
      });
  }

  /**
   * Lists links where the record appears on either side, most recently
   * updated first, optionally filtered by status. Each returned row is
   * enriched in place with source/target entity labels (from
   * object_definitions) and display names (loaded from the entity tables).
   *
   * NOTE(review): display names are fetched one record at a time (cached per
   * type:id within the call) — an N+1 pattern; consider batching with
   * whereIn per entity type if link lists grow large.
   */
  async listForRecord(knex: any, entityType: string, entityId: string, status?: string) {
    const query = knex('semantic_links')
      .where((builder: any) => {
        // Grouped so the OR does not leak past the optional status filter below.
        builder
          .where({ source_entity_type: entityType, source_entity_id: entityId })
          .orWhere({ target_entity_type: entityType, target_entity_id: entityId });
      })
      .orderBy('updated_at', 'desc');
    if (status) {
      query.andWhere({ status });
    }
    const links = await query;
    if (!links.length) return links;
    // Collect every entity type seen on either side of any link.
    const typeSet = new Set<string>();
    for (const link of links) {
      typeSet.add(link.source_entity_type);
      typeSet.add(link.target_entity_type);
    }
    const definitions = await knex('object_definitions')
      .whereIn('apiName', Array.from(typeSet))
      .select('apiName', 'label', 'pluralLabel', 'tableName', 'fields');
    const definitionByType = new Map<string, any>(
      definitions.map((item: any) => [item.apiName, item]),
    );
    // Memoizes display names per "type:id" for the duration of this call.
    const displayNameCache = new Map<string, string>();
    // Picks the field used as a human-readable name: 'name' if defined,
    // otherwise the first string-like field, otherwise 'id'.
    const getDisplayField = (definition: any) => {
      let fields = [];
      // `fields` may be stored as JSON text or already-parsed array.
      if (Array.isArray(definition?.fields)) {
        fields = definition.fields;
      } else if (typeof definition?.fields === 'string') {
        try {
          fields = JSON.parse(definition.fields);
        } catch {
          fields = [];
        }
      }
      if (fields.some((field: any) => field?.apiName === 'name')) return 'name';
      const textField = fields.find((field: any) =>
        ['STRING', 'TEXT', 'EMAIL'].includes(String(field?.type || '').toUpperCase()),
      );
      return textField?.apiName || 'id';
    };
    // Resolves the physical table: explicit tableName, else snake_cased
    // pluralLabel, else naive "<apiname>s" pluralization.
    const resolveTableName = (definition: any) => {
      if (definition?.tableName) return definition.tableName;
      if (definition?.pluralLabel) {
        return String(definition.pluralLabel).toLowerCase().replace(/[^a-z0-9]+/g, '_');
      }
      return `${String(definition?.apiName || '').toLowerCase()}s`;
    };
    // Falls back to the raw id whenever the definition or record is missing.
    const loadDisplayName = async (type: string, id: string) => {
      const cacheKey = `${type}:${id}`;
      if (displayNameCache.has(cacheKey)) return displayNameCache.get(cacheKey);
      const definition = definitionByType.get(type);
      if (!definition) {
        displayNameCache.set(cacheKey, id);
        return id;
      }
      const tableName = resolveTableName(definition);
      const displayField = getDisplayField(definition);
      const record = await knex(tableName).where({ id }).first();
      const display = record?.[displayField] ? String(record[displayField]) : id;
      displayNameCache.set(cacheKey, display);
      return display;
    };
    for (const link of links) {
      link.source_entity_label = definitionByType.get(link.source_entity_type)?.label || link.source_entity_type;
      link.target_entity_label = definitionByType.get(link.target_entity_type)?.label || link.target_entity_type;
      link.source_entity_name = await loadDisplayName(link.source_entity_type, link.source_entity_id);
      link.target_entity_name = await loadDisplayName(link.target_entity_type, link.target_entity_id);
    }
    return links;
  }

  /**
   * Sets a link's review status and records the reviewer and timestamp.
   * Throws NotFoundException when no row matched, then returns the updated row.
   */
  async reviewLink(knex: any, linkId: string, status: string, reviewerUserId: string) {
    const updated = await knex('semantic_links')
      .where({ id: linkId })
      .update({
        status,
        reviewed_by_user_id: reviewerUserId,
        reviewed_at: knex.fn.now(),
        updated_at: knex.fn.now(),
      });
    if (!updated) {
      throw new NotFoundException('Semantic link not found');
    }
    return knex('semantic_links').where({ id: linkId }).first();
  }
}

View File

@@ -0,0 +1,540 @@
import { Injectable, Logger } from '@nestjs/common';
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';
import { TenantDatabaseService } from '../../tenant/tenant-database.service';
import { MeilisearchService } from '../../search/meilisearch.service';
import { getCentralPrisma } from '../../prisma/central-prisma.service';
import { OpenAIConfig } from '../../voice/interfaces/integration-config.interface';
import { randomUUID } from 'crypto';
import {
DefaultSemanticProjectionAdapter,
SemanticProjectionAdapter,
} from '../adapters/semantic-projection.adapter';
import { SemanticChunkerService } from './semantic-chunker.service';
import { SemanticLinkService } from './semantic-link.service';
@Injectable()
export class SemanticOrchestratorService {
private readonly logger = new Logger(SemanticOrchestratorService.name);
private readonly adapters: SemanticProjectionAdapter[] = [new DefaultSemanticProjectionAdapter()];
private readonly defaultEmbeddingModel =
process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
private readonly semanticEmbedderName = 'default';
private readonly MIN_CONFIDENCE_BASE = 0.7;
private readonly MIN_CONFIDENCE_COMMENT = 0.52;
private readonly defaultChatModel = process.env.OPENAI_CHAT_MODEL || 'gpt-4o-mini';
constructor(
private readonly tenantDbService: TenantDatabaseService,
private readonly meilisearchService: MeilisearchService,
private readonly chunkerService: SemanticChunkerService,
private readonly semanticLinkService: SemanticLinkService,
) {}
/**
 * Rebuilds all semantic artifacts for one record, in strict order:
 * load record + comments -> build projection -> upsert semantic_documents
 * row -> replace semantic_chunks rows -> index chunks in Meilisearch ->
 * generate link suggestions.
 *
 * Returns `{ skipped: true }` when the object definition or the record is
 * missing; otherwise `{ documentId, chunkCount }`.
 */
async refreshRecord(
  tenantId: string,
  objectApiName: string,
  recordId: string,
  userId?: string,
  trigger: string = 'manual',
) {
  this.logger.log(
    `Semantic refresh start: ${objectApiName}:${recordId} (trigger=${trigger})`,
  );
  const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
  const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
  const objectDefinition = await knex('object_definitions').where({ apiName: objectApiName }).first();
  if (!objectDefinition) {
    this.logger.warn(`Object definition ${objectApiName} not found. Skipping semantic refresh.`);
    return { skipped: true };
  }
  const tableName = this.getTableName(objectDefinition);
  const record = await knex(tableName).where({ id: recordId }).first();
  if (!record) {
    this.logger.warn(`Record not found for semantic refresh: ${objectApiName}:${recordId}`);
    return { skipped: true };
  }
  // Oldest first so "Comment 1" in the narrative is the earliest comment.
  const comments = await knex('comments')
    .where({
      parent_object_api_name: objectApiName,
      parent_record_id: recordId,
    })
    .orderBy('created_at', 'asc');
  this.logger.log(
    `Semantic refresh source: ${objectApiName}:${recordId} comments=${comments.length}`,
  );
  // Non-null assertion is safe: `adapters` always contains
  // DefaultSemanticProjectionAdapter, whose supports() returns true.
  const adapter = this.adapters.find((candidate) => candidate.supports(objectApiName))!;
  const projection = adapter.buildProjection({
    objectApiName,
    record,
    objectDefinition,
    comments,
  });
  const documentId = await this.upsertSemanticDocument(knex, projection);
  // Comments are chunked separately from the embedding narrative.
  const chunks = this.chunkerService.chunkText(projection.embeddingNarrative, comments);
  this.logger.log(
    `Semantic refresh chunking: ${objectApiName}:${recordId} chunks=${chunks.length}`,
  );
  await this.replaceChunks(knex, documentId, chunks);
  const openAiConfig = await this.getOpenAiConfig(resolvedTenantId);
  // embedderReady tells the suggestion step whether vector search is usable.
  const embedderReady = await this.indexChunks(resolvedTenantId, projection, chunks, openAiConfig);
  await this.generateSuggestions(
    resolvedTenantId,
    projection,
    chunks,
    openAiConfig,
    embedderReady,
    userId,
    trigger,
  );
  this.logger.log(
    `Semantic refresh complete: ${objectApiName}:${recordId} document=${documentId}`,
  );
  return { documentId, chunkCount: chunks.length };
}
/**
 * Refreshes up to `limit` records of one object type, sequentially.
 * Returns how many record ids were fetched and how many were processed.
 */
async reindexObject(tenantId: string, objectApiName: string, userId?: string, limit = 250) {
  const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
  const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
  const objectDefinition = await knex('object_definitions').where({ apiName: objectApiName }).first();
  if (!objectDefinition) {
    // Unknown object type: nothing to reindex.
    return { total: 0, processed: 0 };
  }
  const rows = await knex(this.getTableName(objectDefinition)).select('id').limit(limit);
  // Sequential on purpose: each refresh does DB + Meilisearch + LLM work.
  let completed = 0;
  for (const row of rows) {
    await this.refreshRecord(resolvedTenantId, objectApiName, row.id, userId, 'batch_reindex');
    completed += 1;
  }
  return { total: rows.length, processed: completed };
}
/**
 * Inserts or updates the semantic_documents row for this projection's
 * (entity_type, entity_id) pair and returns the row id.
 *
 * Insert path generates the UUID app-side because MySQL ignores
 * `.returning()` (see the fallback comment below).
 */
private async upsertSemanticDocument(knex: any, projection: any): Promise<string> {
  const existing = await knex('semantic_documents')
    .where({ entity_type: projection.entityType, entity_id: projection.entityId })
    .first();
  if (existing) {
    await knex('semantic_documents')
      .where({ id: existing.id })
      .update({
        title: projection.title,
        narrative: projection.narrative,
        metadata: JSON.stringify(projection.metadata || {}),
        source_summary: JSON.stringify(projection.sourceSummary || {}),
        updated_at: knex.fn.now(),
      });
    return existing.id;
  }
  const newId = randomUUID();
  const [created] = await knex('semantic_documents')
    .insert({
      id: newId,
      entity_type: projection.entityType,
      entity_id: projection.entityId,
      title: projection.title,
      narrative: projection.narrative,
      metadata: JSON.stringify(projection.metadata || {}),
      source_summary: JSON.stringify(projection.sourceSummary || {}),
      created_at: knex.fn.now(),
      updated_at: knex.fn.now(),
    })
    .returning('id');
  // Postgres-style drivers return the inserted row object here.
  if (created && typeof created === 'object' && created.id) {
    return created.id;
  }
  // MySQL may return a numeric insert id (often 0 for UUID PKs). Always trust the generated UUID.
  return newId;
}
/**
 * Replaces all chunks of a semantic document: deletes the existing rows and
 * bulk-inserts the new set. No-ops (with a warning) when the document id is
 * missing, and stops after the delete when there are no new chunks.
 */
private async replaceChunks(knex: any, documentId: string, chunks: any[]) {
  if (!documentId) {
    this.logger.warn('Skipping chunk replace: missing semantic document id.');
    return;
  }
  // Full rebuild: drop everything for the document, then insert fresh rows.
  await knex('semantic_chunks').where({ semantic_document_id: documentId }).delete();
  if (chunks.length === 0) {
    return;
  }
  const rows = chunks.map((chunk) => ({
    semantic_document_id: documentId,
    chunk_index: chunk.chunkIndex,
    source_kind: chunk.sourceKind,
    source_ref_id: chunk.sourceRefId,
    text: chunk.text,
    metadata: JSON.stringify(chunk.metadata || {}),
    created_at: knex.fn.now(),
    updated_at: knex.fn.now(),
  }));
  await knex('semantic_chunks').insert(rows);
}
/**
 * Pushes the record's chunks into the tenant's Meilisearch semantic index.
 * When OpenAI credentials are present, also ensures the index has an OpenAI
 * embedder configured so hybrid (vector) search becomes possible.
 *
 * @returns true when the embedder is confirmed ready, false when indexing is
 *          lexical-only or Meilisearch is disabled entirely.
 */
private async indexChunks(
  tenantId: string,
  projection: any,
  chunks: any[],
  openAiConfig: OpenAIConfig | null,
) {
  if (!this.meilisearchService.isEnabled()) {
    this.logger.warn('Meilisearch disabled; skipping semantic chunk indexing.');
    return false;
  }
  const indexName = this.meilisearchService.buildSemanticChunkIndexName(tenantId);
  let embedderReady = false;
  if (!openAiConfig?.apiKey) {
    this.logger.warn('OpenAI embedder not configured; semantic search will be lexical only.');
  } else {
    const embeddingModel = openAiConfig.embeddingModel || this.defaultEmbeddingModel;
    embedderReady = await this.meilisearchService.ensureOpenAiEmbedder(indexName, {
      embedderName: this.semanticEmbedderName,
      apiKey: openAiConfig.apiKey,
      model: embeddingModel,
      documentTemplate: '{{doc.title}}\n{{doc.text}}',
    });
    this.logger.log(
      `Meilisearch embedder ensured: index=${indexName} model=${embeddingModel}`,
    );
  }
  this.logger.log(`Indexing semantic chunks: index=${indexName} count=${chunks.length}`);
  // Document ids are deterministic (entity + chunk index) so re-indexing upserts.
  const documents = chunks.map((chunk) => ({
    id: `${projection.entityType}_${projection.entityId}_${chunk.chunkIndex}`,
    entityType: projection.entityType,
    entityId: projection.entityId,
    title: projection.title,
    sourceKind: chunk.sourceKind,
    sourceRefId: chunk.sourceRefId,
    text: chunk.text,
  }));
  await this.meilisearchService.upsertDocuments(indexName, documents);
  return embedderReady;
}
/**
 * Searches the tenant's semantic chunk index using the record's own chunks as
 * the query, filters hits by per-kind confidence thresholds, optionally asks
 * an LLM to classify each surviving candidate, and upserts the results as
 * 'suggested' semantic links.
 *
 * @param tenantId      Tenant identifier (resolved again before writing links).
 * @param projection    Semantic projection of the source record.
 * @param chunks        Chunks produced for the source record.
 * @param openAiConfig  OpenAI credentials, or null (skips LLM classification).
 * @param embedderReady Whether hybrid (vector) search can be used.
 * @param userId        Optional user attributed to the suggestions.
 * @param trigger       Event that caused the refresh; used in link reasons.
 */
private async generateSuggestions(
  tenantId: string,
  projection: any,
  chunks: any[],
  openAiConfig: OpenAIConfig | null,
  embedderReady: boolean,
  userId?: string,
  trigger: string = 'semantic_refresh',
) {
  if (!this.meilisearchService.isEnabled() || !chunks.length) {
    this.logger.warn(
      `Skipping suggestion generation: meili=${this.meilisearchService.isEnabled()} chunks=${chunks.length}`,
    );
    return;
  }
  const indexName = this.meilisearchService.buildSemanticChunkIndexName(tenantId);
  // Build query from all chunks (base record + comments), prioritising comments
  // since they carry the most distinctive semantic signal.
  const commentChunks = chunks.filter((c) => c.sourceKind === 'comment');
  const baseChunks = chunks.filter((c) => c.sourceKind !== 'comment');
  const orderedChunks = [...commentChunks, ...baseChunks];
  // Cap the query at 1200 chars to keep the search request small.
  const queryText = orderedChunks.map((chunk) => chunk.text).join(' ').slice(0, 1200);
  this.logger.log(
    `Generating suggestions: index=${indexName} queryLen=${queryText.length} hybrid=${embedderReady}`,
  );
  const search = await this.meilisearchService.searchIndex(
    indexName,
    queryText,
    20,
    // semanticRatio:1.0 = pure vector search, no lexical component that would
    // match on shared tokens like 'name:' or 'Comment 1:' across all records.
    embedderReady ? { embedder: this.semanticEmbedderName, semanticRatio: 1.0 } : undefined,
  );
  this.logger.log(
    `Meilisearch results: index=${indexName} hits=${search.hits?.length || 0} total=${search.total}`,
  );
  // Dedupe hits per target entity, keeping only the best-scoring chunk.
  const candidates = new Map<string, { hit: any; confidence: number; rankingDetails?: any }>();
  for (const hit of search.hits || []) {
    // Skip self
    if (hit.entityId === projection.entityId) continue;
    const confidence = hit._semanticScore ?? hit._rankingScore ?? 0;
    // Use a lower threshold for comment chunks (short, conversational text
    // naturally produces lower cosine similarity than structured field values).
    const isComment = hit.sourceKind === 'comment';
    const threshold = isComment ? this.MIN_CONFIDENCE_COMMENT : this.MIN_CONFIDENCE_BASE;
    this.logger.log(
      `Suggestion candidate: ${hit.entityType}:${hit.entityId} confidence=${confidence.toFixed(4)} kind=${hit.sourceKind || 'base'} threshold=${threshold} text="${String(hit.text || '').substring(0, 60)}"`,
    );
    if (confidence < threshold) {
      this.logger.log(
        `Skipping low-confidence match: ${hit.entityType}:${hit.entityId} confidence=${confidence.toFixed(4)} < ${threshold} (${isComment ? 'comment' : 'base'})`,
      );
      continue;
    }
    const key = `${hit.entityType}:${hit.entityId}`;
    const existing = candidates.get(key);
    if (!existing || confidence > existing.confidence) {
      candidates.set(key, {
        hit,
        confidence,
        rankingDetails: hit._rankingScoreDetails || null,
      });
    }
  }
  this.logger.log(`Filtered suggestions: ${candidates.size} passed thresholds (base=${this.MIN_CONFIDENCE_BASE}, comment=${this.MIN_CONFIDENCE_COMMENT})`);
  const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
  const knex = await this.tenantDbService.getTenantKnexById(resolvedTenantId);
  // Persist each surviving candidate; the LLM assessment is best-effort and a
  // null result falls back to a generic 'related' link with a trigger reason.
  for (const [key, { hit, confidence, rankingDetails }] of candidates.entries()) {
    const [targetType, targetId] = key.split(':');
    const llmAssessment = await this.assessLinkWithLlm(
      openAiConfig,
      trigger,
      projection,
      chunks,
      hit,
      confidence,
      rankingDetails,
    );
    const reason =
      llmAssessment?.reason ||
      this.humanizeTrigger(trigger) ||
      'Suggested from semantic similarity.';
    await this.semanticLinkService.upsertSuggestedLink(knex, {
      sourceEntityType: projection.entityType,
      sourceEntityId: projection.entityId,
      targetEntityType: targetType,
      targetEntityId: targetId,
      linkType: llmAssessment?.linkType || 'related',
      status: 'suggested',
      origin: 'semantic',
      confidence,
      reason,
      evidence: this.buildEvidencePayload(
        trigger,
        chunks,
        hit,
        confidence,
        rankingDetails,
        llmAssessment,
      ),
      suggestedByUserId: userId || null,
    });
  }
}
/**
 * Assembles the evidence JSON stored with a suggested link: the triggering
 * event, an explanation, a sample of the source chunks, any LLM-matched
 * signals, and the winning search hit with its score.
 */
private buildEvidencePayload(
  trigger: string,
  chunks: any[],
  hit: any,
  confidence: number,
  rankingDetails: any,
  llmAssessment?: {
    reason?: string;
    explanation?: string;
    matchedSignals?: string[];
  } | null,
) {
  // Prefer the LLM's long-form explanation, then its short reason, then a
  // generic fallback.
  const explanation =
    llmAssessment?.explanation ||
    llmAssessment?.reason ||
    'Suggested using semantic similarity and ranked chunk evidence.';
  // Keep only the first two source chunks, truncated, as illustrative signals.
  const sourceSignals = chunks.slice(0, 2).map((chunk) => ({
    sourceKind: chunk.sourceKind,
    text: chunk.text.slice(0, 220),
  }));
  const matchedChunk = {
    sourceKind: hit.sourceKind,
    text: String(hit.text || '').slice(0, 220),
    score: confidence,
    rankingDetails: rankingDetails || null,
  };
  return {
    trigger,
    explanation,
    sourceSignals,
    matchedSignals: llmAssessment?.matchedSignals || [],
    matchedChunks: [matchedChunk],
  };
}
/**
 * Asks the chat model to classify the relationship between the source record
 * and one search hit. Returns null when OpenAI is not configured, the model
 * response contains no parseable JSON, or the call fails — callers then fall
 * back to a generic 'related' link.
 *
 * @returns The classified linkType plus optional reason/explanation/signals.
 */
private async assessLinkWithLlm(
  openAiConfig: OpenAIConfig | null,
  trigger: string,
  projection: any,
  chunks: any[],
  hit: any,
  confidence: number,
  rankingDetails: any,
): Promise<{ linkType: string; reason?: string; explanation?: string; matchedSignals?: string[] } | null> {
  if (!openAiConfig?.apiKey) {
    return null;
  }
  // Keep the prompt compact: narratives and chunk texts are truncated so the
  // payload stays small even for large records.
  const promptPayload = {
    trigger,
    source: {
      entityType: projection.entityType,
      title: projection.title,
      narrative: String(projection.narrative || '').slice(0, 900),
      keySignals: chunks.slice(0, 3).map((chunk) => ({
        sourceKind: chunk.sourceKind,
        text: String(chunk.text || '').slice(0, 220),
      })),
    },
    target: {
      entityType: hit.entityType,
      title: hit.title,
      sourceKind: hit.sourceKind,
      text: String(hit.text || '').slice(0, 300),
    },
    confidence,
    rankingDetails: rankingDetails || {},
    allowedLinkTypes: [
      'related',
      'supports',
      'contradicts',
      'expands',
      'duplicate_of',
      'references',
      'depends_on',
    ],
  };
  try {
    const model = new ChatOpenAI({
      apiKey: openAiConfig.apiKey,
      model: openAiConfig.model || this.defaultChatModel,
      temperature: 0.1, // near-deterministic classification
    });
    const response = await model.invoke([
      new SystemMessage(
        'Classify semantic relationship. Return valid JSON only with keys: linkType, reason, explanation, matchedSignals. linkType must be one of related|supports|contradicts|expands|duplicate_of|references|depends_on.',
      ),
      new HumanMessage(JSON.stringify(promptPayload)),
    ]);
    // LangChain message content may be a plain string or an array of parts.
    const content = typeof response.content === 'string'
      ? response.content
      : Array.isArray(response.content)
        ? response.content.map((part: any) => (typeof part === 'string' ? part : part?.text || '')).join('')
        : '';
    const normalized = this.extractJsonObject(content);
    if (!normalized) return null;
    const linkType = this.normalizeLinkType(normalized.linkType);
    return {
      linkType,
      reason: typeof normalized.reason === 'string' ? normalized.reason.trim() : undefined,
      explanation:
        typeof normalized.explanation === 'string' ? normalized.explanation.trim() : undefined,
      matchedSignals: Array.isArray(normalized.matchedSignals)
        ? normalized.matchedSignals
            .map((item: any) => String(item || '').trim())
            .filter(Boolean)
            .slice(0, 3)
        : undefined,
    };
  } catch (error) {
    // `error` is `unknown` under strict mode; extract a message safely instead
    // of dereferencing `.message` on a value that may not be an Error.
    const message = error instanceof Error ? error.message : String(error);
    this.logger.warn(`Semantic LLM assessment failed: ${message}`);
    return null;
  }
}
/**
 * Best-effort JSON extraction from LLM output: try to parse the whole trimmed
 * text first, then the first `{...}` span found inside it. Returns null when
 * nothing parseable is present.
 */
private extractJsonObject(raw: string): Record<string, any> | null {
  if (!raw) return null;
  const trimmed = raw.trim();
  const attempts: string[] = [trimmed];
  const embedded = trimmed.match(/\{[\s\S]*\}/);
  if (embedded) {
    attempts.push(embedded[0]);
  }
  for (const candidate of attempts) {
    try {
      return JSON.parse(candidate);
    } catch {
      // Not valid JSON — fall through to the next candidate, if any.
    }
  }
  return null;
}
/**
 * Coerces an arbitrary LLM-provided value into one of the supported link
 * types; anything unrecognised becomes 'related'.
 */
private normalizeLinkType(value: any): string {
  const candidate = String(value || '').trim().toLowerCase();
  switch (candidate) {
    case 'related':
    case 'supports':
    case 'contradicts':
    case 'expands':
    case 'duplicate_of':
    case 'references':
    case 'depends_on':
      return candidate;
    default:
      return 'related';
  }
}
/**
 * Maps an internal trigger code to a human-readable suggestion reason.
 * Unknown or missing triggers fall back to a generic message.
 */
private humanizeTrigger(trigger: string): string {
  if (!trigger) return 'Suggested from semantic similarity.';
  const map: Record<string, string> = {
    // record_created/record_updated are enqueued by ObjectService on save.
    record_created: 'Suggested when the record was created.',
    record_updated: 'Suggested after the record was updated.',
    comment_created: 'Suggested based on a comment added to the record.',
    comment_updated: 'Suggested based on a comment update.',
    manual_refresh: 'Suggested after a manual semantic refresh.',
    batch_reindex: 'Suggested during semantic reindexing.',
  };
  return map[trigger] || 'Suggested from semantic similarity.';
}
/**
 * Resolves the physical table name for an object definition: explicit
 * tableName first, then a slug of the plural label, then a naive
 * pluralisation of the API name.
 */
private getTableName(objectDefinition: any): string {
  const { tableName, pluralLabel, apiName } = objectDefinition;
  if (tableName) {
    return tableName;
  }
  if (pluralLabel) {
    // Slugify: lowercase with runs of non-alphanumerics collapsed to '_'.
    return pluralLabel.toLowerCase().replace(/[^a-z0-9]+/g, '_');
  }
  return `${apiName.toLowerCase()}s`;
}
/**
 * Loads the tenant's OpenAI configuration from the central database,
 * decrypting it when stored as a string, and falling back to the
 * OPENAI_API_KEY environment variable when the tenant has none.
 *
 * Bug fix: the returned object previously omitted `model`, so the tenant's
 * configured chat model was never seen by `assessLinkWithLlm` (which reads
 * `openAiConfig.model`) and the default chat model was always used.
 *
 * @returns The OpenAI config, or null when no API key is available.
 */
private async getOpenAiConfig(tenantId: string): Promise<OpenAIConfig | null> {
  const resolvedTenantId = await this.tenantDbService.resolveTenantId(tenantId);
  const centralPrisma = getCentralPrisma();
  const tenant = await centralPrisma.tenant.findUnique({
    where: { id: resolvedTenantId },
    select: { integrationsConfig: true },
  });
  let config = tenant?.integrationsConfig
    ? typeof tenant.integrationsConfig === 'string'
      ? this.tenantDbService.decryptIntegrationsConfig(tenant.integrationsConfig)
      : tenant.integrationsConfig
    : null;
  if (!config?.openai && process.env.OPENAI_API_KEY) {
    // Environment fallback: lets local/dev deployments work without
    // per-tenant integration settings.
    config = {
      ...(config || {}),
      openai: {
        apiKey: process.env.OPENAI_API_KEY,
        embeddingModel: this.defaultEmbeddingModel,
      },
    };
  }
  if (config?.openai?.apiKey) {
    return {
      apiKey: config.openai.apiKey,
      // Pass the tenant's chat model through (may be undefined; callers
      // apply their own default).
      model: config.openai.model,
      embeddingModel: config.openai.embeddingModel || this.defaultEmbeddingModel,
    };
  }
  return null;
}
}

View File

@@ -0,0 +1,42 @@
import { Injectable, Logger } from '@nestjs/common';
import { InjectQueue } from '@nestjs/bullmq';
import { Queue } from 'bullmq';
import {
SEMANTIC_REFRESH_QUEUE,
SEMANTIC_REFRESH_JOB,
} from '../semantic-refresh.constants';
import { SemanticRefreshJobData } from '../semantic-refresh.processor';
@Injectable()
export class SemanticRefreshQueueService {
  private readonly logger = new Logger(SemanticRefreshQueueService.name);

  constructor(
    @InjectQueue(SEMANTIC_REFRESH_QUEUE) private readonly queue: Queue,
  ) {}

  /**
   * Queues a background semantic refresh for a single record.
   *
   * Jobs retry up to 3 times with exponential backoff (2s base); completed
   * and failed jobs are trimmed to the most recent 100/50 respectively.
   *
   * @param tenantId      Tenant owning the record.
   * @param objectApiName API name of the record's object definition.
   * @param recordId      Record to refresh.
   * @param userId        Optional user attributed to the refresh.
   * @param trigger       Event that caused the enqueue (default 'manual').
   */
  async enqueue(
    tenantId: string,
    objectApiName: string,
    recordId: string,
    userId?: string,
    trigger: string = 'manual',
  ): Promise<void> {
    const payload: SemanticRefreshJobData = {
      tenantId,
      objectApiName,
      recordId,
      userId,
      trigger,
    };
    const jobOptions = {
      attempts: 3,
      backoff: { type: 'exponential', delay: 2000 },
      removeOnComplete: 100,
      removeOnFail: 50,
    };
    await this.queue.add(SEMANTIC_REFRESH_JOB, payload, jobOptions);
    this.logger.debug(
      `Enqueued semantic refresh: ${objectApiName}:${recordId} trigger=${trigger}`,
    );
  }
}

View File

@@ -10,9 +10,10 @@ import { RbacModule } from '../rbac/rbac.module';
import { ModelRegistry } from './models/model.registry';
import { ModelService } from './models/model.service';
import { MeilisearchModule } from '../search/meilisearch.module';
import { KnowledgeModule } from '../knowledge/knowledge.module';
@Module({
imports: [TenantModule, MigrationModule, RbacModule, MeilisearchModule],
imports: [TenantModule, MigrationModule, RbacModule, MeilisearchModule, KnowledgeModule],
providers: [
ObjectService,
SchemaManagementService,

View File

@@ -9,6 +9,7 @@ import { FieldDefinition } from '../models/field-definition.model';
import { User } from '../models/user.model';
import { ObjectMetadata } from './models/dynamic-model.factory';
import { MeilisearchService } from '../search/meilisearch.service';
import { SemanticRefreshQueueService } from '../knowledge/services/semantic-refresh-queue.service';
type SearchFilter = {
field: string;
@@ -39,6 +40,7 @@ export class ObjectService {
private modelService: ModelService,
private authService: AuthorizationService,
private meilisearchService: MeilisearchService,
private semanticRefreshQueue: SemanticRefreshQueueService,
) {}
// Setup endpoints - Object metadata management
@@ -1128,6 +1130,13 @@ export class ObjectService {
);
const record = await boundModel.query().insert(normalizedRecordData);
await this.indexRecord(resolvedTenantId, objectApiName, objectDefModel.fields, record);
await this.semanticRefreshQueue.enqueue(
resolvedTenantId,
objectApiName,
record.id,
userId,
'record_created',
);
return record;
}
@@ -1197,6 +1206,13 @@ export class ObjectService {
await boundModel.query().patch(normalizedEditableData).where({ id: recordId });
const record = await boundModel.query().where({ id: recordId }).first();
await this.indexRecord(resolvedTenantId, objectApiName, objectDefModel.fields, record);
await this.semanticRefreshQueue.enqueue(
resolvedTenantId,
objectApiName,
recordId,
userId,
'record_updated',
);
return record;
}

View File

@@ -8,9 +8,23 @@ type MeiliConfig = {
indexPrefix: string;
};
type HybridSearchOptions = {
embedder: string;
semanticRatio?: number;
};
type OpenAiEmbedderConfig = {
embedderName: string;
apiKey: string;
model: string;
documentTemplate: string;
};
@Injectable()
export class MeilisearchService {
private readonly logger = new Logger(MeilisearchService.name);
private readonly embedderCache = new Map<string, string>();
private vectorStoreEnabled = false;
isEnabled(): boolean {
return Boolean(this.getConfig());
@@ -158,6 +172,100 @@ export class MeilisearchService {
}
}
/**
 * Returns the per-tenant Meilisearch index name used for semantic chunks,
 * e.g. `tenant_<id>_semantic_chunks` (lowercased).
 */
buildSemanticChunkIndexName(tenantId: string): string {
  const prefix = this.getConfig()?.indexPrefix || 'tenant_';
  return `${prefix}${tenantId}_semantic_chunks`.toLowerCase();
}
async upsertDocuments(indexName: string, documents: Record<string, any>[]): Promise<void> {
const config = this.getConfig();
if (!config || !Array.isArray(documents) || documents.length === 0) return;
const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/documents?primaryKey=id`;
try {
const response = await this.requestJson('POST', url, documents, this.buildHeaders(config));
if (!this.isSuccessStatus(response.status)) {
this.logger.warn(`Meilisearch document upsert failed for index ${indexName}: ${response.status}`);
return;
}
// Meilisearch indexes (and embeds) documents asynchronously. Wait for the task
// to complete so callers can immediately search and see the new documents.
const taskUid = response.body?.taskUid ?? response.body?.uid;
if (Number.isFinite(Number(taskUid))) {
const succeeded = await this.waitForTask(config, Number(taskUid), 30000);
if (!succeeded) {
this.logger.warn(`Meilisearch indexing task did not succeed within timeout: taskUid=${taskUid} index=${indexName}`);
}
}
} catch (error) {
this.logger.warn(`Meilisearch document upsert failed: ${error.message}`);
}
}
/**
 * Runs a search against one Meilisearch index, optionally in hybrid
 * (vector + lexical) mode. On a hybrid failure caused by a missing embedder,
 * retries once as a plain lexical search. Never throws: all failures are
 * logged and return an empty result set.
 *
 * @param indexName Index to query.
 * @param query     Free-text query string.
 * @param limit     Maximum number of hits (default 20).
 * @param hybrid    Optional embedder name + semanticRatio for hybrid search.
 * @returns `{ hits, total }` — total uses estimatedTotalHits when available.
 */
async searchIndex(
  indexName: string,
  query: string,
  limit = 20,
  hybrid?: HybridSearchOptions,
): Promise<{ hits: any[]; total: number }> {
  const config = this.getConfig();
  if (!config) return { hits: [], total: 0 };
  const url = `${config.host}/indexes/${encodeURIComponent(indexName)}/search`;
  try {
    const response = await this.requestJson(
      'POST',
      url,
      {
        q: query,
        limit,
        showRankingScore: true,
        // Ranking-score details are only requested for hybrid searches.
        ...(hybrid ? { hybrid, showRankingScoreDetails: true } : {}),
      },
      this.buildHeaders(config),
    );
    if (!this.isSuccessStatus(response.status)) {
      this.logger.warn(
        `Meilisearch search failed for index ${indexName}: ${response.status}`,
      );
      this.logger.warn(
        `Meilisearch search payload: ${JSON.stringify({ q: query, limit, hybrid })}`,
      );
      this.logger.warn(
        `Meilisearch search error body: ${JSON.stringify(response.body)}`,
      );
      // If hybrid is invalid (embedder missing), retry once without hybrid
      if (hybrid && response.body?.code === 'invalid_embedder') {
        const fallback = await this.requestJson(
          'POST',
          url,
          { q: query, limit },
          this.buildHeaders(config),
        );
        if (this.isSuccessStatus(fallback.status)) {
          const hits = Array.isArray(fallback.body?.hits) ? fallback.body.hits : [];
          const total =
            fallback.body?.estimatedTotalHits ?? fallback.body?.nbHits ?? hits.length;
          this.logger.warn(
            `Meilisearch hybrid failed; fell back to lexical search for index ${indexName}.`,
          );
          return { hits, total };
        }
      }
      return { hits: [], total: 0 };
    }
    const hits = Array.isArray(response.body?.hits) ? response.body.hits : [];
    const total = response.body?.estimatedTotalHits ?? response.body?.nbHits ?? hits.length;
    return { hits, total };
  } catch (error) {
    this.logger.warn(`Meilisearch search failed: ${error.message}`);
    return { hits: [], total: 0 };
  }
}
private getConfig(): MeiliConfig | null {
const host = process.env.MEILI_HOST || process.env.MEILISEARCH_HOST;
if (!host) return null;
@@ -198,7 +306,7 @@ export class MeilisearchService {
}
private requestJson(
method: 'POST' | 'DELETE',
method: 'POST' | 'DELETE' | 'PATCH' | 'GET',
url: string,
payload: any,
headers: Record<string, string>,
@@ -235,10 +343,141 @@ export class MeilisearchService {
);
request.on('error', reject);
if (payload !== undefined) {
if (payload !== undefined && method !== 'GET') {
request.write(JSON.stringify(payload));
}
request.end();
});
}
/**
 * Enables Meilisearch's experimental vector-store feature via the
 * /experimental-features endpoint, memoising success in
 * `this.vectorStoreEnabled` so the PATCH is issued at most once.
 *
 * NOTE(review): currently short-circuited by the early `return` below, so it
 * is a no-op — presumably because newer Meilisearch versions no longer need
 * the toggle; confirm before removing the disable.
 */
private async enableVectorStore(): Promise<void> {
  // Temporarily disabled to avoid the overhead of checking on every save.
  // Re-enable by removing the early return below.
  return;
  if (this.vectorStoreEnabled) return; // eslint-disable-line no-unreachable
  const meiliConfig = this.getConfig();
  if (!meiliConfig) return;
  const url = `${meiliConfig.host}/experimental-features`;
  try {
    const response = await this.requestJson(
      'PATCH',
      url,
      { vectorStore: true },
      this.buildHeaders(meiliConfig),
    );
    if (this.isSuccessStatus(response.status)) {
      this.vectorStoreEnabled = true;
      this.logger.log('Meilisearch vector store experimental feature enabled');
    } else {
      this.logger.warn(
        `Failed to enable Meilisearch vector store: ${response.status} ${JSON.stringify(response.body)}`,
      );
    }
  } catch (error) {
    this.logger.warn(`Failed to enable Meilisearch vector store: ${error.message}`);
  }
}
/**
 * Ensures `indexName` has an OpenAI embedder with the given settings.
 *
 * The applied settings are memoised per index+embedder as a JSON signature,
 * so repeated calls with unchanged settings skip the network round-trip.
 * After applying, waits for the settings task and verifies the embedder is
 * actually present before caching.
 *
 * @returns true when the embedder is confirmed present and up to date;
 *          false on any failure (callers fall back to lexical search).
 */
async ensureOpenAiEmbedder(
  indexName: string,
  config: OpenAiEmbedderConfig,
): Promise<boolean> {
  const meiliConfig = this.getConfig();
  if (!meiliConfig || !config?.apiKey) return false;
  await this.enableVectorStore();
  // Signature includes every setting (incl. apiKey) so any change re-applies.
  const signature = JSON.stringify({
    embedderName: config.embedderName,
    model: config.model,
    documentTemplate: config.documentTemplate,
    apiKey: config.apiKey,
  });
  const cacheKey = `${indexName}:${config.embedderName}`;
  if (this.embedderCache.get(cacheKey) === signature) {
    return true;
  }
  const url = `${meiliConfig.host}/indexes/${encodeURIComponent(indexName)}/settings/embedders`;
  try {
    const response = await this.requestJson(
      'PATCH',
      url,
      {
        [config.embedderName]: {
          source: 'openAi',
          model: config.model,
          apiKey: config.apiKey,
          documentTemplate: config.documentTemplate,
        },
      },
      this.buildHeaders(meiliConfig),
    );
    if (!this.isSuccessStatus(response.status)) {
      this.logger.warn(
        `Meilisearch embedder update failed for index ${indexName}: ${response.status}`,
      );
      this.logger.warn(
        `Meilisearch embedder error body: ${JSON.stringify(response.body)}`,
      );
      return false;
    }
    // Settings changes are applied asynchronously; wait for the task.
    const taskUid = response.body?.taskUid ?? response.body?.uid;
    if (Number.isFinite(Number(taskUid))) {
      const succeeded = await this.waitForTask(meiliConfig, Number(taskUid), 8000);
      if (!succeeded) {
        this.logger.warn(`Meilisearch embedder task did not succeed: ${taskUid}`);
        return false;
      }
    }
    // Double-check the embedder really exists before caching the signature.
    const hasEmbedder = await this.hasEmbedder(meiliConfig, indexName, config.embedderName);
    if (!hasEmbedder) {
      this.logger.warn(`Meilisearch embedder missing after update: ${config.embedderName}`);
      return false;
    }
    this.embedderCache.set(cacheKey, signature);
    return true;
  } catch (error) {
    this.logger.warn(`Meilisearch embedder update failed: ${error.message}`);
    return false;
  }
}
/**
 * Polls a Meilisearch task every 300ms until it reaches a terminal state or
 * the timeout elapses. Returns true only when the task status is 'succeeded'.
 */
private async waitForTask(
  config: MeiliConfig,
  taskUid: number,
  timeoutMs = 8000,
): Promise<boolean> {
  const taskUrl = `${config.host}/tasks/${taskUid}`;
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const response = await this.requestJson('GET', taskUrl, undefined, this.buildHeaders(config));
    if (!this.isSuccessStatus(response.status)) {
      return false;
    }
    switch (response.body?.status) {
      case 'succeeded':
        return true;
      case 'failed':
      case 'canceled':
        this.logger.warn(`Meilisearch task ${taskUid} failed: ${JSON.stringify(response.body?.error)}`);
        return false;
      default:
        // Still enqueued/processing; poll again shortly.
        await new Promise((resolve) => setTimeout(resolve, 300));
    }
  }
  return false;
}
/**
 * Checks whether the index's embedder settings contain `embedderName`.
 * Returns false on any request failure.
 */
private async hasEmbedder(
  config: MeiliConfig,
  indexName: string,
  embedderName: string,
): Promise<boolean> {
  const settingsUrl = `${config.host}/indexes/${encodeURIComponent(indexName)}/settings/embedders`;
  const response = await this.requestJson('GET', settingsUrl, undefined, this.buildHeaders(config));
  if (!this.isSuccessStatus(response.status)) {
    return false;
  }
  return Boolean((response.body || {})[embedderName]);
}
}

View File

@@ -11,6 +11,7 @@ export interface OpenAIConfig {
apiKey: string;
assistantId?: string;
model?: string;
embeddingModel?: string;
voice?: string;
}

View File

@@ -0,0 +1,181 @@
<script setup lang="ts">
import { ref, watch, computed } from 'vue'
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'
import { Button } from '@/components/ui/button'
import { Textarea } from '@/components/ui/textarea'
import { Separator } from '@/components/ui/separator'
import { useApi } from '@/composables/useApi'
import { useAuth } from '@/composables/useAuth'
type CommentRecord = {
id: string
content: string
author_user_id: string
created_at: string
updated_at: string
}
interface Props {
objectApiName: string
recordId: string
}
const props = defineProps<Props>()
const { api } = useApi()
const { user } = useAuth()
const comments = ref<CommentRecord[]>([])
const loading = ref(false)
const error = ref<string | null>(null)
const newComment = ref('')
const saving = ref(false)
const editingId = ref<string | null>(null)
const editContent = ref('')
// Only the comment's author may edit or delete it.
const isOwner = (comment: CommentRecord) => comment.author_user_id === user.value?.id
// Renders an ISO timestamp with the user's locale; returns the raw input
// when it is missing or not a parseable date.
const formatDate = (value?: string) => {
  if (!value) return ''
  const parsed = new Date(value)
  return Number.isNaN(parsed.getTime()) ? value : parsed.toLocaleString()
}
// Buttons are enabled only with non-blank content and no in-flight save.
const canSubmit = computed(() => newComment.value.trim().length > 0 && !saving.value)
const canSaveEdit = computed(() => editContent.value.trim().length > 0 && !saving.value)
// Loads all comments for the current record, clearing any previous error.
const fetchComments = async () => {
  const { objectApiName, recordId } = props
  if (!objectApiName || !recordId) return
  error.value = null
  loading.value = true
  try {
    const response = await api.get(`/knowledge/comments/${objectApiName}/${recordId}`)
    comments.value = Array.isArray(response) ? response : []
  } catch (e: any) {
    error.value = e.message || 'Failed to load comments'
  } finally {
    loading.value = false
  }
}
// Creates a comment for this record, then clears the input and reloads.
const addComment = async () => {
  if (!canSubmit.value) return
  saving.value = true
  const payload = {
    parentObjectApiName: props.objectApiName,
    parentRecordId: props.recordId,
    content: newComment.value.trim(),
  }
  try {
    await api.post('/knowledge/comments', payload)
    newComment.value = ''
    await fetchComments()
  } catch (e: any) {
    error.value = e.message || 'Failed to add comment'
  } finally {
    saving.value = false
  }
}
// Opens the inline editor pre-filled with the comment's current content.
const startEdit = (comment: CommentRecord) => {
  editContent.value = comment.content
  editingId.value = comment.id
}
// Closes the inline editor and discards any unsaved changes.
const cancelEdit = () => {
  editContent.value = ''
  editingId.value = null
}
// Persists the edited content of the comment currently being edited,
// then closes the editor and reloads the list.
const saveEdit = async () => {
  const targetId = editingId.value
  if (!targetId || !canSaveEdit.value) return
  saving.value = true
  try {
    await api.patch(`/knowledge/comments/${targetId}`, {
      content: editContent.value.trim(),
    })
    editingId.value = null
    editContent.value = ''
    await fetchComments()
  } catch (e: any) {
    error.value = e.message || 'Failed to update comment'
  } finally {
    saving.value = false
  }
}
// Deletes a comment after a native confirmation prompt, then reloads.
const deleteComment = async (comment: CommentRecord) => {
  if (!confirm('Delete this comment?')) return
  saving.value = true
  try {
    await api.delete(`/knowledge/comments/${comment.id}`)
    await fetchComments()
  } catch (e: any) {
    error.value = e.message || 'Failed to delete comment'
  } finally {
    saving.value = false
  }
}
// Load comments immediately and whenever the parent record changes.
watch(
  () => [props.objectApiName, props.recordId],
  () => {
    fetchComments()
  },
  { immediate: true },
)
</script>
<template>
<Card>
<CardHeader>
<CardTitle>Comments</CardTitle>
</CardHeader>
<CardContent class="space-y-4">
<div class="space-y-3">
<Textarea
v-model="newComment"
placeholder="Add a comment..."
:disabled="saving"
class="min-h-[96px]"
/>
<div class="flex items-center justify-between">
<p class="text-sm text-muted-foreground" v-if="error">{{ error }}</p>
<Button size="sm" :disabled="!canSubmit" @click="addComment">
Add Comment
</Button>
</div>
</div>
<Separator />
<div v-if="loading" class="text-sm text-muted-foreground">Loading comments...</div>
<div v-else-if="comments.length === 0" class="text-sm text-muted-foreground">
No comments yet.
</div>
<div v-else class="space-y-4">
<div v-for="comment in comments" :key="comment.id" class="rounded-lg border p-4 space-y-2">
<div class="flex items-center justify-between">
<div class="text-xs text-muted-foreground">
<span>Author: {{ comment.author_user_id }}</span>
<span class="mx-2"></span>
<span>{{ formatDate(comment.created_at) }}</span>
</div>
<div class="flex items-center gap-2" v-if="isOwner(comment)">
<Button variant="ghost" size="sm" @click="startEdit(comment)">Edit</Button>
<Button variant="ghost" size="sm" @click="deleteComment(comment)">Delete</Button>
</div>
</div>
<div v-if="editingId === comment.id" class="space-y-2">
<Textarea v-model="editContent" :disabled="saving" class="min-h-[80px]" />
<div class="flex items-center gap-2">
<Button size="sm" :disabled="!canSaveEdit" @click="saveEdit">Save</Button>
<Button variant="ghost" size="sm" @click="cancelEdit">Cancel</Button>
</div>
</div>
<p v-else class="text-sm whitespace-pre-line">{{ comment.content }}</p>
</div>
</div>
</CardContent>
</Card>
</template>

View File

@@ -0,0 +1,237 @@
<script setup lang="ts">
import { ref, watch, computed } from 'vue'
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'
import { Button } from '@/components/ui/button'
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'
import { Separator } from '@/components/ui/separator'
import { useApi } from '@/composables/useApi'
type SemanticLink = {
id: string
source_entity_type: string
source_entity_id: string
target_entity_type: string
target_entity_id: string
source_entity_label?: string
target_entity_label?: string
source_entity_name?: string
target_entity_name?: string
link_type: string
status: string
origin: string
confidence?: number
reason?: string
evidence?: any
updated_at?: string
}
interface Props {
objectApiName: string
recordId: string
}
const props = defineProps<Props>()
const { api } = useApi()
const links = ref<SemanticLink[]>([])
const loading = ref(false)
const error = ref<string | null>(null)
const activeTab = ref<'all' | 'suggested' | 'approved' | 'rejected' | 'dismissed'>('suggested')
// Locale-formatted timestamp; echoes the raw value back when it is empty
// or cannot be parsed as a date.
const formatDate = (value?: string) => {
  if (!value) return ''
  const when = new Date(value)
  return Number.isNaN(when.getTime()) ? value : when.toLocaleString()
}
// Renders a 0..1 confidence score as a whole-number percentage; em dash
// when the score is absent.
const formatConfidence = (value?: number) =>
  value == null ? '—' : `${Math.round(value * 100)}%`
// Returns whichever end of the link is NOT the record currently displayed,
// so the UI always shows "the other" entity.
const getOtherSide = (link: SemanticLink) => {
  const recordIsSource =
    link.source_entity_type === props.objectApiName &&
    link.source_entity_id === props.recordId
  if (recordIsSource) {
    return {
      entityType: link.target_entity_type,
      entityId: link.target_entity_id,
      entityLabel: link.target_entity_label,
      entityName: link.target_entity_name,
    }
  }
  return {
    entityType: link.source_entity_type,
    entityId: link.source_entity_id,
    entityLabel: link.source_entity_label,
    entityName: link.source_entity_name,
  }
}
// Converts a snake_case link type to Title Case ('depends_on' -> 'Depends On');
// missing values render as 'Related'.
const formatLinkType = (value?: string) =>
  value ? value.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase()) : 'Related'
// Accepts evidence as an already-parsed object or a JSON string; returns
// null for anything missing or unparseable.
const parseEvidence = (raw: any) => {
  if (!raw) return null
  if (typeof raw !== 'object') {
    try {
      return JSON.parse(raw)
    } catch {
      return null
    }
  }
  return raw
}
// Loads semantic links for this record, filtered by the active status tab
// ('all' sends no status filter).
const fetchLinks = async () => {
  if (!props.objectApiName || !props.recordId) return
  error.value = null
  loading.value = true
  const params = activeTab.value === 'all' ? undefined : { status: activeTab.value }
  try {
    const data = await api.get(
      `/knowledge/semantic/links/${props.objectApiName}/${props.recordId}`,
      { params },
    )
    links.value = Array.isArray(data) ? data : []
  } catch (e: any) {
    error.value = e.message || 'Failed to load semantic links'
  } finally {
    loading.value = false
  }
}
// Sets the review status on a semantic link, then reloads the visible list.
const reviewLink = async (linkId: string, status: 'approved' | 'rejected' | 'dismissed') => {
  try {
    await api.patch(`/knowledge/semantic/links/${linkId}/review`, { status })
    await fetchLinks()
  } catch (e: any) {
    error.value = e.message || 'Failed to update link'
  }
}
// A review action is offered only when the link is not already in that state.
const canApprove = (status: string): boolean => status !== 'approved'
const canReject = (status: string): boolean => status !== 'rejected'
const canDismiss = (status: string): boolean => status !== 'dismissed'
// Refetch whenever the record identity or the selected tab changes;
// `immediate` triggers the initial load on mount.
watch(
  () => [props.objectApiName, props.recordId, activeTab.value],
  fetchLinks,
  { immediate: true },
)
</script>
<template>
  <!-- Review panel for semantic links attached to the current record. -->
  <Card>
    <CardHeader class="flex flex-row items-center justify-between">
      <CardTitle>Semantic Links</CardTitle>
      <!-- Manual refetch; the watcher also refetches on record/tab changes. -->
      <Button variant="ghost" size="sm" @click="fetchLinks">Refresh</Button>
    </CardHeader>
    <CardContent class="space-y-4">
      <!-- Status filter tabs; "all" fetches with no status param. -->
      <Tabs v-model="activeTab" class="space-y-4">
        <TabsList>
          <TabsTrigger value="suggested">Suggested</TabsTrigger>
          <TabsTrigger value="approved">Approved</TabsTrigger>
          <TabsTrigger value="rejected">Rejected</TabsTrigger>
          <TabsTrigger value="dismissed">Dismissed</TabsTrigger>
          <TabsTrigger value="all">All</TabsTrigger>
        </TabsList>
        <!-- One shared pane bound to whichever tab is active. -->
        <TabsContent :value="activeTab" class="space-y-4">
          <!-- Loading / error / empty states, checked in that priority order. -->
          <div v-if="loading" class="text-sm text-muted-foreground">
            Loading links...
          </div>
          <div v-else-if="error" class="text-sm text-destructive">
            {{ error }}
          </div>
          <div v-else-if="links.length === 0" class="text-sm text-muted-foreground">
            No links found.
          </div>
          <div v-else class="space-y-4">
            <!-- One card per link, headed by the entity on the other side. -->
            <div
              v-for="link in links"
              :key="link.id"
              class="rounded-lg border p-4 space-y-3"
            >
              <div class="flex flex-wrap items-center justify-between gap-2">
                <div class="text-sm font-medium">
                  {{ getOtherSide(link).entityLabel || getOtherSide(link).entityType }} ·
                  {{ getOtherSide(link).entityName || getOtherSide(link).entityId }}
                </div>
                <div class="text-xs text-muted-foreground">
                  {{ formatLinkType(link.link_type) }} {{ link.origin }} {{ formatConfidence(link.confidence) }}
                </div>
              </div>
              <div class="text-xs text-muted-foreground">
                Status: <span class="font-medium text-foreground">{{ link.status }}</span>
                <span v-if="link.updated_at" class="ml-2">Updated: {{ formatDate(link.updated_at) }}</span>
              </div>
              <p v-if="link.reason" class="text-sm">{{ link.reason }}</p>
              <!-- Evidence payload (object or JSON string) attached to the link.
                   NOTE(review): parseEvidence is re-invoked on every binding below,
                   so each render re-parses the same JSON several times per link —
                   consider computing it once per link (e.g. a computed map). -->
              <div v-if="parseEvidence(link.evidence)" class="text-xs text-muted-foreground space-y-2">
                <Separator />
                <div>
                  <div class="font-medium text-foreground">Evidence</div>
                  <p v-if="parseEvidence(link.evidence)?.explanation" class="mt-1 text-foreground">
                    {{ parseEvidence(link.evidence).explanation }}
                  </p>
                  <div v-if="parseEvidence(link.evidence)?.matchedSignals?.length" class="mt-2">
                    <div>Matched context:</div>
                    <ul class="list-disc pl-4">
                      <li
                        v-for="(signal, idx) in parseEvidence(link.evidence).matchedSignals"
                        :key="idx"
                      >
                        {{ signal }}
                      </li>
                    </ul>
                  </div>
                  <div v-if="parseEvidence(link.evidence)?.matchedChunks?.length" class="mt-2">
                    <div>Matched excerpts:</div>
                    <ul class="list-disc pl-4">
                      <li
                        v-for="(match, idx) in parseEvidence(link.evidence).matchedChunks"
                        :key="idx"
                      >
                        {{ match.sourceKind }}: {{ match.text }}
                      </li>
                    </ul>
                  </div>
                </div>
              </div>
              <!-- Review actions; each button is disabled once the link
                   already holds that status (see canApprove/canReject/canDismiss). -->
              <div class="flex items-center gap-2">
                <Button
                  size="sm"
                  variant="outline"
                  @click="reviewLink(link.id, 'approved')"
                  :disabled="!canApprove(link.status)"
                >
                  Approve
                </Button>
                <Button
                  size="sm"
                  variant="outline"
                  @click="reviewLink(link.id, 'rejected')"
                  :disabled="!canReject(link.status)"
                >
                  Reject
                </Button>
                <Button
                  size="sm"
                  variant="outline"
                  @click="reviewLink(link.id, 'dismissed')"
                  :disabled="!canDismiss(link.status)"
                >
                  Dismiss
                </Button>
              </div>
            </div>
          </div>
        </TabsContent>
      </Tabs>
    </CardContent>
  </Card>
</template>

View File

@@ -5,6 +5,8 @@ import { Button } from '@/components/ui/button'
import { Separator } from '@/components/ui/separator'
import FieldRenderer from '@/components/fields/FieldRenderer.vue'
import RelatedList from '@/components/RelatedList.vue'
import RecordCommentsPanel from '@/components/knowledge/RecordCommentsPanel.vue'
import SemanticLinksPanel from '@/components/knowledge/SemanticLinksPanel.vue'
import { DetailViewConfig, ViewMode, FieldSection, FieldConfig, RelatedListConfig } from '@/types/field-types'
import { Edit, Trash2, ArrowLeft } from 'lucide-vue-next'
import {
@@ -167,6 +169,18 @@ const getFieldsBySection = (section: FieldSection) => {
@create="(objectApiName, parentId) => emit('createRelated', objectApiName, parentId)"
/>
</div>
<!-- Knowledge Panels -->
<div v-if="data?.id && config?.objectApiName" class="space-y-6">
<RecordCommentsPanel
:object-api-name="config.objectApiName"
:record-id="data.id"
/>
<SemanticLinksPanel
:object-api-name="config.objectApiName"
:record-id="data.id"
/>
</div>
</div>
</template>

View File

@@ -7,6 +7,8 @@ import FieldRenderer from '@/components/fields/FieldRenderer.vue'
import PageLayoutRenderer from '@/components/PageLayoutRenderer.vue'
import RelatedList from '@/components/RelatedList.vue'
import RecordSharing from '@/components/RecordSharing.vue'
import RecordCommentsPanel from '@/components/knowledge/RecordCommentsPanel.vue'
import SemanticLinksPanel from '@/components/knowledge/SemanticLinksPanel.vue'
import { DetailViewConfig, ViewMode, FieldSection, FieldConfig, RelatedListConfig } from '@/types/field-types'
import { Edit, Trash2, ArrowLeft } from 'lucide-vue-next'
import {
@@ -170,6 +172,9 @@ const visibleRelatedLists = computed<RelatedListConfig[]>(() => {
<TabsTrigger v-if="showSharing && data.id" value="sharing">
Sharing
</TabsTrigger>
<TabsTrigger v-if="data.id && config.objectApiName" value="knowledge">
Knowledge
</TabsTrigger>
</TabsList>
<!-- Details Tab -->
@@ -277,6 +282,20 @@ const visibleRelatedLists = computed<RelatedListConfig[]>(() => {
</CardContent>
</Card>
</TabsContent>
<!-- Knowledge Tab -->
<TabsContent value="knowledge" class="space-y-6">
<RecordCommentsPanel
v-if="data.id && config.objectApiName"
:object-api-name="config.objectApiName"
:record-id="data.id"
/>
<SemanticLinksPanel
v-if="data.id && config.objectApiName"
:object-api-name="config.objectApiName"
:record-id="data.id"
/>
</TabsContent>
</Tabs>
</div>
</template>