Mirror of https://github.com/eliasstepanik/core.git (synced 2026-01-27 03:38:30 +00:00)
Feat: add documents to the kg (#64)
* Feat: add documents to the kg
* Feat: add versioning to documents
* Fix: invalidation of evolved facts
* fix: mcp return
* fix: invalidAt is not displayed in graph popover
* Fix: use document id for the flow
* refactor: consolidate document versioning around sessionId instead of documentId
* fix: add docs link in welcome email
* fix: give more time for larger graphs to settle
* bump: new version 0.1.20

Co-authored-by: Manoj K <saimanoj58@gmail.com>
This commit is contained in:
parent
1995d4a9c6
commit
4a0a57cb97
@@ -1,4 +1,4 @@
-VERSION=0.1.19
+VERSION=0.1.20
 
 # Nest run in docker, change host to database container name
 DB_HOST=localhost

@@ -513,7 +513,7 @@ export const GraphClustering = forwardRef<
   } else if (complexity < 500) {
     durationSeconds = 4.0;
   } else {
-    durationSeconds = Math.min(8, 5 + (complexity - 500) * 0.006);
+    durationSeconds = Math.min(20, 5 + (complexity - 500) * 0.006);
   }

   return {

@@ -82,10 +82,12 @@ export function GraphPopovers({
       }),
     );

-    return Object.entries(entityProperties).map(([key, value]) => ({
-      key,
-      value,
-    }));
+    return Object.entries(entityProperties)
+      .map(([key, value]) => ({
+        key,
+        value,
+      }))
+      .filter(({ value }) => value);
   }, [nodePopupContent]);

   return (

@@ -4,6 +4,7 @@ import { AlertCircle, Loader2 } from "lucide-react";
 import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
 import { Badge } from "../ui/badge";
 import { type LogItem } from "~/hooks/use-logs";
+import Markdown from "react-markdown";

 interface LogDetailsProps {
   open: boolean;

@ -79,13 +80,9 @@ export function LogDetails({
|
|||||||
|
|
||||||
<div className="max-h-[70vh] overflow-auto p-4 pt-0">
|
<div className="max-h-[70vh] overflow-auto p-4 pt-0">
|
||||||
{/* Log Content */}
|
{/* Log Content */}
|
||||||
<div className="mb-4">
|
<div className="mb-4 text-sm break-words whitespace-pre-wrap">
|
||||||
<div className="rounded-md">
|
<div className="rounded-md">
|
||||||
<p
|
<Markdown>{text}</Markdown>
|
||||||
className="text-sm break-words whitespace-pre-wrap"
|
|
||||||
style={{ lineHeight: "1.5" }}
|
|
||||||
dangerouslySetInnerHTML={{ __html: text || "No log details." }}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -4,7 +4,11 @@ import { useEffect, useRef } from "react";

 import { useOptionalUser, useUserChanged } from "./useUser";

-export const usePostHog = (apiKey?: string, logging = false, debug = false): void => {
+export const usePostHog = (
+  apiKey?: string,
+  logging = false,
+  debug = false,
+): void => {
   const postHogInitialized = useRef(false);
   const location = useLocation();
   const user = useOptionalUser();

@@ -15,7 +19,7 @@ export const usePostHog = (apiKey?: string, logging = false, debug = false): void => {
   if (postHogInitialized.current === true) return;
   if (logging) console.log("Initializing PostHog");
   posthog.init(apiKey, {
-    api_host: "https://eu.posthog.com",
+    api_host: "https://us.i.posthog.com",
     opt_in_site_apps: true,
     debug,
     loaded: function (posthog) {

@@ -1,8 +1,10 @@
 // lib/ingest.queue.ts
 import { IngestionStatus } from "@core/database";
+import { EpisodeType } from "@core/types";
 import { type z } from "zod";
 import { prisma } from "~/db.server";
 import { type IngestBodyRequest, ingestTask } from "~/trigger/ingest/ingest";
+import { ingestDocumentTask } from "~/trigger/ingest/ingest-document";

 export const addToQueue = async (
   body: z.infer<typeof IngestBodyRequest>,

@@ -35,16 +37,38 @@ export const addToQueue = async (
     },
   });

-  const handler = await ingestTask.trigger(
-    { body, userId, workspaceId: user.Workspace.id, queueId: queuePersist.id },
-    {
-      queue: "ingestion-queue",
-      concurrencyKey: userId,
-      tags: [user.id, queuePersist.id],
-    },
-  );
+  let handler;
+  if (body.type === EpisodeType.DOCUMENT) {
+    handler = await ingestDocumentTask.trigger(
+      {
+        body,
+        userId,
+        workspaceId: user.Workspace.id,
+        queueId: queuePersist.id,
+      },
+      {
+        queue: "document-ingestion-queue",
+        concurrencyKey: userId,
+        tags: [user.id, queuePersist.id],
+      },
+    );
+  } else if (body.type === EpisodeType.CONVERSATION) {
+    handler = await ingestTask.trigger(
+      {
+        body,
+        userId,
+        workspaceId: user.Workspace.id,
+        queueId: queuePersist.id,
+      },
+      {
+        queue: "ingestion-queue",
+        concurrencyKey: userId,
+        tags: [user.id, queuePersist.id],
+      },
+    );
+  }

-  return { id: handler.id, token: handler.publicAccessToken };
+  return { id: handler?.id, token: handler?.publicAccessToken };
 };

 export { IngestBodyRequest };

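
A minimal caller sketch for the new routing, assuming an IngestBodyRequest-shaped payload; every field except type is illustrative, and the sketch only exercises the two branches shown above:

// Hypothetical caller: DOCUMENT bodies are routed to ingestDocumentTask on
// "document-ingestion-queue"; CONVERSATION bodies go to ingestTask on
// "ingestion-queue". Any other type leaves handler undefined, hence the
// optional chaining in the return value.
const { id, token } = await addToQueue(
  {
    episodeBody: markdownContent, // illustrative
    referenceTime: new Date().toISOString(),
    source: "Docs", // illustrative
    type: EpisodeType.DOCUMENT,
  },
  userId,
);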
@@ -148,6 +148,8 @@ export const getClusteredGraphData = async (userId: string) => {
       s.uuid as statementUuid,
       s.spaceIds as spaceIds,
       s.fact as fact,
+      s.invalidAt as invalidAt,
+      s.validAt as validAt,
       s.createdAt as createdAt,
       rel.isEntityToStatement as isEntityToStatement,
       rel.isStatementToEntity as isStatementToEntity`,

@@ -175,6 +177,8 @@ export const getClusteredGraphData = async (userId: string) => {
     const clusterIds = record.get("spaceIds");
     const clusterId = clusterIds ? clusterIds[0] : undefined;
     const fact = record.get("fact");
+    const invalidAt = record.get("invalidAt");
+    const validAt = record.get("validAt");
     const createdAt = record.get("createdAt");

     // Create unique edge identifier to avoid duplicates

@@ -195,6 +199,8 @@ export const getClusteredGraphData = async (userId: string) => {
             clusterId,
             nodeType: "Statement",
             fact,
+            invalidAt,
+            validAt,
           }
         : {
             ...sourceProperties,

@@ -209,6 +215,8 @@ export const getClusteredGraphData = async (userId: string) => {
             clusterId,
             nodeType: "Statement",
             fact,
+            invalidAt,
+            validAt,
           }
         : {
             ...targetProperties,

@@ -355,6 +363,12 @@ const initializeSchema = async () => {
   await runQuery(
     "CREATE INDEX entity_user_uuid IF NOT EXISTS FOR (n:Entity) ON (n.userId, n.uuid)",
   );
+  await runQuery(
+    "CREATE INDEX episode_user_uuid IF NOT EXISTS FOR (n:Episode) ON (n.userId, n.uuid)",
+  );
+  await runQuery(
+    "CREATE INDEX episode_user_id IF NOT EXISTS FOR (n:Episode) ON (n.userId)",
+  );

   // Create vector indexes for semantic search (if using Neo4j 5.0+)
   await runQuery(`

@@ -6,6 +6,7 @@ import { addToQueue } from "~/lib/ingest.server";
 import { prisma } from "~/db.server";
 import { logger } from "~/services/logger.service";
 import { triggerWebhookDelivery } from "~/trigger/webhooks/webhook-delivery";
+import { EpisodeTypeEnum } from "@core/types";

 const ActivityCreateSchema = z.object({
   text: z.string().min(1, "Text is required"),

@@ -56,6 +57,7 @@ const { action, loader } = createActionApiRoute(
       episodeBody: body.text,
       referenceTime: new Date().toISOString(),
       source: body.source,
+      type: EpisodeTypeEnum.CONVERSATION,
     };

     const queueResponse = await addToQueue(

@@ -9,6 +9,7 @@ import { addToQueue } from "~/lib/ingest.server";
 import { SearchService } from "~/services/search.server";
 import { handleTransport } from "~/utils/mcp";
 import { SpaceService } from "~/services/space.server";
+import { EpisodeTypeEnum } from "@core/types";

 // Map to store transports by session ID with cleanup tracking
 const transports: {

@@ -124,6 +125,7 @@ const handleMCPRequest = async (
         episodeBody: args.message,
         referenceTime: new Date().toISOString(),
         source,
+        type: EpisodeTypeEnum.CONVERSATION,
       },
       userId,
     );

@@ -11,6 +11,7 @@ import { SpacePattern } from "~/services/spacePattern.server";
 import { addToQueue } from "~/lib/ingest.server";
 import { redirect } from "@remix-run/node";
 import { SpaceService } from "~/services/space.server";
+import { EpisodeTypeEnum } from "@core/types";

 export async function loader({ request, params }: LoaderFunctionArgs) {
   const workspace = await requireWorkpace(request);

@@ -68,6 +69,7 @@ export async function action({ request, params }: ActionFunctionArgs) {
       },
       source: space.name,
       spaceId: space.id,
+      type: EpisodeTypeEnum.CONVERSATION,
     },
     userId,
   );

@@ -26,6 +26,7 @@ import { updateUser } from "~/models/user.server";
 import { Copy, Check } from "lucide-react";
 import { addToQueue } from "~/lib/ingest.server";
 import { cn } from "~/lib/utils";
+import { EpisodeTypeEnum } from "@core/types";

 const ONBOARDING_STEP_COOKIE = "onboardingStep";
 const onboardingStepCookie = createCookie(ONBOARDING_STEP_COOKIE, {

@@ -75,6 +76,7 @@ export async function action({ request }: ActionFunctionArgs) {
       source: "Core",
       episodeBody: aboutUser,
       referenceTime: new Date().toISOString(),
+      type: EpisodeTypeEnum.CONVERSATION,
     },
     userId,
   );

apps/webapp/app/services/documentChunker.server.ts (new file, 315 lines)
@@ -0,0 +1,315 @@
import { encode } from "gpt-tokenizer";
import crypto from "crypto";

export interface DocumentChunk {
  content: string;
  chunkIndex: number;
  title?: string;
  context?: string;
  startPosition: number;
  endPosition: number;
  contentHash: string; // Hash for change detection
}

export interface ChunkedDocument {
  documentId: string;
  title: string;
  originalContent: string;
  chunks: DocumentChunk[];
  totalChunks: number;
  contentHash: string; // Hash of the entire document
  chunkHashes: string[]; // Array of chunk hashes for change detection
}

/**
 * Document chunking service that splits large documents into semantic chunks
 * Targets 10-15k tokens per chunk with natural paragraph boundaries
 */
export class DocumentChunker {
  private readonly TARGET_CHUNK_SIZE = 12500; // Middle of 10-15k range
  private readonly MIN_CHUNK_SIZE = 10000;
  private readonly MAX_CHUNK_SIZE = 15000;
  private readonly MIN_PARAGRAPH_SIZE = 100; // Minimum tokens for a paragraph to be considered

  /**
   * Chunk a document into semantic sections with natural boundaries
   */
  async chunkDocument(
    originalContent: string,
    title: string,
  ): Promise<ChunkedDocument> {
    const documentId = crypto.randomUUID();
    const contentHash = this.generateContentHash(originalContent);

    // First, split by major section headers (markdown style)
    const majorSections = this.splitByMajorSections(originalContent);

    const chunks: DocumentChunk[] = [];
    let currentChunk = "";
    let currentChunkStart = 0;
    let chunkIndex = 0;

    for (const section of majorSections) {
      const sectionTokens = encode(section.content).length;
      const currentChunkTokens = encode(currentChunk).length;

      // If adding this section would exceed max size, finalize current chunk
      if (currentChunkTokens > 0 && currentChunkTokens + sectionTokens > this.MAX_CHUNK_SIZE) {
        if (currentChunkTokens >= this.MIN_CHUNK_SIZE) {
          chunks.push(this.createChunk(
            currentChunk,
            chunkIndex,
            currentChunkStart,
            currentChunkStart + currentChunk.length,
            section.title
          ));
          chunkIndex++;
          currentChunk = "";
          currentChunkStart = section.startPosition;
        }
      }

      // Add section to current chunk
      if (currentChunk) {
        currentChunk += "\n\n" + section.content;
      } else {
        currentChunk = section.content;
        currentChunkStart = section.startPosition;
      }

      // If current chunk is large enough and we have a natural break, consider chunking
      const updatedChunkTokens = encode(currentChunk).length;
      if (updatedChunkTokens >= this.TARGET_CHUNK_SIZE) {
        // Try to find a good breaking point within the section
        const paragraphs = this.splitIntoParagraphs(section.content);
        if (paragraphs.length > 1) {
          // Split at paragraph boundary if beneficial
          const optimalSplit = this.findOptimalParagraphSplit(currentChunk);
          if (optimalSplit) {
            chunks.push(this.createChunk(
              optimalSplit.beforeSplit,
              chunkIndex,
              currentChunkStart,
              currentChunkStart + optimalSplit.beforeSplit.length,
              section.title
            ));
            chunkIndex++;
            currentChunk = optimalSplit.afterSplit;
            currentChunkStart = currentChunkStart + optimalSplit.beforeSplit.length;
          }
        }
      }
    }

    // Add remaining content as final chunk
    if (currentChunk.trim() && encode(currentChunk).length >= this.MIN_PARAGRAPH_SIZE) {
      chunks.push(this.createChunk(
        currentChunk,
        chunkIndex,
        currentChunkStart,
        originalContent.length
      ));
    }

    // Generate chunk hashes array
    const chunkHashes = chunks.map(chunk => chunk.contentHash);

    return {
      documentId,
      title,
      originalContent,
      chunks,
      totalChunks: chunks.length,
      contentHash,
      chunkHashes,
    };
  }

  private splitByMajorSections(content: string): Array<{
    content: string;
    title?: string;
    startPosition: number;
    endPosition: number;
  }> {
    const sections: Array<{
      content: string;
      title?: string;
      startPosition: number;
      endPosition: number;
    }> = [];

    // Split by markdown headers (# ## ### etc.) or common document patterns
    const headerRegex = /^(#{1,6}\s+.*$|={3,}$|-{3,}$)/gm;
    const matches = Array.from(content.matchAll(headerRegex));

    if (matches.length === 0) {
      // No headers found, treat as single section
      sections.push({
        content: content.trim(),
        startPosition: 0,
        endPosition: content.length,
      });
      return sections;
    }

    let lastIndex = 0;

    for (let i = 0; i < matches.length; i++) {
      const match = matches[i];
      const nextMatch = matches[i + 1];

      const sectionStart = lastIndex;
      const sectionEnd = nextMatch ? nextMatch.index! : content.length;

      const sectionContent = content.slice(sectionStart, sectionEnd).trim();

      if (sectionContent) {
        sections.push({
          content: sectionContent,
          title: this.extractSectionTitle(match[0]),
          startPosition: sectionStart,
          endPosition: sectionEnd,
        });
      }

      lastIndex = match.index! + match[0].length;
    }

    return sections;
  }

  private extractSectionTitle(header: string): string | undefined {
    // Extract title from markdown header
    const markdownMatch = header.match(/^#{1,6}\s+(.+)$/);
    if (markdownMatch) {
      return markdownMatch[1].trim();
    }
    return undefined;
  }

  private splitIntoParagraphs(content: string): string[] {
    // Split by double newlines (paragraph breaks) and filter out empty strings
    return content
      .split(/\n\s*\n/)
      .map(p => p.trim())
      .filter(p => p.length > 0);
  }

  private findOptimalParagraphSplit(content: string): {
    beforeSplit: string;
    afterSplit: string;
  } | null {
    const paragraphs = this.splitIntoParagraphs(content);
    if (paragraphs.length < 2) return null;

    let bestSplitIndex = -1;
    let bestScore = 0;

    // Find the split that gets us closest to target size
    for (let i = 1; i < paragraphs.length; i++) {
      const beforeSplit = paragraphs.slice(0, i).join("\n\n");
      const afterSplit = paragraphs.slice(i).join("\n\n");

      const beforeTokens = encode(beforeSplit).length;
      const afterTokens = encode(afterSplit).length;

      // Score based on how close we get to target, avoiding too small chunks
      if (beforeTokens >= this.MIN_CHUNK_SIZE && afterTokens >= this.MIN_PARAGRAPH_SIZE) {
        const beforeDistance = Math.abs(beforeTokens - this.TARGET_CHUNK_SIZE);
        const score = 1 / (1 + beforeDistance); // Higher score for closer to target

        if (score > bestScore) {
          bestScore = score;
          bestSplitIndex = i;
        }
      }
    }

    if (bestSplitIndex > 0) {
      return {
        beforeSplit: paragraphs.slice(0, bestSplitIndex).join("\n\n"),
        afterSplit: paragraphs.slice(bestSplitIndex).join("\n\n"),
      };
    }

    return null;
  }

  private createChunk(
    content: string,
    chunkIndex: number,
    startPosition: number,
    endPosition: number,
    title?: string
  ): DocumentChunk {
    // Generate a concise context/title if not provided
    const context = title || this.generateChunkContext(content);
    const contentHash = this.generateContentHash(content.trim());

    return {
      content: content.trim(),
      chunkIndex,
      title: context,
      context: `Chunk ${chunkIndex + 1}${context ? `: ${context}` : ""}`,
      startPosition,
      endPosition,
      contentHash,
    };
  }

  private generateChunkContext(content: string): string {
    // Extract first meaningful line as context (avoiding markdown syntax)
    const lines = content.split('\n').map(line => line.trim()).filter(Boolean);

    for (const line of lines.slice(0, 3)) {
      // Skip markdown headers and find first substantial content
      if (!line.match(/^#{1,6}\s/) && !line.match(/^[=-]{3,}$/) && line.length > 10) {
        return line.substring(0, 100) + (line.length > 100 ? "..." : "");
      }
    }

    return "Document content";
  }

  /**
   * Generate content hash for change detection
   */
  private generateContentHash(content: string): string {
    return crypto.createHash('sha256').update(content, 'utf8').digest('hex').substring(0, 16);
  }

  /**
   * Compare chunk hashes to detect changes
   */
  static compareChunkHashes(oldHashes: string[], newHashes: string[]): {
    changedIndices: number[];
    changePercentage: number;
  } {
    const maxLength = Math.max(oldHashes.length, newHashes.length);
    const changedIndices: number[] = [];

    for (let i = 0; i < maxLength; i++) {
      const oldHash = oldHashes[i];
      const newHash = newHashes[i];

      // Mark as changed if hash is different or chunk added/removed
      if (oldHash !== newHash) {
        changedIndices.push(i);
      }
    }

    const changePercentage = maxLength > 0 ? (changedIndices.length / maxLength) * 100 : 0;

    return {
      changedIndices,
      changePercentage,
    };
  }

  /**
   * Calculate document size in tokens for threshold decisions
   */
  static getDocumentSizeInTokens(content: string): number {
    return encode(content).length;
  }
}
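
A minimal usage sketch of the chunker, assuming two markdown strings markdownV1 and markdownV2; the title and variable names are illustrative:

// Chunk both versions, then use the static hash comparison to see how much
// of the document actually changed between them.
const chunker = new DocumentChunker();
const v1 = await chunker.chunkDocument(markdownV1, "Design Notes");
const v2 = await chunker.chunkDocument(markdownV2, "Design Notes");

const { changedIndices, changePercentage } = DocumentChunker.compareChunkHashes(
  v1.chunkHashes,
  v2.chunkHashes,
);
console.log(
  `${changedIndices.length}/${v2.totalChunks} chunks changed (${changePercentage.toFixed(1)}%)`,
);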
apps/webapp/app/services/documentDiffer.server.ts (new file, 204 lines)
@@ -0,0 +1,204 @@
import { encode } from "gpt-tokenizer";
import { DocumentChunker, type ChunkedDocument } from "./documentChunker.server";
import type { DocumentNode } from "@core/types";

export interface DifferentialDecision {
  shouldUseDifferential: boolean;
  strategy: "full_reingest" | "chunk_level_diff" | "new_document";
  reason: string;
  changedChunkIndices: number[];
  changePercentage: number;
  documentSizeTokens: number;
}

export interface ChunkComparison {
  chunkIndex: number;
  hasChanged: boolean;
  oldHash?: string;
  newHash: string;
  semanticSimilarity?: number;
}

/**
 * Service for implementing differential document processing logic
 * Determines when to use differential vs full re-ingestion based on
 * document size and change percentage thresholds
 */
export class DocumentDifferentialService {
  // Threshold constants based on our enhanced approach
  private readonly SMALL_DOC_THRESHOLD = 5 * 1000; // 5K tokens
  private readonly MEDIUM_DOC_THRESHOLD = 50 * 1000; // 50K tokens

  // Change percentage thresholds
  private readonly SMALL_CHANGE_THRESHOLD = 20; // 20%
  private readonly MEDIUM_CHANGE_THRESHOLD = 30; // 30%

  /**
   * Analyze whether to use differential processing for a document update
   */
  async analyzeDifferentialNeed(
    newContent: string,
    existingDocument: DocumentNode | null,
    newChunkedDocument: ChunkedDocument,
  ): Promise<DifferentialDecision> {
    // If no existing document, it's a new document
    if (!existingDocument) {
      return {
        shouldUseDifferential: false,
        strategy: "new_document",
        reason: "No existing document found",
        changedChunkIndices: [],
        changePercentage: 100,
        documentSizeTokens: encode(newContent).length,
      };
    }

    const documentSizeTokens = encode(newContent).length;

    // Quick content hash comparison
    if (existingDocument.contentHash === newChunkedDocument.contentHash) {
      return {
        shouldUseDifferential: false,
        strategy: "full_reingest", // No changes detected
        reason: "Document content unchanged",
        changedChunkIndices: [],
        changePercentage: 0,
        documentSizeTokens,
      };
    }

    // Compare chunk hashes to identify changes
    const chunkComparison = DocumentChunker.compareChunkHashes(
      existingDocument.chunkHashes || [],
      newChunkedDocument.chunkHashes,
    );

    const { changedIndices, changePercentage } = chunkComparison;

    // Apply threshold-based decision matrix
    const decision = this.applyThresholdDecision(
      documentSizeTokens,
      changePercentage,
      changedIndices,
    );

    return {
      ...decision,
      changedChunkIndices: changedIndices,
      changePercentage,
      documentSizeTokens,
    };
  }

  /**
   * Apply threshold-based decision matrix
   */
  private applyThresholdDecision(
    documentSizeTokens: number,
    changePercentage: number,
    changedIndices: number[],
  ): Pick<DifferentialDecision, "shouldUseDifferential" | "strategy" | "reason"> {
    // Small documents: always full re-ingest (cheap)
    if (documentSizeTokens < this.SMALL_DOC_THRESHOLD) {
      return {
        shouldUseDifferential: false,
        strategy: "full_reingest",
        reason: `Document too small (${documentSizeTokens} tokens < ${this.SMALL_DOC_THRESHOLD})`,
      };
    }

    // Medium documents (5-50K tokens)
    if (documentSizeTokens < this.MEDIUM_DOC_THRESHOLD) {
      if (changePercentage < this.SMALL_CHANGE_THRESHOLD) {
        return {
          shouldUseDifferential: true,
          strategy: "chunk_level_diff",
          reason: `Medium document with small changes (${changePercentage.toFixed(1)}% < ${this.SMALL_CHANGE_THRESHOLD}%)`,
        };
      } else {
        return {
          shouldUseDifferential: false,
          strategy: "full_reingest",
          reason: `Medium document with large changes (${changePercentage.toFixed(1)}% >= ${this.SMALL_CHANGE_THRESHOLD}%)`,
        };
      }
    }

    // Large documents (>50K tokens)
    if (changePercentage < this.MEDIUM_CHANGE_THRESHOLD) {
      return {
        shouldUseDifferential: true,
        strategy: "chunk_level_diff",
        reason: `Large document with moderate changes (${changePercentage.toFixed(1)}% < ${this.MEDIUM_CHANGE_THRESHOLD}%)`,
      };
    } else {
      return {
        shouldUseDifferential: false,
        strategy: "full_reingest",
        reason: `Large document with extensive changes (${changePercentage.toFixed(1)}% >= ${this.MEDIUM_CHANGE_THRESHOLD}%)`,
      };
    }
  }

  /**
   * Get detailed chunk comparison for differential processing
   */
  getChunkComparisons(
    existingDocument: DocumentNode,
    newChunkedDocument: ChunkedDocument,
  ): ChunkComparison[] {
    const oldHashes = existingDocument.chunkHashes || [];
    const newHashes = newChunkedDocument.chunkHashes;
    const maxLength = Math.max(oldHashes.length, newHashes.length);

    const comparisons: ChunkComparison[] = [];

    for (let i = 0; i < maxLength; i++) {
      const oldHash = oldHashes[i];
      const newHash = newHashes[i];

      comparisons.push({
        chunkIndex: i,
        hasChanged: oldHash !== newHash,
        oldHash,
        newHash: newHash || "", // Handle case where new doc has fewer chunks
      });
    }

    return comparisons;
  }

  /**
   * Filter chunks that need re-processing
   */
  getChunksNeedingReprocessing(
    chunkComparisons: ChunkComparison[],
  ): number[] {
    return chunkComparisons
      .filter(comparison => comparison.hasChanged)
      .map(comparison => comparison.chunkIndex);
  }

  /**
   * Calculate processing cost savings estimate
   */
  calculateCostSavings(
    totalChunks: number,
    changedChunks: number,
  ): {
    chunksToProcess: number;
    chunksSkipped: number;
    estimatedSavingsPercentage: number;
  } {
    const chunksSkipped = totalChunks - changedChunks;
    const estimatedSavingsPercentage = totalChunks > 0
      ? (chunksSkipped / totalChunks) * 100
      : 0;

    return {
      chunksToProcess: changedChunks,
      chunksSkipped,
      estimatedSavingsPercentage,
    };
  }
}
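
A sketch of how the decision matrix plays out, assuming existingDoc was loaded via findExistingDocument and newChunked came from DocumentChunker; the example figures simply trace the thresholds above:

// <5K tokens: always full_reingest. 5-50K tokens: chunk_level_diff below 20%
// change. >50K tokens: chunk_level_diff below 30% change.
const differ = new DocumentDifferentialService();
const decision = await differ.analyzeDifferentialNeed(newContent, existingDoc, newChunked);

if (decision.shouldUseDifferential) {
  // e.g. a 30K-token document with 12% changed chunks lands here
  reprocessChunks(decision.changedChunkIndices); // hypothetical helper
} else {
  // e.g. a 3K-token document, or a 60K-token document with 45% changed
  console.log(decision.reason);
}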
apps/webapp/app/services/documentVersioning.server.ts (new file, 321 lines)
@@ -0,0 +1,321 @@
import crypto from "crypto";
import type { DocumentNode } from "@core/types";
import {
  findExistingDocument,
  getDocumentVersions,
} from "./graphModels/document";
import {
  DocumentChunker,
  type ChunkedDocument,
} from "./documentChunker.server";
import { KnowledgeGraphService } from "./knowledgeGraph.server";

export interface DocumentVersion {
  uuid: string;
  version: number;
  contentHash: string;
  chunkHashes: string[];
  createdAt: Date;
  validAt: Date;
  title: string;
  metadata: Record<string, any>;
}

export interface VersionedDocumentInfo {
  isNewDocument: boolean;
  existingDocument: DocumentNode | null;
  newVersion: number;
  previousVersionUuid: string | null;
  hasContentChanged: boolean;
  chunkLevelChanges: {
    changedChunkIndices: number[];
    changePercentage: number;
    totalChunks: number;
  };
}

/**
 * Service for managing document versions and coordinating differential ingestion
 * Integrates with the knowledge graph for semantic similarity checks
 */
export class DocumentVersioningService {
  private knowledgeGraphService: KnowledgeGraphService;

  constructor() {
    this.knowledgeGraphService = new KnowledgeGraphService();
  }

  /**
   * Prepare a new document version with proper versioning information
   */
  async prepareDocumentVersion(
    sessionId: string,
    userId: string,
    title: string,
    content: string,
    source: string,
    metadata: Record<string, any> = {},
  ): Promise<{
    documentNode: DocumentNode;
    versionInfo: VersionedDocumentInfo;
    chunkedDocument: ChunkedDocument;
  }> {
    // Find existing document for version comparison
    const existingDocument = await findExistingDocument(sessionId, userId);

    // Chunk the new document content
    const documentChunker = new DocumentChunker();
    const chunkedDocument = await documentChunker.chunkDocument(content, title);

    // Determine version information
    const versionInfo = this.analyzeVersionChanges(
      existingDocument,
      chunkedDocument,
    );

    // Create new document node
    const documentNode = this.createVersionedDocumentNode(
      sessionId,
      userId,
      title,
      content,
      source,
      metadata,
      versionInfo,
      chunkedDocument,
    );

    return {
      documentNode,
      versionInfo,
      chunkedDocument,
    };
  }

  /**
   * Analyze changes between existing and new document versions
   */
  private analyzeVersionChanges(
    existingDocument: DocumentNode | null,
    newChunkedDocument: ChunkedDocument,
  ): VersionedDocumentInfo {
    if (!existingDocument) {
      return {
        isNewDocument: true,
        existingDocument: null,
        newVersion: 1,
        previousVersionUuid: null,
        hasContentChanged: true,
        chunkLevelChanges: {
          changedChunkIndices: [],
          changePercentage: 100,
          totalChunks: newChunkedDocument.totalChunks,
        },
      };
    }

    // Check if content has actually changed
    const hasContentChanged =
      existingDocument.contentHash !== newChunkedDocument.contentHash;

    if (!hasContentChanged) {
      return {
        isNewDocument: false,
        existingDocument,
        newVersion: existingDocument.version,
        previousVersionUuid: existingDocument.uuid,
        hasContentChanged: false,
        chunkLevelChanges: {
          changedChunkIndices: [],
          changePercentage: 0,
          totalChunks: newChunkedDocument.totalChunks,
        },
      };
    }

    // Analyze chunk-level changes
    const chunkComparison = DocumentChunker.compareChunkHashes(
      existingDocument.chunkHashes || [],
      newChunkedDocument.chunkHashes,
    );

    return {
      isNewDocument: false,
      existingDocument,
      newVersion: existingDocument.version + 1,
      previousVersionUuid: existingDocument.uuid,
      hasContentChanged: true,
      chunkLevelChanges: {
        changedChunkIndices: chunkComparison.changedIndices,
        changePercentage: chunkComparison.changePercentage,
        totalChunks: newChunkedDocument.totalChunks,
      },
    };
  }

  /**
   * Create a new versioned document node
   */
  private createVersionedDocumentNode(
    sessionId: string,
    userId: string,
    title: string,
    content: string,
    source: string,
    metadata: Record<string, any>,
    versionInfo: VersionedDocumentInfo,
    chunkedDocument: ChunkedDocument,
  ): DocumentNode {
    return {
      uuid: crypto.randomUUID(),
      title,
      originalContent: content,
      metadata: {
        ...metadata,
        chunkingStrategy: "semantic_sections",
        targetChunkSize: 12500,
        actualChunks: chunkedDocument.totalChunks,
      },
      source,
      userId,
      createdAt: new Date(),
      validAt: new Date(),
      totalChunks: chunkedDocument.totalChunks,
      version: versionInfo.newVersion,
      contentHash: chunkedDocument.contentHash,
      previousVersionUuid: versionInfo.previousVersionUuid || undefined,
      chunkHashes: chunkedDocument.chunkHashes,
      sessionId,
    };
  }

  /**
   * Get version history for a document
   */
  async getDocumentHistory(
    documentId: string,
    userId: string,
    limit: number = 10,
  ): Promise<DocumentVersion[]> {
    const versions = await getDocumentVersions(documentId, userId, limit);

    return versions.map((doc) => ({
      uuid: doc.uuid,
      version: doc.version,
      contentHash: doc.contentHash,
      chunkHashes: doc.chunkHashes || [],
      createdAt: doc.createdAt,
      validAt: doc.validAt,
      title: doc.title,
      metadata: doc.metadata,
    }));
  }

  /**
   * Check if statements should be invalidated based on semantic similarity
   * This implements the semantic similarity gate (>0.85 threshold)
   */
  async checkStatementInvalidation(
    oldChunkContent: string,
    newChunkContent: string,
    threshold: number = 0.85,
  ): Promise<{
    shouldInvalidate: boolean;
    semanticSimilarity: number;
  }> {
    try {
      // Generate embeddings for both chunks
      const [oldEmbedding, newEmbedding] = await Promise.all([
        this.knowledgeGraphService.getEmbedding(oldChunkContent),
        this.knowledgeGraphService.getEmbedding(newChunkContent),
      ]);

      // Calculate cosine similarity
      const similarity = this.calculateCosineSimilarity(
        oldEmbedding,
        newEmbedding,
      );

      // If similarity is below threshold, invalidate old statements
      const shouldInvalidate = similarity < threshold;

      return {
        shouldInvalidate,
        semanticSimilarity: similarity,
      };
    } catch (error) {
      console.error("Error checking statement invalidation:", error);
      // On error, be conservative and invalidate
      return {
        shouldInvalidate: true,
        semanticSimilarity: 0,
      };
    }
  }

  /**
   * Calculate cosine similarity between two embedding vectors
   */
  private calculateCosineSimilarity(vecA: number[], vecB: number[]): number {
    if (vecA.length !== vecB.length) {
      throw new Error("Vector dimensions must match");
    }

    let dotProduct = 0;
    let normA = 0;
    let normB = 0;

    for (let i = 0; i < vecA.length; i++) {
      dotProduct += vecA[i] * vecB[i];
      normA += vecA[i] * vecA[i];
      normB += vecB[i] * vecB[i];
    }

    normA = Math.sqrt(normA);
    normB = Math.sqrt(normB);

    if (normA === 0 || normB === 0) {
      return 0;
    }

    return dotProduct / (normA * normB);
  }

  /**
   * Generate a differential processing report
   */
  generateDifferentialReport(
    versionInfo: VersionedDocumentInfo,
    processingStats: {
      chunksProcessed: number;
      chunksSkipped: number;
      statementsCreated: number;
      statementsInvalidated: number;
      processingTimeMs: number;
    },
  ): {
    summary: string;
    metrics: Record<string, any>;
  } {
    const totalChunks = versionInfo.chunkLevelChanges.totalChunks;
    const changePercentage = versionInfo.chunkLevelChanges.changePercentage;
    const savingsPercentage =
      totalChunks > 0 ? (processingStats.chunksSkipped / totalChunks) * 100 : 0;

    return {
      summary: `Document v${versionInfo.newVersion}: ${changePercentage.toFixed(1)}% changed, ${savingsPercentage.toFixed(1)}% processing saved`,
      metrics: {
        version: versionInfo.newVersion,
        isNewDocument: versionInfo.isNewDocument,
        totalChunks,
        chunksChanged: processingStats.chunksProcessed,
        chunksSkipped: processingStats.chunksSkipped,
        changePercentage: changePercentage,
        processingTimeMs: processingStats.processingTimeMs,
        statementsCreated: processingStats.statementsCreated,
        statementsInvalidated: processingStats.statementsInvalidated,
        estimatedCostSavings: savingsPercentage,
      },
    };
  }
}
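
To make the similarity gate concrete, a hedged sketch of the versioning flow; the session ID, title, source, and the oldChunks/newChunks arrays are illustrative:

// Prepare the next version, then run the semantic gate on each changed chunk:
// similarity >= 0.85 preserves the old chunk's statements, anything below
// (or an embedding error) invalidates them.
const versioning = new DocumentVersioningService();
const { documentNode, versionInfo } = await versioning.prepareDocumentVersion(
  "session-abc",
  userId,
  "Design Notes",
  newMarkdown,
  "Docs",
);

for (const i of versionInfo.chunkLevelChanges.changedChunkIndices) {
  const { shouldInvalidate, semanticSimilarity } =
    await versioning.checkStatementInvalidation(oldChunks[i], newChunks[i]);
  console.log(`chunk ${i}: similarity ${semanticSimilarity.toFixed(2)}, invalidate=${shouldInvalidate}`);
}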
apps/webapp/app/services/graphModels/document.ts (new file, 250 lines)
@@ -0,0 +1,250 @@
import { runQuery } from "~/lib/neo4j.server";
import type { DocumentNode } from "@core/types";
import crypto from "crypto";

export async function saveDocument(document: DocumentNode): Promise<string> {
  const query = `
    MERGE (d:Document {uuid: $uuid})
    ON CREATE SET
      d.title = $title,
      d.originalContent = $originalContent,
      d.metadata = $metadata,
      d.source = $source,
      d.userId = $userId,
      d.createdAt = $createdAt,
      d.validAt = $validAt,
      d.totalChunks = $totalChunks,
      d.sessionId = $sessionId,
      d.version = $version,
      d.contentHash = $contentHash,
      d.previousVersionUuid = $previousVersionUuid,
      d.chunkHashes = $chunkHashes
    ON MATCH SET
      d.title = $title,
      d.originalContent = $originalContent,
      d.metadata = $metadata,
      d.source = $source,
      d.validAt = $validAt,
      d.totalChunks = $totalChunks,
      d.sessionId = $sessionId,
      d.version = $version,
      d.contentHash = $contentHash,
      d.previousVersionUuid = $previousVersionUuid,
      d.chunkHashes = $chunkHashes
    RETURN d.uuid as uuid
  `;

  const params = {
    uuid: document.uuid,
    title: document.title,
    originalContent: document.originalContent,
    metadata: JSON.stringify(document.metadata || {}),
    source: document.source,
    userId: document.userId || null,
    createdAt: document.createdAt.toISOString(),
    validAt: document.validAt.toISOString(),
    totalChunks: document.totalChunks || 0,
    sessionId: document.sessionId || null,
    version: document.version || 1,
    contentHash: document.contentHash,
    previousVersionUuid: document.previousVersionUuid || null,
    chunkHashes: document.chunkHashes || [],
  };

  const result = await runQuery(query, params);
  return result[0].get("uuid");
}

export async function linkEpisodeToDocument(
  episodeUuid: string,
  documentUuid: string,
  chunkIndex: number,
): Promise<void> {
  const query = `
    MATCH (e:Episode {uuid: $episodeUuid})
    MATCH (d:Document {uuid: $documentUuid})
    MERGE (d)-[r:CONTAINS_CHUNK {chunkIndex: $chunkIndex}]->(e)
    SET e.chunkIndex = $chunkIndex
    RETURN r
  `;

  const params = {
    episodeUuid,
    documentUuid,
    chunkIndex,
  };

  await runQuery(query, params);
}

export async function getDocument(
  documentUuid: string,
): Promise<DocumentNode | null> {
  const query = `
    MATCH (d:Document {uuid: $uuid})
    RETURN d
  `;

  const params = { uuid: documentUuid };
  const result = await runQuery(query, params);

  if (result.length === 0) return null;

  const record = result[0];
  const documentNode = record.get("d");

  return {
    uuid: documentNode.properties.uuid,
    title: documentNode.properties.title,
    originalContent: documentNode.properties.originalContent,
    metadata: JSON.parse(documentNode.properties.metadata || "{}"),
    source: documentNode.properties.source,
    userId: documentNode.properties.userId,
    createdAt: new Date(documentNode.properties.createdAt),
    validAt: new Date(documentNode.properties.validAt),
    totalChunks: documentNode.properties.totalChunks,
    version: documentNode.properties.version || 1,
    contentHash: documentNode.properties.contentHash || "",
    previousVersionUuid: documentNode.properties.previousVersionUuid || null,
    chunkHashes: documentNode.properties.chunkHashes || [],
  };
}

export async function getDocumentEpisodes(documentUuid: string): Promise<
  Array<{
    episodeUuid: string;
    chunkIndex: number;
    content: string;
  }>
> {
  const query = `
    MATCH (d:Document {uuid: $uuid})-[r:CONTAINS_CHUNK]->(e:Episode)
    RETURN e.uuid as episodeUuid, r.chunkIndex as chunkIndex, e.content as content
    ORDER BY r.chunkIndex ASC
  `;

  const params = { uuid: documentUuid };
  const result = await runQuery(query, params);

  return result.map((record) => ({
    episodeUuid: record.get("episodeUuid"),
    chunkIndex: record.get("chunkIndex"),
    content: record.get("content"),
  }));
}

export async function getUserDocuments(
  userId: string,
  limit: number = 50,
): Promise<DocumentNode[]> {
  const query = `
    MATCH (d:Document {userId: $userId})
    RETURN d
    ORDER BY d.createdAt DESC
    LIMIT $limit
  `;

  const params = { userId, limit };
  const result = await runQuery(query, params);

  return result.map((record) => {
    const documentNode = record.get("d");
    return {
      uuid: documentNode.properties.uuid,
      title: documentNode.properties.title,
      originalContent: documentNode.properties.originalContent,
      metadata: JSON.parse(documentNode.properties.metadata || "{}"),
      source: documentNode.properties.source,
      userId: documentNode.properties.userId,
      createdAt: new Date(documentNode.properties.createdAt),
      validAt: new Date(documentNode.properties.validAt),
      totalChunks: documentNode.properties.totalChunks,
      version: documentNode.properties.version || 1,
      contentHash: documentNode.properties.contentHash || "",
      previousVersionUuid: documentNode.properties.previousVersionUuid || null,
      chunkHashes: documentNode.properties.chunkHashes || [],
    };
  });
}

/**
 * Generate content hash for document versioning
 */
export function generateContentHash(content: string): string {
  return crypto.createHash("sha256").update(content, "utf8").digest("hex");
}

/**
 * Find existing document by sessionId and userId for version comparison
 */
export async function findExistingDocument(
  sessionId: string,
  userId: string,
): Promise<DocumentNode | null> {
  const query = `
    MATCH (d:Document {sessionId: $sessionId, userId: $userId})
    RETURN d
    ORDER BY d.version DESC
    LIMIT 1
  `;

  const params = { sessionId, userId };
  const result = await runQuery(query, params);

  if (result.length === 0) return null;

  const documentNode = result[0].get("d");
  return {
    uuid: documentNode.properties.uuid,
    title: documentNode.properties.title,
    originalContent: documentNode.properties.originalContent,
    metadata: JSON.parse(documentNode.properties.metadata || "{}"),
    source: documentNode.properties.source,
    userId: documentNode.properties.userId,
    createdAt: new Date(documentNode.properties.createdAt),
    validAt: new Date(documentNode.properties.validAt),
    totalChunks: documentNode.properties.totalChunks,
    version: documentNode.properties.version || 1,
    contentHash: documentNode.properties.contentHash || "",
    previousVersionUuid: documentNode.properties.previousVersionUuid || null,
    chunkHashes: documentNode.properties.chunkHashes || [],
  };
}

/**
 * Get document version history
 */
export async function getDocumentVersions(
  sessionId: string,
  userId: string,
  limit: number = 10,
): Promise<DocumentNode[]> {
  const query = `
    MATCH (d:Document {sessionId: $sessionId, userId: $userId})
    RETURN d
    ORDER BY d.version DESC
    LIMIT $limit
  `;

  const params = { sessionId, userId, limit };
  const result = await runQuery(query, params);

  return result.map((record) => {
    const documentNode = record.get("d");
    return {
      uuid: documentNode.properties.uuid,
      title: documentNode.properties.title,
      originalContent: documentNode.properties.originalContent,
      metadata: JSON.parse(documentNode.properties.metadata || "{}"),
      source: documentNode.properties.source,
      userId: documentNode.properties.userId,
      createdAt: new Date(documentNode.properties.createdAt),
      validAt: new Date(documentNode.properties.validAt),
      totalChunks: documentNode.properties.totalChunks,
      version: documentNode.properties.version || 1,
      contentHash: documentNode.properties.contentHash || "",
      previousVersionUuid: documentNode.properties.previousVersionUuid || null,
      chunkHashes: documentNode.properties.chunkHashes || [],
    };
  });
}
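
A sketch of the intended write path for these helpers, assuming the ingest pipeline has already created one Episode node per chunk; episodeUuids is an illustrative lookup:

// Persist the versioned Document node, then attach each chunk's Episode via
// a CONTAINS_CHUNK edge keyed by chunkIndex.
const documentUuid = await saveDocument(documentNode);
for (const chunk of chunkedDocument.chunks) {
  await linkEpisodeToDocument(episodeUuids[chunk.chunkIndex], documentUuid, chunk.chunkIndex);
}

// Later lookups: newest version for a session, or the chunk episodes in order.
const latest = await findExistingDocument("session-abc", userId);
const episodes = await getDocumentEpisodes(documentUuid);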
@@ -1,5 +1,5 @@
 import { runQuery } from "~/lib/neo4j.server";
-import type { EntityNode, EpisodicNode } from "@core/types";
+import { type EntityNode, EpisodeType, type EpisodicNode } from "@core/types";

 export async function saveEpisode(episode: EpisodicNode): Promise<string> {
   const query = `
@@ -83,8 +83,7 @@ export async function getRecentEpisodes(params: {
   source?: string;
   sessionId?: string;
 }): Promise<EpisodicNode[]> {
-  let filters = `WHERE e.validAt <= $referenceTime
-    AND e.userId = $userId`;
+  let filters = `WHERE e.validAt <= $referenceTime`;

   if (params.source) {
     filters += `\nAND e.source = $source`;
@@ -95,9 +94,11 @@ export async function getRecentEpisodes(params: {
   }

   const query = `
-    MATCH (e:Episode)
+    MATCH (e:Episode{userId: $userId})
     ${filters}
-    RETURN e
+    MATCH (e)-[:HAS_PROVENANCE]->(s:Statement)
+    WHERE s.invalidAt IS NULL
+    RETURN DISTINCT e
     ORDER BY e.validAt DESC
     LIMIT ${params.limit}
   `;
@@ -126,6 +127,7 @@ export async function getRecentEpisodes(params: {
       userId: episode.userId,
       space: episode.space,
       sessionId: episode.sessionId,
+      documentId: episode.documentId,
     };
   });
 }
@@ -170,6 +172,7 @@ export async function searchEpisodesByEmbedding(params: {
         ? JSON.parse(episode.attributesJson)
         : {},
       userId: episode.userId,
+      documentId: episode.documentId,
     };
   });
 }
@@ -307,6 +310,7 @@ export async function getEpisodeStatements(params: {
 }) {
   const query = `
     MATCH (episode:Episode {uuid: $episodeUuid, userId: $userId})-[:HAS_PROVENANCE]->(stmt:Statement)
+    WHERE stmt.invalidAt IS NULL
     RETURN stmt
   `;
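Review note: with the added HAS_PROVENANCE match, an episode whose statements have all been invalidated now drops out of recents entirely, and RETURN DISTINCT e keeps an episode from being returned once per surviving statement. A rough sketch of the call site, assuming the parameter shape implied by the hunk headers (all values illustrative):

const recents = await getRecentEpisodes({
  referenceTime: new Date().toISOString(),
  userId: "user-123",
  source: "github", // optional filter
  limit: 20,
});
// Each result still backs at least one valid (invalidAt IS NULL) statement.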
@@ -6,6 +6,8 @@ import {
   type EpisodicNode,
   type StatementNode,
   type Triple,
+  EpisodeTypeEnum,
+  type EpisodeType,
 } from "@core/types";
 import { logger } from "./logger.service";
 import { ClusteringService } from "./clustering.server";
@@ -42,13 +44,14 @@ import {
   searchStatementsByEmbedding,
 } from "./graphModels/statement";
 import { getEmbedding, makeModelCall } from "~/lib/model.server";
+import { runQuery } from "~/lib/neo4j.server";
 import {
   Apps,
   getNodeTypes,
   getNodeTypesString,
   isPresetType,
 } from "~/utils/presets/nodes";
-import { normalizePrompt } from "./prompts";
+import { normalizePrompt, normalizeDocumentPrompt } from "./prompts";
 import { type PrismaClient } from "@prisma/client";

 // Default number of previous episodes to retrieve for context
@@ -65,6 +68,162 @@ export class KnowledgeGraphService {
     return getEmbedding(text);
   }

+  /**
+   * Invalidate statements from a previous document version that are no longer supported
+   * by the new document content using semantic similarity analysis
+   */
+  async invalidateStatementsFromPreviousDocumentVersion(params: {
+    previousDocumentUuid: string;
+    newDocumentContent: string;
+    userId: string;
+    invalidatedBy: string;
+    semanticSimilarityThreshold?: number;
+  }): Promise<{
+    invalidatedStatements: string[];
+    preservedStatements: string[];
+    totalStatementsAnalyzed: number;
+  }> {
+    const threshold = params.semanticSimilarityThreshold || 0.75; // Lower threshold for document-level analysis
+    const invalidatedStatements: string[] = [];
+    const preservedStatements: string[] = [];
+
+    // Step 1: Get all statements from the previous document version
+    const previousStatements = await this.getStatementsFromDocument(
+      params.previousDocumentUuid,
+      params.userId,
+    );
+
+    if (previousStatements.length === 0) {
+      return {
+        invalidatedStatements: [],
+        preservedStatements: [],
+        totalStatementsAnalyzed: 0,
+      };
+    }
+
+    logger.log(
+      `Analyzing ${previousStatements.length} statements from previous document version`,
+    );
+
+    // Step 2: Generate embedding for new document content
+    const newDocumentEmbedding = await this.getEmbedding(
+      params.newDocumentContent,
+    );
+
+    // Step 3: For each statement, check if it's still semantically supported by new content
+    for (const statement of previousStatements) {
+      try {
+        // Generate embedding for the statement fact
+        const statementEmbedding = await this.getEmbedding(statement.fact);
+
+        // Calculate semantic similarity between statement and new document
+        const semanticSimilarity = this.calculateCosineSimilarity(
+          statementEmbedding,
+          newDocumentEmbedding,
+        );
+
+        if (semanticSimilarity < threshold) {
+          invalidatedStatements.push(statement.uuid);
+          logger.log(
+            `Invalidating statement: "${statement.fact}" (similarity: ${semanticSimilarity.toFixed(3)})`,
+          );
+        } else {
+          preservedStatements.push(statement.uuid);
+          logger.log(
+            `Preserving statement: "${statement.fact}" (similarity: ${semanticSimilarity.toFixed(3)})`,
+          );
+        }
+      } catch (error) {
+        logger.error(`Error analyzing statement ${statement.uuid}:`, { error });
+        // On error, be conservative and invalidate
+        invalidatedStatements.push(statement.uuid);
+      }
+    }
+
+    // Step 4: Bulk invalidate the selected statements
+    if (invalidatedStatements.length > 0) {
+      await invalidateStatements({
+        statementIds: invalidatedStatements,
+        invalidatedBy: params.invalidatedBy,
+      });
+
+      logger.log(`Document-level invalidation completed`, {
+        previousDocumentUuid: params.previousDocumentUuid,
+        totalAnalyzed: previousStatements.length,
+        invalidated: invalidatedStatements.length,
+        preserved: preservedStatements.length,
+        threshold,
+      });
+    }
+
+    return {
+      invalidatedStatements,
+      preservedStatements,
+      totalStatementsAnalyzed: previousStatements.length,
+    };
+  }
+
+  /**
+   * Get all statements that were created from episodes linked to a specific document
+   */
+  private async getStatementsFromDocument(
+    documentUuid: string,
+    userId: string,
+  ): Promise<StatementNode[]> {
+    const query = `
+      MATCH (doc:Document {uuid: $documentUuid, userId: $userId})-[:CONTAINS_CHUNK]->(episode:Episode)
+      MATCH (episode)-[:HAS_PROVENANCE]->(stmt:Statement)
+      RETURN stmt
+    `;
+
+    const result = await runQuery(query, {
+      documentUuid,
+      userId,
+    });
+
+    return result.map((record) => {
+      const stmt = record.get("stmt").properties;
+      return {
+        uuid: stmt.uuid,
+        fact: stmt.fact,
+        factEmbedding: stmt.factEmbedding || [],
+        createdAt: new Date(stmt.createdAt),
+        validAt: new Date(stmt.validAt),
+        invalidAt: stmt.invalidAt ? new Date(stmt.invalidAt) : null,
+        attributes: stmt.attributesJson ? JSON.parse(stmt.attributesJson) : {},
+        userId: stmt.userId,
+      };
+    });
+  }
+
+  /**
+   * Calculate cosine similarity between two embedding vectors
+   */
+  private calculateCosineSimilarity(vecA: number[], vecB: number[]): number {
+    if (vecA.length !== vecB.length) {
+      throw new Error("Vector dimensions must match");
+    }
+
+    let dotProduct = 0;
+    let normA = 0;
+    let normB = 0;
+
+    for (let i = 0; i < vecA.length; i++) {
+      dotProduct += vecA[i] * vecB[i];
+      normA += vecA[i] * vecA[i];
+      normB += vecB[i] * vecB[i];
+    }
+
+    normA = Math.sqrt(normA);
+    normB = Math.sqrt(normB);
+
+    if (normA === 0 || normB === 0) {
+      return 0;
+    }
+
+    return dotProduct / (normA * normB);
+  }
+
   /**
    * Process an episode and update the knowledge graph.
    *
@@ -110,6 +269,7 @@ KnowledgeGraphService {
       prisma,
       new Date(params.referenceTime),
       sessionContext,
+      params.type,
     );

     const normalizedTime = Date.now() - startTime;
@@ -253,7 +413,7 @@
     // Invalidate invalidated statements
     await invalidateStatements({
       statementIds: invalidatedStatements,
-      invalidatedBy: episode.uuid
+      invalidatedBy: episode.uuid,
     });

     const endTime = Date.now();
@@ -1146,6 +1306,7 @@
     prisma: PrismaClient,
     episodeTimestamp?: Date,
     sessionContext?: string,
+    contentType?: EpisodeType,
   ) {
     let appEnumValues: Apps[] = [];
     if (Apps[source.toUpperCase() as keyof typeof Apps]) {
@@ -1171,7 +1332,12 @@
       episodeTimestamp?.toISOString() || new Date().toISOString(),
       sessionContext,
     };
-    const messages = normalizePrompt(context);
+
+    // Route to appropriate normalization prompt based on content type
+    const messages =
+      contentType === EpisodeTypeEnum.DOCUMENT
+        ? normalizeDocumentPrompt(context)
+        : normalizePrompt(context);
     let responseText = "";
     await makeModelCall(false, messages, (text) => {
       responseText = text;
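Review note: the invalidation decision reduces to one cosine comparison per statement against a single whole-document embedding, which is deliberately coarse — a fact can score below 0.75 simply because the new document covers much more ground. A standalone toy check of the math (vectors made up):

const cosine = (a: number[], b: number[]): number => {
  let dot = 0, na = 0, nb = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    na += a[i] * a[i];
    nb += b[i] * b[i];
  }
  return na && nb ? dot / (Math.sqrt(na) * Math.sqrt(nb)) : 0;
};

console.log(cosine([1, 2, 3], [2, 4, 6])); // 1.0 — same direction, preserved
console.log(cosine([1, 0], [0, 1]));       // 0.0 — orthogonal, invalidated at threshold 0.75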
@@ -1,6 +1,5 @@
 import type { User } from "~/models/user.server";
 import { createWorkspace } from "~/models/workspace.server";
-import { singleton } from "~/utils/singleton";

 export async function postAuthentication({
   user,
@@ -262,3 +262,139 @@ ${context.relatedMemories}
     { role: "user", content: userPrompt },
   ];
 };
+
+export const normalizeDocumentPrompt = (
+  context: Record<string, any>,
+): CoreMessage[] => {
+  const sysPrompt = `You are C.O.R.E. (Contextual Observation & Recall Engine), a document memory processing system.
+
+Transform this document content into enriched factual statements for knowledge graph storage.
+
+<document_processing_approach>
+Focus on STRUCTURED CONTENT EXTRACTION optimized for documents:
+
+1. FACTUAL PRESERVATION - Extract concrete facts, data, and information
+2. STRUCTURAL AWARENESS - Preserve document hierarchy, lists, tables, code blocks
+3. CROSS-REFERENCE HANDLING - Maintain internal document references and connections
+4. TECHNICAL CONTENT - Handle specialized terminology, code, formulas, diagrams
+5. CONTEXTUAL CHUNKING - This content is part of a larger document, maintain coherence
+
+DOCUMENT-SPECIFIC ENRICHMENT:
+- Preserve technical accuracy and specialized vocabulary
+- Extract structured data (lists, tables, procedures, specifications)
+- Maintain hierarchical relationships (sections, subsections, bullet points)
+- Handle code blocks, formulas, and technical diagrams
+- Capture cross-references and internal document links
+- Preserve authorship, citations, and source attributions
+</document_processing_approach>
+
+<document_content_types>
+Handle various document formats:
+- Technical documentation and specifications
+- Research papers and academic content
+- Code documentation and API references
+- Business documents and reports
+- Notes and knowledge base articles
+- Structured content (wikis, blogs, guides)
+</document_content_types>
+
+<temporal_resolution>
+For document content, convert relative time references using document timestamp:
+- Publication dates, modification dates, version information
+- Time-sensitive information within the document content
+- Historical context and chronological information
+</temporal_resolution>
+
+<entity_types>
+${context.entityTypes}
+</entity_types>
+
+<ingestion_rules>
+${
+  context.ingestionRules
+    ? `Apply these rules for content from ${context.source}:
+${context.ingestionRules}
+
+CRITICAL: If content does NOT satisfy these rules, respond with "NOTHING_TO_REMEMBER" regardless of other criteria.`
+    : "No specific ingestion rules defined for this source."
+}
+</ingestion_rules>
+
+<document_quality_control>
+RETURN "NOTHING_TO_REMEMBER" if content consists ONLY of:
+- Navigation elements or UI text
+- Copyright notices and boilerplate
+- Empty sections or placeholder text
+- Pure formatting markup without content
+- Table of contents without substance
+- Repetitive headers without content
+
+STORE IN MEMORY for document content containing:
+- Factual information and data
+- Technical specifications and procedures
+- Structured knowledge and explanations
+- Code examples and implementations
+- Research findings and conclusions
+- Process descriptions and workflows
+- Reference information and definitions
+- Analysis, insights, and documented decisions
+</document_quality_control>
+
+<document_enrichment_examples>
+TECHNICAL CONTENT:
+- Original: "The API returns a 200 status code on success"
+- Enriched: "On June 15, 2024, the REST API documentation specifies that successful requests return HTTP status code 200."
+
+STRUCTURED CONTENT:
+- Original: "Step 1: Initialize the database\nStep 2: Run migrations"
+- Enriched: "On June 15, 2024, the deployment guide outlines a two-step process: first initialize the database, then run migrations."
+
+CROSS-REFERENCE:
+- Original: "As mentioned in Section 3, the algorithm complexity is O(n)"
+- Enriched: "On June 15, 2024, the algorithm analysis document confirms O(n) time complexity, referencing the detailed explanation in Section 3."
+</document_enrichment_examples>
+
+CRITICAL OUTPUT FORMAT REQUIREMENT:
+You MUST wrap your response in <output> tags. This is MANDATORY - no exceptions.
+
+If the document content should be stored in memory:
+<output>
+{{your_enriched_statement_here}}
+</output>
+
+If there is nothing worth remembering:
+<output>
+NOTHING_TO_REMEMBER
+</output>
+
+ALWAYS include opening <output> and closing </output> tags around your entire response.
+`;
+
+  const userPrompt = `
+<DOCUMENT_CONTENT>
+${context.episodeContent}
+</DOCUMENT_CONTENT>
+
+<SOURCE>
+${context.source}
+</SOURCE>
+
+<DOCUMENT_TIMESTAMP>
+${context.episodeTimestamp || "Not provided"}
+</DOCUMENT_TIMESTAMP>
+
+<DOCUMENT_SESSION_CONTEXT>
+${context.sessionContext || "No previous chunks in this document session"}
+</DOCUMENT_SESSION_CONTEXT>
+
+<RELATED_MEMORIES>
+${context.relatedMemories}
+</RELATED_MEMORIES>
+
+`;
+
+  return [
+    { role: "system", content: sysPrompt },
+    { role: "user", content: userPrompt },
+  ];
+};
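Review note: the prompt makes <output> tags mandatory, so the consuming side has to unwrap them and treat NOTHING_TO_REMEMBER as a skip. A minimal sketch of that parsing step (this diff does not show how responseText is actually parsed; the regex-based helper is an assumption):

function parseNormalizedOutput(responseText: string): string | null {
  const match = responseText.match(/<output>([\s\S]*?)<\/output>/);
  if (!match) return null; // model broke the format contract
  const body = match[1].trim();
  return body === "NOTHING_TO_REMEMBER" ? null : body;
}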
@ -214,8 +214,9 @@ export const resolveStatementPrompt = (
|
|||||||
content: `You are a knowledge graph expert that analyzes statements to detect duplications and TRUE contradictions.
|
content: `You are a knowledge graph expert that analyzes statements to detect duplications and TRUE contradictions.
|
||||||
You analyze multiple new statements against existing statements to determine whether the new statement duplicates any existing statement or ACTUALLY contradicts any existing statement.
|
You analyze multiple new statements against existing statements to determine whether the new statement duplicates any existing statement or ACTUALLY contradicts any existing statement.
|
||||||
|
|
||||||
CRITICAL: Distinguish between CONTRADICTIONS vs PROGRESSIONS:
|
CRITICAL: Distinguish between CONTRADICTIONS, SUPERSEDING EVOLUTION, and PROGRESSIONS:
|
||||||
- CONTRADICTIONS: Statements that CANNOT both be true (mutually exclusive facts)
|
- CONTRADICTIONS: Statements that CANNOT both be true (mutually exclusive facts)
|
||||||
|
- SUPERSEDING EVOLUTION: Sequential changes where the new state invalidates the previous state (e.g., technology migrations, job changes, relationship status changes)
|
||||||
- PROGRESSIONS: Sequential states or developments that CAN both be true (e.g., planning → execution, researching → deciding)
|
- PROGRESSIONS: Sequential states or developments that CAN both be true (e.g., planning → execution, researching → deciding)
|
||||||
|
|
||||||
|
|
||||||
@ -247,12 +248,22 @@ TRUE CONTRADICTIONS (mark as contradictions):
|
|||||||
- "Project completed" vs "Project cancelled" (mutually exclusive outcomes)
|
- "Project completed" vs "Project cancelled" (mutually exclusive outcomes)
|
||||||
- "Caroline is single" vs "Caroline is married" (same time period, opposite states)
|
- "Caroline is single" vs "Caroline is married" (same time period, opposite states)
|
||||||
|
|
||||||
|
SUPERSEDING EVOLUTION (mark as contradictions - old statement becomes invalid):
|
||||||
|
- "Application built with NextJS" vs "Application migrated to Remix" (technology stack change)
|
||||||
|
- "John works at CompanyA" vs "John joined CompanyB" (job change invalidates previous employment)
|
||||||
|
- "Database uses MySQL" vs "Database migrated to PostgreSQL" (infrastructure change)
|
||||||
|
- "System deployed on AWS" vs "System moved to Google Cloud" (platform migration)
|
||||||
|
- "Caroline living in Boston" vs "Caroline moved to Seattle" (location change)
|
||||||
|
- "Project using Python" vs "Project rewritten in TypeScript" (language migration)
|
||||||
|
|
||||||
NOT CONTRADICTIONS (do NOT mark as contradictions):
|
NOT CONTRADICTIONS (do NOT mark as contradictions):
|
||||||
- "Caroline researching adoption agencies" vs "Caroline finalized adoption agency" (research → decision progression)
|
- "Caroline researching adoption agencies" vs "Caroline finalized adoption agency" (research → decision progression)
|
||||||
- "Caroline planning camping next week" vs "Caroline went camping" (planning → execution progression)
|
- "Caroline planning camping next week" vs "Caroline went camping" (planning → execution progression)
|
||||||
- "User studying Python" vs "User completed Python course" (learning progression)
|
- "User studying Python" vs "User completed Python course" (learning progression)
|
||||||
- "Meeting scheduled for 3pm" vs "Meeting was held at 3pm" (planning → execution)
|
- "Meeting scheduled for 3pm" vs "Meeting was held at 3pm" (planning → execution)
|
||||||
- "Considering job offers" vs "Accepted job offer" (consideration → decision)
|
- "Considering job offers" vs "Accepted job offer" (consideration → decision)
|
||||||
|
- "Project in development" vs "Project launched" (development → deployment progression)
|
||||||
|
- "Learning React" vs "Built app with React" (skill → application progression)
|
||||||
|
|
||||||
5. MANDATORY OUTPUT FORMAT:
|
5. MANDATORY OUTPUT FORMAT:
|
||||||
|
|
||||||
@ -278,10 +289,11 @@ CRITICAL FORMATTING RULES:
|
|||||||
- Include NO text before <output> or after </output>
|
- Include NO text before <output> or after </output>
|
||||||
- Return valid JSON array with all statement IDs from NEW_STATEMENTS
|
- Return valid JSON array with all statement IDs from NEW_STATEMENTS
|
||||||
- If the new statement is a duplicate, include the UUID of the duplicate statement
|
- If the new statement is a duplicate, include the UUID of the duplicate statement
|
||||||
- For TRUE contradictions only, list statement UUIDs that the new statement contradicts
|
- For TRUE contradictions AND superseding evolution, list statement UUIDs that the new statement contradicts
|
||||||
- If a statement is both a contradiction AND a duplicate (rare case), mark it as a duplicate
|
- If a statement is both a contradiction AND a duplicate (rare case), mark it as a duplicate
|
||||||
- DO NOT mark progressions, temporal sequences, or state developments as contradictions
|
- DO NOT mark progressions, temporal sequences, or cumulative developments as contradictions
|
||||||
- ONLY mark genuine mutually exclusive facts as contradictions
|
- MARK superseding evolution (technology/job/location changes) as contradictions to invalidate old state
|
||||||
|
- ONLY mark genuine mutually exclusive facts and superseding evolution as contradictions
|
||||||
`,
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@@ -747,7 +747,7 @@ export function createHybridActionApiRoute<

   async function loader({ request, params }: LoaderFunctionArgs) {
     if (corsStrategy !== "none" && request.method.toUpperCase() === "OPTIONS") {
-      return apiCors(request, json({}));
+      return apiCors(request, json({ origin: "*" }));
     }

     return new Response(null, { status: 405 });
apps/webapp/app/trigger/ingest/ingest-document.ts (new file, 276 lines)
@@ -0,0 +1,276 @@
+import { queue, task } from "@trigger.dev/sdk";
+import { type z } from "zod";
+import crypto from "crypto";
+
+import { IngestionStatus } from "@core/database";
+import { EpisodeTypeEnum } from "@core/types";
+import { logger } from "~/services/logger.service";
+import { saveDocument } from "~/services/graphModels/document";
+import { type IngestBodyRequest } from "~/lib/ingest.server";
+import { DocumentVersioningService } from "~/services/documentVersioning.server";
+import { DocumentDifferentialService } from "~/services/documentDiffer.server";
+import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
+import { prisma } from "../utils/prisma";
+import { ingestTask } from "./ingest";
+
+const documentIngestionQueue = queue({
+  name: "document-ingestion-queue",
+  concurrencyLimit: 5,
+});
+
+// Register the Document Ingestion Trigger.dev task
+export const ingestDocumentTask = task({
+  id: "ingest-document",
+  queue: documentIngestionQueue,
+  machine: "medium-2x",
+  run: async (payload: {
+    body: z.infer<typeof IngestBodyRequest>;
+    userId: string;
+    workspaceId: string;
+    queueId: string;
+  }) => {
+    const startTime = Date.now();
+
+    try {
+      logger.log(`Processing document for user ${payload.userId}`, {
+        contentLength: payload.body.episodeBody.length,
+      });
+
+      await prisma.ingestionQueue.update({
+        where: { id: payload.queueId },
+        data: {
+          status: IngestionStatus.PROCESSING,
+        },
+      });
+
+      const documentBody = payload.body;
+
+      // Step 1: Initialize services and prepare document version
+      const versioningService = new DocumentVersioningService();
+      const differentialService = new DocumentDifferentialService();
+      const knowledgeGraphService = new KnowledgeGraphService();
+
+      const {
+        documentNode: document,
+        versionInfo,
+        chunkedDocument,
+      } = await versioningService.prepareDocumentVersion(
+        documentBody.sessionId!,
+        payload.userId,
+        documentBody.metadata?.documentTitle?.toString() || "Untitled Document",
+        documentBody.episodeBody,
+        documentBody.source,
+        documentBody.metadata || {},
+      );
+
+      logger.log(`Document version analysis:`, {
+        version: versionInfo.newVersion,
+        isNewDocument: versionInfo.isNewDocument,
+        hasContentChanged: versionInfo.hasContentChanged,
+        changePercentage: versionInfo.chunkLevelChanges.changePercentage,
+        changedChunks: versionInfo.chunkLevelChanges.changedChunkIndices.length,
+        totalChunks: versionInfo.chunkLevelChanges.totalChunks,
+      });
+
+      // Step 2: Determine processing strategy
+      const differentialDecision =
+        await differentialService.analyzeDifferentialNeed(
+          documentBody.episodeBody,
+          versionInfo.existingDocument,
+          chunkedDocument,
+        );
+
+      logger.log(`Differential analysis:`, {
+        shouldUseDifferential: differentialDecision.shouldUseDifferential,
+        strategy: differentialDecision.strategy,
+        reason: differentialDecision.reason,
+        documentSizeTokens: differentialDecision.documentSizeTokens,
+      });
+
+      // Step 3: Save the new document version
+      await saveDocument(document);
+
+      // Step 3.1: Invalidate statements from previous document version if it exists
+      let invalidationResults = null;
+      if (versionInfo.existingDocument && versionInfo.hasContentChanged) {
+        logger.log(
+          `Invalidating statements from previous document version: ${versionInfo.existingDocument.uuid}`,
+        );
+
+        invalidationResults =
+          await knowledgeGraphService.invalidateStatementsFromPreviousDocumentVersion(
+            {
+              previousDocumentUuid: versionInfo.existingDocument.uuid,
+              newDocumentContent: documentBody.episodeBody,
+              userId: payload.userId,
+              invalidatedBy: document.uuid,
+              semanticSimilarityThreshold: 0.75, // Configurable threshold
+            },
+          );
+
+        logger.log(`Statement invalidation completed:`, {
+          totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
+          invalidated: invalidationResults.invalidatedStatements.length,
+          preserved: invalidationResults.preservedStatements.length,
+        });
+      }
+
+      logger.log(
+        `Document chunked into ${chunkedDocument.chunks.length} chunks`,
+      );
+
+      // Step 4: Process chunks based on differential strategy
+      let chunksToProcess = chunkedDocument.chunks;
+      let processingMode = "full";
+
+      if (
+        differentialDecision.shouldUseDifferential &&
+        differentialDecision.strategy === "chunk_level_diff"
+      ) {
+        // Only process changed chunks
+        const chunkComparisons = differentialService.getChunkComparisons(
+          versionInfo.existingDocument!,
+          chunkedDocument,
+        );
+
+        const changedIndices =
+          differentialService.getChunksNeedingReprocessing(chunkComparisons);
+        chunksToProcess = chunkedDocument.chunks.filter((chunk) =>
+          changedIndices.includes(chunk.chunkIndex),
+        );
+        processingMode = "differential";
+
+        logger.log(
+          `Differential processing: ${chunksToProcess.length}/${chunkedDocument.chunks.length} chunks need reprocessing`,
+        );
+      } else if (differentialDecision.strategy === "full_reingest") {
+        // Process all chunks
+        processingMode = "full";
+        logger.log(
+          `Full reingestion: processing all ${chunkedDocument.chunks.length} chunks`,
+        );
+      }
+
+      // Step 5: Queue chunks for processing
+      const episodeHandlers = [];
+      for (const chunk of chunksToProcess) {
+        const chunkEpisodeData = {
+          episodeBody: chunk.content,
+          referenceTime: documentBody.referenceTime,
+          metadata: {
+            ...documentBody.metadata,
+            processingMode,
+            differentialStrategy: differentialDecision.strategy,
+            chunkHash: chunk.contentHash,
+            documentTitle:
+              documentBody.metadata?.documentTitle?.toString() ||
+              "Untitled Document",
+            chunkIndex: chunk.chunkIndex,
+            documentUuid: document.uuid,
+          },
+          source: documentBody.source,
+          spaceId: documentBody.spaceId,
+          sessionId: documentBody.sessionId,
+          type: EpisodeTypeEnum.DOCUMENT,
+        };
+
+        const episodeHandler = await ingestTask.trigger(
+          {
+            body: chunkEpisodeData,
+            userId: payload.userId,
+            workspaceId: payload.workspaceId,
+            queueId: payload.queueId,
+          },
+          {
+            queue: "ingestion-queue",
+            concurrencyKey: payload.userId,
+            tags: [payload.userId, payload.queueId, processingMode],
+          },
+        );
+
+        if (episodeHandler.id) {
+          episodeHandlers.push(episodeHandler.id);
+          logger.log(
+            `Queued chunk ${chunk.chunkIndex + 1} for ${processingMode} processing`,
+            {
+              handlerId: episodeHandler.id,
+              chunkSize: chunk.content.length,
+              chunkHash: chunk.contentHash,
+            },
+          );
+        }
+      }
+
+      // Calculate cost savings
+      const costSavings = differentialService.calculateCostSavings(
+        chunkedDocument.chunks.length,
+        chunksToProcess.length,
+      );
+
+      await prisma.ingestionQueue.update({
+        where: { id: payload.queueId },
+        data: {
+          output: {
+            documentUuid: document.uuid,
+            version: versionInfo.newVersion,
+            totalChunks: chunkedDocument.chunks.length,
+            chunksProcessed: chunksToProcess.length,
+            chunksSkipped: costSavings.chunksSkipped,
+            processingMode,
+            differentialStrategy: differentialDecision.strategy,
+            estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
+            statementInvalidation: invalidationResults
+              ? {
+                  totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
+                  invalidated: invalidationResults.invalidatedStatements.length,
+                  preserved: invalidationResults.preservedStatements.length,
+                }
+              : null,
+            episodes: [],
+            episodeHandlers,
+          },
+          status: IngestionStatus.PROCESSING,
+        },
+      });
+
+      const processingTimeMs = Date.now() - startTime;
+
+      logger.log(
+        `Document differential processing completed in ${processingTimeMs}ms`,
+        {
+          documentUuid: document.uuid,
+          version: versionInfo.newVersion,
+          processingMode,
+          totalChunks: chunkedDocument.chunks.length,
+          chunksProcessed: chunksToProcess.length,
+          chunksSkipped: costSavings.chunksSkipped,
+          estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
+          changePercentage: `${differentialDecision.changePercentage.toFixed(1)}%`,
+          statementInvalidation: invalidationResults
+            ? {
+                totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
+                invalidated: invalidationResults.invalidatedStatements.length,
+                preserved: invalidationResults.preservedStatements.length,
+              }
+            : "No previous version",
+        },
+      );
+
+      return { success: true };
+    } catch (err: any) {
+      await prisma.ingestionQueue.update({
+        where: { id: payload.queueId },
+        data: {
+          error: err.message,
+          status: IngestionStatus.FAILED,
+        },
+      });
+
+      logger.error(
+        `Error processing document for user ${payload.userId}:`,
+        err,
+      );
+      return { success: false, error: err.message };
+    }
+  },
+});
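Review note: from a producer's point of view, document ingestion is one queue call, and the stable sessionId is what ties successive uploads together as versions of the same document (prepareDocumentVersion above keys on it). A hedged sketch of the producing side, with field names taken from IngestBodyRequest and everything else illustrative:

await addToQueue(
  {
    episodeBody: fullDocumentText,
    referenceTime: new Date().toISOString(),
    source: "notion", // hypothetical source
    sessionId: "doc-onboarding-handbook", // stable per document; drives versioning
    metadata: { documentTitle: "Onboarding Handbook" },
    type: EpisodeTypeEnum.DOCUMENT,
  },
  userId,
);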
@@ -1,11 +1,13 @@
 import { queue, task } from "@trigger.dev/sdk";
 import { z } from "zod";
 import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
+import { linkEpisodeToDocument } from "~/services/graphModels/document";

 import { IngestionStatus } from "@core/database";
 import { logger } from "~/services/logger.service";
 import { triggerSpaceAssignment } from "../spaces/space-assignment";
 import { prisma } from "../utils/prisma";
+import { EpisodeType } from "@core/types";

 export const IngestBodyRequest = z.object({
   episodeBody: z.string(),
@@ -14,6 +16,9 @@ export const IngestBodyRequest = z.object({
   source: z.string(),
   spaceId: z.string().optional(),
   sessionId: z.string().optional(),
+  type: z
+    .enum([EpisodeType.CONVERSATION, EpisodeType.DOCUMENT])
+    .default(EpisodeType.CONVERSATION),
 });

 const ingestionQueue = queue({
@@ -35,7 +40,7 @@ export const ingestTask = task({
     try {
       logger.log(`Processing job for user ${payload.userId}`);

-      await prisma.ingestionQueue.update({
+      const ingestionQueue = await prisma.ingestionQueue.update({
         where: { id: payload.queueId },
         data: {
           status: IngestionStatus.PROCESSING,
@@ -54,11 +59,56 @@ export const ingestTask = task({
         prisma,
       );

+      // Link episode to document if it's a document chunk
+      if (
+        episodeBody.type === EpisodeType.DOCUMENT &&
+        episodeBody.metadata.documentUuid &&
+        episodeDetails.episodeUuid
+      ) {
+        try {
+          await linkEpisodeToDocument(
+            episodeDetails.episodeUuid,
+            episodeBody.metadata.documentUuid,
+            episodeBody.metadata.chunkIndex || 0,
+          );
+          logger.log(
+            `Linked episode ${episodeDetails.episodeUuid} to document ${episodeBody.metadata.documentUuid} at chunk ${episodeBody.metadata.chunkIndex || 0}`,
+          );
+        } catch (error) {
+          logger.error(`Failed to link episode to document:`, {
+            error,
+            episodeUuid: episodeDetails.episodeUuid,
+            documentUuid: episodeBody.metadata.documentUuid,
+          });
+        }
+      }
+
+      let finalOutput = episodeDetails;
+      let episodeUuids: string[] = episodeDetails.episodeUuid
+        ? [episodeDetails.episodeUuid]
+        : [];
+      let currentStatus: IngestionStatus = IngestionStatus.COMPLETED;
+      if (episodeBody.type === EpisodeType.DOCUMENT) {
+        const currentOutput = ingestionQueue.output as any;
+        currentOutput.episodes.push(episodeDetails);
+        episodeUuids = currentOutput.episodes.map(
+          (episode: any) => episode.episodeUuid,
+        );
+
+        finalOutput = {
+          ...currentOutput,
+        };
+
+        if (currentOutput.episodes.length !== currentOutput.totalChunks) {
+          currentStatus = IngestionStatus.PROCESSING;
+        }
+      }
+
       await prisma.ingestionQueue.update({
         where: { id: payload.queueId },
         data: {
-          output: episodeDetails,
-          status: IngestionStatus.COMPLETED,
+          output: finalOutput,
+          status: currentStatus,
         },
       });

@@ -69,12 +119,15 @@ export const ingestTask = task({
         workspaceId: payload.workspaceId,
         episodeId: episodeDetails?.episodeUuid,
       });
-      if (episodeDetails.episodeUuid) {
+      if (
+        episodeDetails.episodeUuid &&
+        currentStatus === IngestionStatus.COMPLETED
+      ) {
         await triggerSpaceAssignment({
           userId: payload.userId,
           workspaceId: payload.workspaceId,
           mode: "episode",
-          episodeId: episodeDetails.episodeUuid,
+          episodeIds: episodeUuids,
         });
       }
     } catch (assignmentError) {
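Review note: for document chunks the ingestionQueue row doubles as a completion counter — each chunk's run pushes its episode into output.episodes, and the status only flips to COMPLETED once episodes.length reaches the totalChunks that ingest-document.ts wrote into the output. The invariant, roughly:

// Sketch of the invariant, not the literal code above:
const done = currentOutput.episodes.length === currentOutput.totalChunks;
const status = done ? IngestionStatus.COMPLETED : IngestionStatus.PROCESSING;
// triggerSpaceAssignment waits for `done`, so it sees every chunk's episode at once.

One thing to watch: the push onto currentOutput.episodes is a read-modify-write on a shared row; if chunks for one user ever ran concurrently despite the per-user concurrencyKey, this would need a transaction or an atomic update.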
@@ -25,7 +25,7 @@ interface SpaceAssignmentPayload {
   workspaceId: string;
   mode: "new_space" | "episode";
   newSpaceId?: string; // For new_space mode
-  episodeId?: string; // For daily_batch mode (default: 1)
+  episodeIds?: string[]; // For daily_batch mode (default: 1)
   batchSize?: number; // Processing batch size
 }

@@ -181,7 +181,7 @@ export const spaceAssignmentTask = task({
       workspaceId,
       mode,
       newSpaceId,
-      episodeId,
+      episodeIds,
       batchSize = mode === "new_space"
         ? CONFIG.newSpaceMode.batchSize
         : CONFIG.episodeMode.batchSize,
@@ -191,7 +191,7 @@ export const spaceAssignmentTask = task({
       userId,
       mode,
       newSpaceId,
-      episodeId,
+      episodeIds,
       batchSize,
     });

@@ -213,7 +213,7 @@ export const spaceAssignmentTask = task({
       // 2. Get statements to analyze based on mode
       const statements = await getStatementsToAnalyze(userId, mode, {
         newSpaceId,
-        episodeId,
+        episodeIds,
       });

       if (statements.length === 0) {
@@ -454,7 +454,7 @@ export const spaceAssignmentTask = task({
 async function getStatementsToAnalyze(
   userId: string,
   mode: "new_space" | "episode",
-  options: { newSpaceId?: string; episodeId?: string },
+  options: { newSpaceId?: string; episodeIds?: string[] },
 ): Promise<StatementData[]> {
   let query: string;
   let params: any = { userId };
@@ -471,16 +471,19 @@ async function getStatementsToAnalyze(
       ORDER BY s.createdAt DESC
     `;
   } else {
+    // Optimized query: Use UNWIND for better performance with IN clause
+    // and combine entity lookups in single pattern
     query = `
-      MATCH (e:Episode {uuid: $episodeId, userId: $userId})-[:HAS_PROVENANCE]->(s:Statement)
+      UNWIND $episodeIds AS episodeId
+      MATCH (e:Episode {uuid: episodeId, userId: $userId})-[:HAS_PROVENANCE]->(s:Statement)
       WHERE s.invalidAt IS NULL
-      MATCH (s)-[:HAS_SUBJECT]->(subj:Entity)
-      MATCH (s)-[:HAS_PREDICATE]->(pred:Entity)
-      MATCH (s)-[:HAS_OBJECT]->(obj:Entity)
+      MATCH (s)-[:HAS_SUBJECT]->(subj:Entity),
+            (s)-[:HAS_PREDICATE]->(pred:Entity),
+            (s)-[:HAS_OBJECT]->(obj:Entity)
      RETURN s, subj.name as subject, pred.name as predicate, obj.name as object
       ORDER BY s.createdAt DESC
     `;
-    params.episodeId = options.episodeId;
+    params.episodeIds = options.episodeIds;
   }

   const result = await runQuery(query, params);
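Review note: UNWIND turns the id array into one row per uuid, so the same query covers a single episode or a whole document's worth of episodes, and the planner can still use the uuid lookup per row. The shape in isolation (parameter values illustrative):

const query = `
  UNWIND $episodeIds AS episodeId
  MATCH (e:Episode {uuid: episodeId, userId: $userId})-[:HAS_PROVENANCE]->(s:Statement)
  WHERE s.invalidAt IS NULL
  RETURN DISTINCT s
`;
await runQuery(query, { episodeIds: ["uuid-1", "uuid-2"], userId: "user-123" });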
@@ -1,4 +1,4 @@
-import { type Message } from "@core/types";
+import { EpisodeTypeEnum, type Message } from "@core/types";
 import { addToQueue } from "./queue";
 import { triggerWebhookDelivery } from "../webhooks/webhook-delivery";
 import { logger } from "@trigger.dev/sdk";
@@ -149,6 +149,7 @@ export const createActivities = async ({
         episodeBody: message.data.text,
         referenceTime: new Date().toISOString(),
         source: integrationAccount?.integrationDefinition.slug,
+        type: EpisodeTypeEnum.CONVERSATION,
       };

       const queueResponse = await addToQueue(
@@ -20,7 +20,7 @@ export async function apiCors(
     return response;
   }

-  return cors(request, response, options);
+  return cors(request, response, { ...options });
 }

 export function makeApiCors(
@@ -1,3 +1,4 @@
+import { EpisodeTypeEnum } from "@core/types";
 import { addToQueue } from "~/lib/ingest.server";
 import { logger } from "~/services/logger.service";
 import { SearchService } from "~/services/search.server";
@@ -110,11 +111,12 @@ export async function callMemoryTool(
 // Handler for memory_ingest
 async function handleMemoryIngest(args: any) {
   try {
-    const response = addToQueue(
+    const response = await addToQueue(
      {
        episodeBody: args.message,
        referenceTime: new Date().toISOString(),
        source: args.source,
+       type: EpisodeTypeEnum.CONVERSATION,
      },
      args.userId,
    );
@@ -122,7 +124,10 @@ async function handleMemoryIngest(args: any) {
       content: [
         {
           type: "text",
-          text: JSON.stringify(response),
+          text: JSON.stringify({
+            success: true,
+            id: response.id,
+          }),
         },
       ],
     };
@@ -97,6 +97,7 @@
     "execa": "^9.6.0",
     "express": "^4.18.1",
     "fast-sort": "^3.4.0",
+    "gpt-tokenizer": "^3.0.1",
     "graphology": "^0.26.0",
     "graphology-layout-force": "^0.2.4",
     "graphology-layout-forceatlas2": "^0.10.1",
@@ -125,6 +126,7 @@
     "remix-themes": "^2.0.4",
     "remix-typedjson": "0.3.1",
     "remix-utils": "^7.7.0",
+    "react-markdown": "10.1.0",
     "sdk": "link:@modelcontextprotocol/sdk",
     "sigma": "^3.0.2",
     "simple-oauth2": "^5.1.0",
@@ -174,10 +176,10 @@
     "prettier-plugin-tailwindcss": "^0.6.11",
     "tailwind-scrollbar": "^4.0.2",
     "tailwindcss": "4.1.7",
+    "tsx": "4.20.4",
     "typescript": "5.8.3",
     "vite": "^6.0.0",
-    "vite-tsconfig-paths": "^4.2.1",
-    "tsx": "4.20.4"
+    "vite-tsconfig-paths": "^4.2.1"
   },
   "engines": {
     "node": ">=20.0.0"
@@ -16,7 +16,9 @@ async function init() {
   const build = viteDevServer
     ? () => viteDevServer.ssrLoadModule("virtual:remix/server-build")
     : await import("./build/server/index.js");
-  const module = build.entry?.module;
+  const module = viteDevServer
+    ? (await build()).entry.module
+    : build.entry?.module;
   remixHandler = createRequestHandler({ build });
   const app = express();
   app.use(compression());
@@ -34,6 +34,8 @@ export default defineConfig({
       "tailwindcss",
       "@tiptap/react",
       "react-tweet",
+      "posthog-js",
+      "posthog-js/react",
     ],
     external: ["@prisma/client"],
   },
@@ -1,4 +1,4 @@
-VERSION=0.1.19
+VERSION=0.1.20

 # Nest run in docker, change host to database container name
 DB_HOST=postgres
@@ -1,7 +1,7 @@
 {
   "name": "core",
   "private": true,
-  "version": "0.1.19",
+  "version": "0.1.20",
   "workspaces": [
     "apps/*",
     "packages/*"
@@ -33,15 +33,30 @@ export default function WelcomeEmail() {
           brainstorming sessions from claude desktop via mcp. solve context loss problems across ai
           tools with persistent, cross-session memory. add this url and get started
         </Text>
-        <Link
-          style={{
-            ...anchor,
-            marginTop: "10px",
-            marginBottom: "10px",
-          }}
-        >
-          https://core.heysol.ai/api/v1/mcp?source='Your Coding Agent'
-        </Link>
+        <Text style={{ ...paragraphLight, display: "flex", alignItems: "center", marginTop: 0 }}>
+          <Link
+            style={{
+              ...anchor,
+              display: "inline-flex",
+            }}
+          >
+            https://core.heysol.ai/api/v1/mcp?source='Your Coding Agent'
+          </Link>
+          . Check how to connect{" "}
+          <Link
+            style={{
+              ...anchor,
+              marginLeft: "4px",
+              display: "inline-flex",
+            }}
+            href="https://docs.heysol.ai/providers/claude"
+          >
+            claude
+          </Link>
+          .
+        </Text>

         <Img
           alt="Claude"
           style={{
@@ -57,7 +72,18 @@ export default function WelcomeEmail() {
         <Text style={paragraphLight}>
           recall relevant context from core memory in chatgpt, grok, and gemini. save conversations
           and content from chatgpt, grok, gemini, twitter, youtube, blog posts, and any webpage
-          directly into your Core memory with simple text selection.
+          directly into your Core memory with simple text selection. Check steps to connect
+          <Link
+            style={{
+              ...anchor,
+              marginLeft: "4px",
+              display: "inline-flex",
+            }}
+            href="https://docs.heysol.ai/providers/browser-extension"
+          >
+            here
+          </Link>
+          .
         </Text>
         <Img
           alt="Claude"
@@ -73,11 +99,14 @@ export default function WelcomeEmail() {

         <Text style={heading}>need real-time, human help to get started? </Text>
         <Text style={paragraphLight}>
-          - join our discord community & get direct help from our team + over 100+ enthusiasts using
-          Core memory
+          - join our{" "}
+          <Link style={anchor} href="https://discord.gg/YGUZcvDjUa">
+            discord community
+          </Link>{" "}
+          & get direct help from our team + over 100+ enthusiasts using Core memory
         </Text>
         <Text style={paragraphLight}>
-          - We are open-source us on our repo -{" "}
+          - We are open-source us ⭐ on our repo -{" "}
           <Link style={anchor} href="https://github.com/RedPlanetHQ/core">
             https://github.com/RedPlanetHQ/core
           </Link>
@@ -15,7 +15,7 @@
     "nodemailer": "^6.9.16",
     "react": "^18.2.0",
     "react-email": "^2.1.1",
-    "resend": "^3.2.0",
+    "resend": "^6.0.2",
     "tiny-invariant": "^1.2.0",
     "zod": "3.23.8"
   },
@@ -27,7 +27,7 @@ export class ResendMailTransport implements MailTransport {
     if (result.error) {
       console.log(result);
       console.error(
         `Failed to send email to ${to}, ${subject}. Error ${result.error.name}: ${result.error.message}`
       );
       throw new EmailError(result.error);
     }
@@ -44,7 +44,7 @@ export class ResendMailTransport implements MailTransport {

     if (result.error) {
       console.error(
-        `Failed to send email to ${to}, ${subject}. Error ${result.error.name}: ${result.error.message}`
+        `Failed to send email plain to ${to}, ${subject}. Error ${result.error.name}: ${result.error.message}`
       );
       throw new EmailError(result.error);
     }
@@ -1,6 +1,23 @@
-export enum EpisodeType {
-  Conversation = "CONVERSATION",
-  Text = "TEXT",
+/**
+ * Interface for document node in the reified knowledge graph
+ * Documents are parent containers for episodic chunks
+ */
+export interface DocumentNode {
+  uuid: string;
+  title: string;
+  originalContent: string;
+  metadata: Record<string, any>;
+  source: string;
+  userId: string;
+  createdAt: Date;
+  validAt: Date;
+  totalChunks: number;
+  sessionId?: string;
+  // Version tracking for differential ingestion
+  version: number;
+  contentHash: string;
+  previousVersionUuid?: string;
+  chunkHashes?: string[]; // Hash of each chunk for change detection
 }

 /**
@@ -21,6 +38,7 @@ export interface EpisodicNode {
   space?: string;
   sessionId?: string;
   recallCount?: number;
+  chunkIndex?: number; // Index of this chunk within the document
 }

 /**
@@ -72,14 +90,27 @@ export interface Triple {
   provenance: EpisodicNode;
 }

+export enum EpisodeTypeEnum {
+  CONVERSATION = "CONVERSATION",
+  DOCUMENT = "DOCUMENT",
+}
+
+export const EpisodeType = {
+  CONVERSATION: "CONVERSATION",
+  DOCUMENT: "DOCUMENT",
+};
+
+export type EpisodeType = (typeof EpisodeType)[keyof typeof EpisodeType];
+
 export type AddEpisodeParams = {
   episodeBody: string;
   referenceTime: Date;
-  metadata: Record<string, any>;
+  metadata?: Record<string, any>;
   source: string;
   userId: string;
   spaceId?: string;
   sessionId?: string;
+  type?: EpisodeType;
 };

 export type AddEpisodeResult = {
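Review note: the const-object-plus-derived-type pattern lets call sites pass plain strings where the old enum required members, while EpisodeTypeEnum stays for nominal-enum ergonomics. One caveat worth flagging: without `as const`, keyof-indexing an object of plain string properties widens the derived type to string rather than a literal union. The pattern in isolation:

const EpisodeType = {
  CONVERSATION: "CONVERSATION",
  DOCUMENT: "DOCUMENT",
} as const; // without `as const`, the derived type below widens to `string`

type EpisodeType = (typeof EpisodeType)[keyof typeof EpisodeType];
// => "CONVERSATION" | "DOCUMENT"

const t: EpisodeType = "DOCUMENT"; // plain string literals are assignable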
|||||||
184
pnpm-lock.yaml
generated
184
pnpm-lock.yaml
generated
@ -529,6 +529,9 @@ importers:
       fast-sort:
         specifier: ^3.4.0
         version: 3.4.1
+      gpt-tokenizer:
+        specifier: ^3.0.1
+        version: 3.0.1
       graphology:
         specifier: ^0.26.0
         version: 0.26.0(graphology-types@0.24.8)
@ -592,6 +595,9 @@ importers:
       react-dom:
         specifier: ^18.2.0
         version: 18.3.1(react@18.3.1)
+      react-markdown:
+        specifier: 10.1.0
+        version: 10.1.0(@types/react@18.2.69)(react@18.3.1)
       react-resizable-panels:
         specifier: ^1.0.9
         version: 1.0.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@ -649,7 +655,7 @@ importers:
     devDependencies:
       '@remix-run/dev':
         specifier: 2.16.7
-        version: 2.16.7(@remix-run/react@2.16.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3))(@remix-run/serve@2.16.7(typescript@5.8.3))(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(typescript@5.8.3)(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))(yaml@2.8.0)
+        version: 2.16.7(@remix-run/react@2.16.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3))(@remix-run/serve@2.16.7(typescript@5.8.3))(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(typescript@5.8.3)(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))(yaml@2.8.0)
       '@remix-run/eslint-config':
         specifier: 2.16.7
         version: 2.16.7(eslint@8.57.1)(react@18.3.1)(typescript@5.8.3)
@ -664,7 +670,7 @@ importers:
         version: 0.5.16(tailwindcss@4.1.7)
       '@tailwindcss/vite':
         specifier: ^4.1.7
-        version: 4.1.9(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))
+        version: 4.1.9(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))
       '@trigger.dev/build':
         specifier: 4.0.0-v4-beta.22
         version: 4.0.0-v4-beta.22(typescript@5.8.3)
@ -763,10 +769,10 @@ importers:
         version: 5.8.3
       vite:
         specifier: ^6.0.0
-        version: 6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+        version: 6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
       vite-tsconfig-paths:
         specifier: ^4.2.1
-        version: 4.3.2(typescript@5.8.3)(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))
+        version: 4.3.2(typescript@5.8.3)(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))
 
   packages/database:
     dependencies:
@ -805,8 +811,8 @@ importers:
         specifier: ^2.1.1
         version: 2.1.6(@opentelemetry/api@1.9.0)(@swc/helpers@0.5.17)(eslint@8.57.1)(sass@1.89.2)
       resend:
-        specifier: ^3.2.0
-        version: 3.5.0(react-dom@18.2.0(react@18.2.0))(react@18.3.1)
+        specifier: ^6.0.2
+        version: 6.0.2(@react-email/render@0.0.12)
       tiny-invariant:
         specifier: ^1.2.0
         version: 1.3.3
@ -835,7 +841,7 @@ importers:
         version: 20.19.7
       tsup:
         specifier: ^8.0.1
-        version: 8.5.0(@swc/core@1.3.101)(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0)
+        version: 8.5.0(@swc/core@1.3.101(@swc/helpers@0.5.17))(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0)
       typescript:
         specifier: ^5.0.0
         version: 5.8.3
@ -872,7 +878,7 @@ importers:
         version: 6.0.1
       tsup:
         specifier: ^8.0.1
-        version: 8.5.0(@swc/core@1.3.101)(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0)
+        version: 8.5.0(@swc/core@1.3.101(@swc/helpers@0.5.17))(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0)
       typescript:
         specifier: ^5.3.0
         version: 5.8.3
@ -3963,13 +3969,6 @@ packages:
     resolution: {integrity: sha512-S8WRv/PqECEi6x0QJBj0asnAb5GFtJaHlnByxLETLkgJjc76cxMYDH4r9wdbuJ4sjkcbpwP3LPnVzwS+aIjT7g==}
     engines: {node: '>=18.0.0'}
 
-  '@react-email/render@0.0.16':
-    resolution: {integrity: sha512-wDaMy27xAq1cJHtSFptp0DTKPuV2GYhloqia95ub/DH9Dea1aWYsbdM918MOc/b/HvVS3w1z8DWzfAk13bGStQ==}
-    engines: {node: '>=18.0.0'}
-    peerDependencies:
-      react: ^18.2.0
-      react-dom: ^18.2.0
-
   '@react-email/row@0.0.7':
     resolution: {integrity: sha512-h7pwrLVGk5CIx7Ai/oPxBgCCAGY7BEpCUQ7FCzi4+eThcs5IdjSwDPefLEkwaFS8KZc56UNwTAH92kNq5B7blg==}
     engines: {node: '>=18.0.0'}
@ -7317,9 +7316,6 @@ packages:
     resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==}
     engines: {node: '>=4'}
 
-  fast-deep-equal@2.0.1:
-    resolution: {integrity: sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==}
-
   fast-deep-equal@3.1.3:
     resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
 
@ -7634,6 +7630,9 @@ packages:
     resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
     engines: {node: '>= 0.4'}
 
+  gpt-tokenizer@3.0.1:
+    resolution: {integrity: sha512-5jdaspBq/w4sWw322SvQj1Fku+CN4OAfYZeeEg8U7CWtxBz+zkxZ3h0YOHD43ee+nZYZ5Ud70HRN0ANcdIj4qg==}
+
   graceful-fs@4.2.11:
     resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
 
@ -10077,6 +10076,12 @@ packages:
   react-lifecycles-compat@3.0.4:
     resolution: {integrity: sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==}
 
+  react-markdown@10.1.0:
+    resolution: {integrity: sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==}
+    peerDependencies:
+      '@types/react': '>=18'
+      react: '>=18'
+
   react-markdown@9.1.0:
     resolution: {integrity: sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw==}
     peerDependencies:
@ -10086,9 +10091,6 @@ packages:
   react-moveable@0.56.0:
     resolution: {integrity: sha512-FmJNmIOsOA36mdxbrc/huiE4wuXSRlmon/o+/OrfNhSiYYYL0AV5oObtPluEhb2Yr/7EfYWBHTxF5aWAvjg1SA==}
 
-  react-promise-suspense@0.3.4:
-    resolution: {integrity: sha512-I42jl7L3Ze6kZaq+7zXWSunBa3b1on5yfvUW6Eo/3fFOj6dZ5Bqmcd264nJbTK/gn1HjjILAjSwnZbV4RpSaNQ==}
-
   react-refresh@0.14.2:
     resolution: {integrity: sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==}
     engines: {node: '>=0.10.0'}
@ -10353,9 +10355,14 @@ packages:
     resolution: {integrity: sha512-L9jEkOi3ASd9PYit2cwRfyppc9NoABujTP8/5gFcbERmo5jUoAKovIC3fsF17pkTnGsrByysqX+Kxd2OTNI1ww==}
     engines: {node: '>=0.10.5'}
 
-  resend@3.5.0:
-    resolution: {integrity: sha512-bKu4LhXSecP6krvhfDzyDESApYdNfjirD5kykkT1xO0Cj9TKSiGh5Void4pGTs3Am+inSnp4dg0B5XzdwHBJOQ==}
+  resend@6.0.2:
+    resolution: {integrity: sha512-um08qWpSVvEVqAePEy/bsa7pqtnJK+qTCZ0Et7YE7xuqM46J0C9gnSbIJKR3LIcRVMgO9jUeot8rH0UI84eqMQ==}
     engines: {node: '>=18'}
+    peerDependencies:
+      '@react-email/render': ^1.1.0
+    peerDependenciesMeta:
+      '@react-email/render':
+        optional: true
 
   resolve-from@4.0.0:
     resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
@ -13595,7 +13602,7 @@ snapshots:
     dependencies:
       '@floating-ui/dom': 1.7.1
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
 
   '@floating-ui/react-dom@2.1.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
     dependencies:
@ -14348,7 +14355,7 @@ snapshots:
     dependencies:
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14402,7 +14409,7 @@
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14430,7 +14437,7 @@
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@radix-ui/react-slot': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14563,7 +14570,7 @@
       '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@18.2.47)(react@18.2.0)
      react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14628,7 +14635,7 @@
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14720,7 +14727,7 @@
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       aria-hidden: 1.2.6
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
       react-remove-scroll: 2.5.7(@types/react@18.2.47)(react@18.2.0)
     optionalDependencies:
       '@types/react': 18.2.47
@ -14762,7 +14769,7 @@
       '@radix-ui/react-use-size': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/rect': 1.1.0
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14797,7 +14804,7 @@
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14825,7 +14832,7 @@
       '@radix-ui/react-compose-refs': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14851,7 +14858,7 @@
     dependencies:
       '@radix-ui/react-slot': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -14877,7 +14884,7 @@
       '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -15062,7 +15069,7 @@
       '@radix-ui/react-toggle': 1.1.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -15073,7 +15080,7 @@
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -15093,7 +15100,7 @@
       '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.2.47)(react@18.2.0)
       '@radix-ui/react-visually-hidden': 1.1.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -15245,7 +15252,7 @@
     dependencies:
       '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.18)(@types/react@18.2.47)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
     optionalDependencies:
       '@types/react': 18.2.47
       '@types/react-dom': 18.2.18
@ -15361,14 +15368,6 @@
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
 
-  '@react-email/render@0.0.16(react-dom@18.2.0(react@18.2.0))(react@18.3.1)':
-    dependencies:
-      html-to-text: 9.0.5
-      js-beautify: 1.15.4
-      react: 18.3.1
-      react-dom: 18.2.0(react@18.2.0)
-      react-promise-suspense: 0.3.4
-
   '@react-email/row@0.0.7(react@18.3.1)':
     dependencies:
       react: 18.3.1
@ -15400,7 +15399,7 @@ snapshots:
     transitivePeerDependencies:
       - encoding
 
-  '@remix-run/dev@2.16.7(@remix-run/react@2.16.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3))(@remix-run/serve@2.16.7(typescript@5.8.3))(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(typescript@5.8.3)(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))(yaml@2.8.0)':
+  '@remix-run/dev@2.16.7(@remix-run/react@2.16.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3))(@remix-run/serve@2.16.7(typescript@5.8.3))(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(typescript@5.8.3)(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))(yaml@2.8.0)':
     dependencies:
       '@babel/core': 7.27.4
       '@babel/generator': 7.27.5
@ -15417,7 +15416,7 @@
       '@remix-run/router': 1.23.0
       '@remix-run/server-runtime': 2.16.7(typescript@5.8.3)
       '@types/mdx': 2.0.13
-      '@vanilla-extract/integration': 6.5.0(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
+      '@vanilla-extract/integration': 6.5.0(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
       arg: 5.0.2
       cacache: 17.1.4
       chalk: 4.1.2
@ -15457,12 +15456,12 @@
       tar-fs: 2.1.3
       tsconfig-paths: 4.2.0
       valibot: 0.41.0(typescript@5.8.3)
-      vite-node: 3.2.3(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+      vite-node: 3.2.3(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
       ws: 7.5.10
     optionalDependencies:
       '@remix-run/serve': 2.16.7(typescript@5.8.3)
       typescript: 5.8.3
-      vite: 6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+      vite: 6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
     transitivePeerDependencies:
       - '@types/node'
       - babel-plugin-macros
@ -16425,12 +16424,12 @@
       postcss-selector-parser: 6.0.10
       tailwindcss: 4.1.7
 
-  '@tailwindcss/vite@4.1.9(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))':
+  '@tailwindcss/vite@4.1.9(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0))':
     dependencies:
       '@tailwindcss/node': 4.1.9
       '@tailwindcss/oxide': 4.1.9
       tailwindcss: 4.1.9
-      vite: 6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+      vite: 6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
 
   '@tanstack/react-table@8.21.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
     dependencies:
@ -17037,7 +17036,7 @@
 
   '@types/mdast@4.0.4':
     dependencies:
-      '@types/unist': 2.0.11
+      '@types/unist': 3.0.3
 
   '@types/mdurl@2.0.0': {}
 
@ -17439,7 +17438,7 @@
     transitivePeerDependencies:
       - babel-plugin-macros
 
-  '@vanilla-extract/integration@6.5.0(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)':
+  '@vanilla-extract/integration@6.5.0(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)':
     dependencies:
       '@babel/core': 7.27.4
       '@babel/plugin-syntax-typescript': 7.27.1(@babel/core@7.27.4)
@ -17452,8 +17451,8 @@
       lodash: 4.17.21
       mlly: 1.7.4
       outdent: 0.8.0
-      vite: 5.4.19(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
-      vite-node: 1.6.1(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
+      vite: 5.4.19(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
+      vite-node: 1.6.1(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
     transitivePeerDependencies:
       - '@types/node'
       - babel-plugin-macros
@ -19695,8 +19694,6 @@
       iconv-lite: 0.4.24
       tmp: 0.0.33
 
-  fast-deep-equal@2.0.1: {}
-
   fast-deep-equal@3.1.3: {}
 
   fast-glob@3.3.3:
@ -19844,7 +19841,7 @@
     optionalDependencies:
       '@emotion/is-prop-valid': 0.8.8
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
 
   framework-utils@1.1.0: {}
 
@ -20045,6 +20042,8 @@
 
   gopd@1.2.0: {}
 
+  gpt-tokenizer@3.0.1: {}
+
   graceful-fs@4.2.11: {}
 
   gradient-string@2.0.2:
@ -21698,7 +21697,7 @@
       graceful-fs: 4.2.11
       postcss: 8.4.31
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
       styled-jsx: 5.1.1(@babel/core@7.24.5)(react@18.2.0)
     optionalDependencies:
       '@next/swc-darwin-arm64': 14.1.4
@ -22754,6 +22753,12 @@
       react: 18.2.0
       scheduler: 0.23.2
 
+  react-dom@18.2.0(react@18.3.1):
+    dependencies:
+      loose-envify: 1.4.0
+      react: 18.3.1
+      scheduler: 0.23.2
+
   react-dom@18.3.1(react@18.3.1):
     dependencies:
       loose-envify: 1.4.0
@ -22793,7 +22798,7 @@
       postcss: 8.4.38
       prism-react-renderer: 2.1.0(react@18.2.0)
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
       socket.io: 4.7.3
       socket.io-client: 4.7.3
       sonner: 1.3.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -22821,6 +22826,24 @@
 
   react-lifecycles-compat@3.0.4: {}
 
+  react-markdown@10.1.0(@types/react@18.2.69)(react@18.3.1):
+    dependencies:
+      '@types/hast': 3.0.4
+      '@types/mdast': 4.0.4
+      '@types/react': 18.2.69
+      devlop: 1.1.0
+      hast-util-to-jsx-runtime: 2.3.6
+      html-url-attributes: 3.0.1
+      mdast-util-to-hast: 13.2.0
+      react: 18.3.1
+      remark-parse: 11.0.0
+      remark-rehype: 11.1.2
+      unified: 11.0.5
+      unist-util-visit: 5.0.0
+      vfile: 6.0.3
+    transitivePeerDependencies:
+      - supports-color
+
   react-markdown@9.1.0(@types/react@18.2.69)(react@18.3.1):
     dependencies:
       '@types/hast': 3.0.4
@ -22855,10 +22878,6 @@
       react-css-styled: 1.1.9
       react-selecto: 1.26.3
 
-  react-promise-suspense@0.3.4:
-    dependencies:
-      fast-deep-equal: 2.0.1
-
   react-refresh@0.14.2: {}
 
   react-remove-scroll-bar@2.3.8(@types/react@18.2.47)(react@18.2.0):
@ -23177,12 +23196,9 @@
 
   requireindex@1.2.0: {}
 
-  resend@3.5.0(react-dom@18.2.0(react@18.2.0))(react@18.3.1):
-    dependencies:
-      '@react-email/render': 0.0.16(react-dom@18.2.0(react@18.2.0))(react@18.3.1)
-    transitivePeerDependencies:
-      - react
-      - react-dom
+  resend@6.0.2(@react-email/render@0.0.12):
+    optionalDependencies:
+      '@react-email/render': 0.0.12
 
   resolve-from@4.0.0: {}
 
@ -23604,7 +23620,7 @@
   sonner@1.3.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       react: 18.2.0
-      react-dom: 18.2.0(react@18.2.0)
+      react-dom: 18.2.0(react@18.3.1)
 
   source-map-js@1.0.2: {}
 
@ -24133,7 +24149,7 @@
 
   tslib@2.8.1: {}
 
-  tsup@8.5.0(@swc/core@1.3.101)(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0):
+  tsup@8.5.0(@swc/core@1.3.101(@swc/helpers@0.5.17))(jiti@2.4.2)(postcss@8.5.5)(tsx@4.20.4)(typescript@5.8.3)(yaml@2.8.0):
     dependencies:
       bundle-require: 5.1.0(esbuild@0.25.5)
       cac: 6.7.14
@ -24545,13 +24561,13 @@
       '@types/unist': 3.0.3
       vfile-message: 4.0.2
 
-  vite-node@1.6.1(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0):
+  vite-node@1.6.1(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0):
     dependencies:
       cac: 6.7.14
       debug: 4.4.1(supports-color@10.0.0)
       pathe: 1.1.2
       picocolors: 1.1.1
-      vite: 5.4.19(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
+      vite: 5.4.19(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)
     transitivePeerDependencies:
       - '@types/node'
       - less
@ -24563,13 +24579,13 @@
       - supports-color
       - terser
 
-  vite-node@3.2.3(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0):
+  vite-node@3.2.3(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0):
    dependencies:
       cac: 6.7.14
       debug: 4.4.1(supports-color@10.0.0)
       es-module-lexer: 1.7.0
       pathe: 2.0.3
-      vite: 6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+      vite: 6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
     transitivePeerDependencies:
       - '@types/node'
       - jiti
@ -24584,31 +24600,31 @@
       - tsx
       - yaml
 
-  vite-tsconfig-paths@4.3.2(typescript@5.8.3)(vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)):
+  vite-tsconfig-paths@4.3.2(typescript@5.8.3)(vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)):
     dependencies:
       debug: 4.4.1(supports-color@10.0.0)
       globrex: 0.1.2
       tsconfck: 3.1.6(typescript@5.8.3)
     optionalDependencies:
-      vite: 6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
+      vite: 6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0)
     transitivePeerDependencies:
       - supports-color
       - typescript
 
-  vite@5.4.19(@types/node@22.16.0)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0):
+  vite@5.4.19(@types/node@20.19.7)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0):
     dependencies:
       esbuild: 0.21.5
       postcss: 8.5.5
       rollup: 4.43.0
     optionalDependencies:
-      '@types/node': 22.16.0
+      '@types/node': 20.19.7
       fsevents: 2.3.3
       less: 4.4.0
       lightningcss: 1.30.1
       sass: 1.89.2
       terser: 5.42.0
 
-  vite@6.3.5(@types/node@22.16.0)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0):
+  vite@6.3.5(@types/node@20.19.7)(jiti@2.4.2)(less@4.4.0)(lightningcss@1.30.1)(sass@1.89.2)(terser@5.42.0)(tsx@4.20.4)(yaml@2.8.0):
     dependencies:
       esbuild: 0.25.5
       fdir: 6.4.6(picomatch@4.0.2)
@ -24617,7 +24633,7 @@
       rollup: 4.43.0
       tinyglobby: 0.2.14
     optionalDependencies:
-      '@types/node': 22.16.0
+      '@types/node': 20.19.7
       fsevents: 2.3.3
       jiti: 2.4.2
       less: 4.4.0