mirror of
https://github.com/eliasstepanik/core.git
synced 2026-01-11 21:38:27 +00:00
feat: changed the activity UI
This commit is contained in:
parent
c1c93e0cb1
commit
d4c4e16ac2
@ -1,146 +0,0 @@
|
||||
import React, { useMemo } from "react";
|
||||
import CalendarHeatmap from "react-calendar-heatmap";
|
||||
import { cn } from "~/lib/utils";
|
||||
import { Popover, PopoverAnchor, PopoverContent } from "../ui/popover";
|
||||
|
||||
interface ContributionGraphProps {
  // One entry per day rendered on the heatmap.
  data: Array<{
    // Date string for the day; assumed parseable by `new Date()` — TODO confirm
    // the exact format expected by react-calendar-heatmap.
    date: string;
    // Activity count for that day; drives the cell's fill intensity.
    count: number;
    // Optional status flag; carried through to the heatmap values unchanged
    // (the color mapping below reads only `count`).
    status?: string;
  }>;
  // Extra classes merged onto the root container via `cn`.
  className?: string;
}
|
||||
|
||||
export function ContributionGraph({ data, className }: ContributionGraphProps) {
|
||||
const [open, setOpen] = React.useState(false);
|
||||
const [anchor, setAnchor] = React.useState<{ x: number; y: number } | null>(
|
||||
null,
|
||||
);
|
||||
const [active, setActive] = React.useState<any>(null);
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
|
||||
const processedData = useMemo(() => {
|
||||
const endDate = new Date();
|
||||
const startDate = new Date();
|
||||
startDate.setFullYear(endDate.getFullYear() - 1);
|
||||
|
||||
return data.map((item) => ({
|
||||
date: item.date,
|
||||
count: item.count,
|
||||
status: item.status,
|
||||
}));
|
||||
}, [data]);
|
||||
|
||||
const getClassForValue = (value: any) => {
|
||||
if (!value || value.count === 0) {
|
||||
return "fill-background dark:fill-background";
|
||||
}
|
||||
|
||||
const count = value.count;
|
||||
if (count >= 20) return "fill-success";
|
||||
if (count >= 15) return "fill-success/85";
|
||||
if (count >= 10) return "fill-success/70";
|
||||
if (count >= 5) return "fill-success/50";
|
||||
return "fill-success/30";
|
||||
};
|
||||
|
||||
const getTitleForValue = (value: any) => {
|
||||
if (!value || value.count === 0) {
|
||||
return `No activity on ${value?.date || "this date"}`;
|
||||
}
|
||||
|
||||
const count = value.count;
|
||||
const date = new Date(value.date).toLocaleDateString();
|
||||
return `${count} ${count === 1 ? "activity" : "activities"} on ${date}`;
|
||||
};
|
||||
|
||||
const endDate = new Date();
|
||||
const startDate = new Date();
|
||||
startDate.setFullYear(endDate.getFullYear() - 1);
|
||||
|
||||
// Position helpers: convert client coords to container-local coords
|
||||
const getLocalPoint = (e: React.MouseEvent<SVGRectElement, MouseEvent>) => {
|
||||
const rect = containerRef.current?.getBoundingClientRect();
|
||||
if (!rect) return { x: e.clientX, y: e.clientY };
|
||||
return { x: e.clientX, y: e.clientY };
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn("flex w-full flex-col justify-center", className)}
|
||||
>
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
{anchor && (
|
||||
<PopoverAnchor
|
||||
// Absolutely position the anchor relative to the container
|
||||
style={{
|
||||
position: "absolute",
|
||||
left: anchor.x,
|
||||
top: anchor.y,
|
||||
width: 1,
|
||||
height: 1,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<PopoverContent
|
||||
className="shadow-1 bg-background-3 w-fit p-2"
|
||||
side="top"
|
||||
align="center"
|
||||
>
|
||||
{active ? (
|
||||
<div className="space-y-1">
|
||||
<div className="text-sm font-medium">
|
||||
{new Date(active.date).toDateString()}
|
||||
</div>
|
||||
<div className="text-muted-foreground text-sm">
|
||||
{active.count ?? 0} events
|
||||
</div>
|
||||
{active.meta?.notes && (
|
||||
<p className="mt-2 text-sm">{active.meta.notes}</p>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div className="text-sm">No data</div>
|
||||
)}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
|
||||
<div className="overflow-x-auto rounded-lg">
|
||||
<CalendarHeatmap
|
||||
startDate={startDate}
|
||||
endDate={endDate}
|
||||
values={processedData}
|
||||
classForValue={getClassForValue}
|
||||
titleForValue={getTitleForValue}
|
||||
showWeekdayLabels={true}
|
||||
showMonthLabels={true}
|
||||
gutterSize={2}
|
||||
horizontal={true}
|
||||
transformDayElement={(element: any, value) => {
|
||||
// React clones the <rect>. We add handlers to open the shared popover.
|
||||
return React.cloneElement(element, {
|
||||
onClick: (e: React.MouseEvent<SVGRectElement>) => {
|
||||
setActive(value);
|
||||
setAnchor(getLocalPoint(e));
|
||||
setOpen(true);
|
||||
},
|
||||
onMouseEnter: (e: React.MouseEvent<SVGRectElement>) => {
|
||||
// If you want hover popovers, uncomment:
|
||||
setActive(value);
|
||||
setAnchor(getLocalPoint(e));
|
||||
setOpen(true);
|
||||
},
|
||||
onMouseLeave: () => {
|
||||
// For hover behavior, you might want a small delay instead of closing immediately.
|
||||
setOpen(false);
|
||||
},
|
||||
style: { cursor: "pointer" },
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -28,7 +28,7 @@ export interface PageHeaderProps {
|
||||
actions?: PageHeaderAction[];
|
||||
actionsNode?: React.ReactNode;
|
||||
tabs?: PageHeaderTab[];
|
||||
showBackForward?: boolean;
|
||||
showTrigger?: boolean;
|
||||
}
|
||||
|
||||
// Back and Forward navigation component
|
||||
@ -66,7 +66,7 @@ export function PageHeader({
|
||||
breadcrumbs,
|
||||
actions,
|
||||
tabs,
|
||||
showBackForward = true,
|
||||
showTrigger = true,
|
||||
actionsNode,
|
||||
}: PageHeaderProps) {
|
||||
const navigation = useNavigation();
|
||||
@ -95,9 +95,7 @@ export function PageHeader({
|
||||
</style>
|
||||
<div className="flex w-full items-center justify-between gap-1 px-4 pr-2 lg:gap-2">
|
||||
<div className="-ml-1 flex items-center gap-1">
|
||||
{/* Back/Forward navigation before SidebarTrigger */}
|
||||
{showBackForward && <NavigationBackForward />}
|
||||
<SidebarTrigger className="mr-1" />
|
||||
{showTrigger && <SidebarTrigger className="mr-1" />}
|
||||
|
||||
{/* Breadcrumbs */}
|
||||
{breadcrumbs && breadcrumbs.length > 0 ? (
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import { useState, useMemo, forwardRef } from "react";
|
||||
import { useTheme } from "remix-themes";
|
||||
import {
|
||||
type ClusterData,
|
||||
GraphClustering,
|
||||
@ -54,9 +53,6 @@ export const GraphClusteringVisualization = forwardRef<
|
||||
const [edgePopupContent, setEdgePopupContent] =
|
||||
useState<EdgePopupContent | null>(null);
|
||||
|
||||
const [selectedEntityType, setSelectedEntityType] = useState<
|
||||
string | undefined
|
||||
>();
|
||||
const [searchQuery, setSearchQuery] = useState<string>("");
|
||||
|
||||
// Combined filter logic for all filters
|
||||
@ -72,18 +68,6 @@ export const GraphClusteringVisualization = forwardRef<
|
||||
);
|
||||
}
|
||||
|
||||
// Entity type filter
|
||||
if (selectedEntityType) {
|
||||
filtered = filtered.filter((triplet) => {
|
||||
const sourceMatches =
|
||||
triplet.sourceNode.attributes?.type === selectedEntityType;
|
||||
const targetMatches =
|
||||
triplet.targetNode.attributes?.type === selectedEntityType;
|
||||
|
||||
return sourceMatches || targetMatches;
|
||||
});
|
||||
}
|
||||
|
||||
// Search filter
|
||||
if (searchQuery.trim()) {
|
||||
// Helper functions for filtering
|
||||
@ -108,13 +92,7 @@ export const GraphClusteringVisualization = forwardRef<
|
||||
}
|
||||
|
||||
return filtered;
|
||||
}, [
|
||||
triplets,
|
||||
selectedClusterId,
|
||||
onClusterSelect,
|
||||
selectedEntityType,
|
||||
searchQuery,
|
||||
]);
|
||||
}, [triplets, selectedClusterId, onClusterSelect, searchQuery]);
|
||||
|
||||
// Convert filtered triplets to graph triplets
|
||||
const graphTriplets = useMemo(
|
||||
@ -236,12 +214,9 @@ export const GraphClusteringVisualization = forwardRef<
|
||||
{/* Graph Filters and Search in same row */}
|
||||
<div className="flex items-center gap-1">
|
||||
<GraphFilters
|
||||
triplets={triplets}
|
||||
clusters={clusters}
|
||||
selectedCluster={selectedClusterId}
|
||||
selectedEntityType={selectedEntityType}
|
||||
onClusterChange={onClusterSelect as any}
|
||||
onEntityTypeChange={setSelectedEntityType}
|
||||
/>
|
||||
<SpaceSearch
|
||||
triplets={triplets}
|
||||
|
||||
@ -12,69 +12,32 @@ import type { RawTriplet } from "./type";
|
||||
import { type ClusterData } from "./graph-clustering";
|
||||
import { nodeColorPalette } from "./node-colors";
|
||||
import { useTheme } from "remix-themes";
|
||||
import { ScrollArea } from "../ui";
|
||||
|
||||
interface GraphFiltersProps {
|
||||
triplets: RawTriplet[];
|
||||
clusters: ClusterData[];
|
||||
selectedCluster?: string | null;
|
||||
selectedEntityType?: string;
|
||||
onClusterChange: (cluster?: string) => void;
|
||||
onEntityTypeChange: (entityType?: string) => void;
|
||||
}
|
||||
|
||||
type FilterStep = "main" | "cluster" | "nodeType" | "entityType";
|
||||
|
||||
const nodeTypeOptions = [
|
||||
{ value: "entity", label: "Entity" },
|
||||
{ value: "statement", label: "Statement" },
|
||||
];
|
||||
|
||||
export function GraphFilters({
|
||||
triplets,
|
||||
clusters,
|
||||
selectedCluster,
|
||||
|
||||
selectedEntityType,
|
||||
onClusterChange,
|
||||
|
||||
onEntityTypeChange,
|
||||
}: GraphFiltersProps) {
|
||||
const [themeMode] = useTheme();
|
||||
const [popoverOpen, setPopoverOpen] = useState(false);
|
||||
const [step, setStep] = useState<FilterStep>("main");
|
||||
|
||||
// Extract unique entity types (primaryLabel values) from triplets
|
||||
const entityTypeOptions = useMemo(() => {
|
||||
const entityTypes = new Set<string>();
|
||||
|
||||
triplets.forEach((triplet) => {
|
||||
// Check if node has primaryLabel (indicates it's an entity)
|
||||
if (triplet.sourceNode.attributes?.type) {
|
||||
entityTypes.add(triplet.sourceNode.attributes.type);
|
||||
}
|
||||
if (triplet.targetNode.attributes?.type) {
|
||||
entityTypes.add(triplet.targetNode.attributes.type);
|
||||
}
|
||||
});
|
||||
|
||||
return Array.from(entityTypes)
|
||||
.sort()
|
||||
.map((type) => ({
|
||||
value: type,
|
||||
label: type,
|
||||
}));
|
||||
}, [triplets]);
|
||||
|
||||
// Get display labels
|
||||
const selectedClusterLabel = clusters.find(
|
||||
(c) => c.id === selectedCluster,
|
||||
)?.name;
|
||||
|
||||
const selectedEntityTypeLabel = entityTypeOptions.find(
|
||||
(e) => e.value === selectedEntityType,
|
||||
)?.label;
|
||||
|
||||
const hasFilters = selectedCluster || selectedEntityType;
|
||||
|
||||
return (
|
||||
@ -112,13 +75,6 @@ export function GraphFilters({
|
||||
>
|
||||
Cluster
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="justify-start"
|
||||
onClick={() => setStep("entityType")}
|
||||
>
|
||||
Entity Type
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@ -167,40 +123,6 @@ export function GraphFilters({
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{step === "entityType" && (
|
||||
<div className="flex flex-col gap-1 p-2">
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="w-full justify-start"
|
||||
onClick={() => {
|
||||
onEntityTypeChange(undefined);
|
||||
setPopoverOpen(false);
|
||||
setStep("main");
|
||||
}}
|
||||
>
|
||||
All Entity Types
|
||||
</Button>
|
||||
{entityTypeOptions.map((entityType) => (
|
||||
<Button
|
||||
key={entityType.value}
|
||||
variant="ghost"
|
||||
className="w-full justify-start"
|
||||
onClick={() => {
|
||||
onEntityTypeChange(
|
||||
entityType.value === selectedEntityType
|
||||
? undefined
|
||||
: entityType.value,
|
||||
);
|
||||
setPopoverOpen(false);
|
||||
setStep("main");
|
||||
}}
|
||||
>
|
||||
{entityType.label}
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</PopoverPortal>
|
||||
@ -218,16 +140,6 @@ export function GraphFilters({
|
||||
/>
|
||||
</Badge>
|
||||
)}
|
||||
|
||||
{selectedEntityType && (
|
||||
<Badge variant="secondary" className="h-7 gap-1 rounded px-2">
|
||||
{selectedEntityTypeLabel}
|
||||
<X
|
||||
className="hover:text-destructive h-3.5 w-3.5 cursor-pointer"
|
||||
onClick={() => onEntityTypeChange(undefined)}
|
||||
/>
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@ -10,6 +10,9 @@ import { Cursor } from "./icons/cursor";
|
||||
import { Claude } from "./icons/claude";
|
||||
import { Cline } from "./icons/cline";
|
||||
import { VSCode } from "./icons/vscode";
|
||||
import { Obsidian } from "./icons/obsidian";
|
||||
import { Figma } from "./icons/figma";
|
||||
import StaticLogo from "./logo/logo";
|
||||
|
||||
export const ICON_MAPPING = {
|
||||
slack: SlackIcon,
|
||||
@ -23,6 +26,9 @@ export const ICON_MAPPING = {
|
||||
claude: Claude,
|
||||
cline: Cline,
|
||||
vscode: VSCode,
|
||||
obsidian: Obsidian,
|
||||
figma: Figma,
|
||||
core: StaticLogo,
|
||||
|
||||
// Default icon
|
||||
integration: LayoutGrid,
|
||||
|
||||
20
apps/webapp/app/components/icons/figma.tsx
Normal file
20
apps/webapp/app/components/icons/figma.tsx
Normal file
@ -0,0 +1,20 @@
|
||||
import type { IconProps } from "./types";
|
||||
|
||||
export function Figma({ size = 18, className }: IconProps) {
|
||||
return (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
height={size}
|
||||
className={className}
|
||||
width={size}
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<title>Claude</title>
|
||||
<path
|
||||
d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z"
|
||||
fill="#D97757"
|
||||
fill-rule="nonzero"
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
20
apps/webapp/app/components/icons/obsidian.tsx
Normal file
20
apps/webapp/app/components/icons/obsidian.tsx
Normal file
@ -0,0 +1,20 @@
|
||||
import type { IconProps } from "./types";
|
||||
|
||||
export function Obsidian({ size = 18, className }: IconProps) {
|
||||
return (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
height={size}
|
||||
className={className}
|
||||
width={size}
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<title>Claude</title>
|
||||
<path
|
||||
d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z"
|
||||
fill="#D97757"
|
||||
fill-rule="nonzero"
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
@ -7,7 +7,7 @@ export function LoginPageLayout({ children }: { children: React.ReactNode }) {
|
||||
<div className="flex justify-center gap-2 md:justify-start">
|
||||
<a href="#" className="flex items-center gap-2 font-medium">
|
||||
<div className="flex size-8 items-center justify-center rounded-md">
|
||||
<Logo width={60} height={60} />
|
||||
<Logo size={60} />
|
||||
</div>
|
||||
C.O.R.E.
|
||||
</a>
|
||||
|
||||
@ -14,7 +14,7 @@ export function LoginPageLayout({ children }: { children: React.ReactNode }) {
|
||||
>
|
||||
<div className="flex w-full max-w-sm flex-col items-center gap-2">
|
||||
<div className="flex size-10 items-center justify-center rounded-md">
|
||||
<Logo width={60} height={60} />
|
||||
<Logo size={60} />
|
||||
</div>
|
||||
<a href="#" className="flex items-center gap-2 self-center font-medium">
|
||||
<div className="font-mono">C.O.R.E.</div>
|
||||
|
||||
@ -1,13 +1,14 @@
|
||||
export interface LogoProps {
|
||||
width: number;
|
||||
height: number;
|
||||
size: number;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export default function StaticLogo({ width, height }: LogoProps) {
|
||||
export default function StaticLogo({ size, className }: LogoProps) {
|
||||
return (
|
||||
<svg
|
||||
width={width}
|
||||
height={height}
|
||||
width={size}
|
||||
height={size}
|
||||
className={className}
|
||||
viewBox="0 0 282 282"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
|
||||
@ -7,9 +7,6 @@ import { type LogItem } from "~/hooks/use-logs";
|
||||
import Markdown from "react-markdown";
|
||||
|
||||
interface LogDetailsProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
text?: string;
|
||||
error?: string;
|
||||
log: LogItem;
|
||||
}
|
||||
@ -27,13 +24,7 @@ interface EpisodeFactsResponse {
|
||||
invalidFacts: EpisodeFact[];
|
||||
}
|
||||
|
||||
export function LogDetails({
|
||||
open,
|
||||
onOpenChange,
|
||||
text,
|
||||
error,
|
||||
log,
|
||||
}: LogDetailsProps) {
|
||||
export function LogDetails({ error, log }: LogDetailsProps) {
|
||||
const [facts, setFacts] = useState<any[]>([]);
|
||||
const [invalidFacts, setInvalidFacts] = useState<any[]>([]);
|
||||
const [factsLoading, setFactsLoading] = useState(false);
|
||||
@ -41,11 +32,11 @@ export function LogDetails({
|
||||
|
||||
// Fetch episode facts when dialog opens and episodeUUID exists
|
||||
useEffect(() => {
|
||||
if (open && log.episodeUUID && facts.length === 0) {
|
||||
if (log.episodeUUID && facts.length === 0) {
|
||||
setFactsLoading(true);
|
||||
fetcher.load(`/api/v1/episodes/${log.episodeUUID}/facts`);
|
||||
}
|
||||
}, [open, log.episodeUUID, facts.length]);
|
||||
}, [log.episodeUUID, facts.length]);
|
||||
|
||||
// Handle fetcher response
|
||||
useEffect(() => {
|
||||
@ -58,116 +49,112 @@ export function LogDetails({
|
||||
}, [fetcher.data, fetcher.state]);
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="max-w-4xl">
|
||||
<DialogHeader className="px-4 pt-4">
|
||||
<DialogTitle className="flex w-full items-center justify-between">
|
||||
<span>Log Details</span>
|
||||
<div className="flex gap-0.5">
|
||||
{log.episodeUUID && (
|
||||
<Badge variant="secondary" className="rounded text-xs">
|
||||
Episode: {log.episodeUUID.slice(0, 8)}...
|
||||
</Badge>
|
||||
)}
|
||||
{log.source && (
|
||||
<Badge variant="secondary" className="rounded text-xs">
|
||||
Source: {log.source}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</DialogTitle>
|
||||
</DialogHeader>
|
||||
<div className="max-w-4xl">
|
||||
<div className="px-4 pt-4">
|
||||
<div className="mb-4 flex w-full items-center justify-between">
|
||||
<span>Log Details</span>
|
||||
<div className="flex gap-0.5">
|
||||
{log.episodeUUID && (
|
||||
<Badge variant="secondary" className="rounded text-xs">
|
||||
Episode: {log.episodeUUID.slice(0, 8)}...
|
||||
</Badge>
|
||||
)}
|
||||
{log.source && (
|
||||
<Badge variant="secondary" className="rounded text-xs">
|
||||
Source: {log.source}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="max-h-[70vh] overflow-auto p-4 pt-0">
|
||||
{/* Log Content */}
|
||||
<div className="mb-4 text-sm break-words whitespace-pre-wrap">
|
||||
<div className="rounded-md">
|
||||
<Markdown>{text}</Markdown>
|
||||
<div className="max-h-[90vh] overflow-auto p-4 pt-0">
|
||||
{/* Log Content */}
|
||||
<div className="mb-4 text-sm break-words whitespace-pre-wrap">
|
||||
<div className="rounded-md">
|
||||
<Markdown>{log.ingestText}</Markdown>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Error Details */}
|
||||
{log.error && (
|
||||
<div className="mb-4">
|
||||
<h3 className="mb-2 text-sm font-medium">Error Details</h3>
|
||||
<div className="bg-destructive/10 rounded-md p-3">
|
||||
<div className="flex items-start gap-2 text-red-600">
|
||||
<AlertCircle className="mt-0.5 h-4 w-4 flex-shrink-0" />
|
||||
<p className="text-sm break-words whitespace-pre-wrap">
|
||||
{log.error}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Error Details */}
|
||||
{error && (
|
||||
<div className="mb-4">
|
||||
<h3 className="mb-2 text-sm font-medium">Error Details</h3>
|
||||
<div className="bg-destructive/10 rounded-md p-3">
|
||||
<div className="flex items-start gap-2 text-red-600">
|
||||
<AlertCircle className="mt-0.5 h-4 w-4 flex-shrink-0" />
|
||||
<p className="text-sm break-words whitespace-pre-wrap">
|
||||
{error}
|
||||
</p>
|
||||
{/* Episode Facts */}
|
||||
{log.episodeUUID && (
|
||||
<div className="mb-4">
|
||||
<h3 className="text-muted-foreground mb-2 text-sm">Facts</h3>
|
||||
<div className="rounded-md">
|
||||
{factsLoading ? (
|
||||
<div className="flex items-center justify-center gap-2 p-4 text-sm">
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Episode Facts */}
|
||||
{log.episodeUUID && (
|
||||
<div className="mb-4">
|
||||
<h3 className="text-muted-foreground mb-2 text-sm">Facts</h3>
|
||||
<div className="rounded-md">
|
||||
{factsLoading ? (
|
||||
<div className="flex items-center justify-center gap-2 p-4 text-sm">
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
</div>
|
||||
) : facts.length > 0 ? (
|
||||
<div className="flex flex-col gap-2">
|
||||
{facts.map((fact) => (
|
||||
<div
|
||||
key={fact.uuid}
|
||||
className="bg-grayAlpha-100 rounded-md p-3"
|
||||
>
|
||||
<p className="mb-1 text-sm">{fact.fact}</p>
|
||||
<div className="text-muted-foreground flex items-center gap-2 text-xs">
|
||||
) : facts.length > 0 ? (
|
||||
<div className="flex flex-col gap-2">
|
||||
{facts.map((fact) => (
|
||||
<div
|
||||
key={fact.uuid}
|
||||
className="bg-grayAlpha-100 rounded-md p-3"
|
||||
>
|
||||
<p className="mb-1 text-sm">{fact.fact}</p>
|
||||
<div className="text-muted-foreground flex items-center gap-2 text-xs">
|
||||
<span>
|
||||
Valid: {new Date(fact.validAt).toLocaleString()}
|
||||
</span>
|
||||
{fact.invalidAt && (
|
||||
<span>
|
||||
Valid: {new Date(fact.validAt).toLocaleString()}
|
||||
Invalid: {new Date(fact.invalidAt).toLocaleString()}
|
||||
</span>
|
||||
{fact.invalidAt && (
|
||||
<span>
|
||||
Invalid:{" "}
|
||||
{new Date(fact.invalidAt).toLocaleString()}
|
||||
</span>
|
||||
)}
|
||||
{Object.keys(fact.attributes).length > 0 && (
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
{Object.keys(fact.attributes).length} attributes
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{Object.keys(fact.attributes).length > 0 && (
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
{Object.keys(fact.attributes).length} attributes
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
{invalidFacts.map((fact) => (
|
||||
<div
|
||||
key={fact.uuid}
|
||||
className="bg-grayAlpha-100 rounded-md p-3"
|
||||
>
|
||||
<p className="mb-1 text-sm">{fact.fact}</p>
|
||||
<div className="text-muted-foreground flex items-center gap-2 text-xs">
|
||||
{fact.invalidAt && (
|
||||
<span>
|
||||
Invalid:{" "}
|
||||
{new Date(fact.invalidAt).toLocaleString()}
|
||||
</span>
|
||||
)}
|
||||
{Object.keys(fact.attributes).length > 0 && (
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
{Object.keys(fact.attributes).length} attributes
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
{invalidFacts.map((fact) => (
|
||||
<div
|
||||
key={fact.uuid}
|
||||
className="bg-grayAlpha-100 rounded-md p-3"
|
||||
>
|
||||
<p className="mb-1 text-sm">{fact.fact}</p>
|
||||
<div className="text-muted-foreground flex items-center gap-2 text-xs">
|
||||
{fact.invalidAt && (
|
||||
<span>
|
||||
Invalid: {new Date(fact.invalidAt).toLocaleString()}
|
||||
</span>
|
||||
)}
|
||||
{Object.keys(fact.attributes).length > 0 && (
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
{Object.keys(fact.attributes).length} attributes
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : (
|
||||
<div className="text-muted-foreground p-4 text-center text-sm">
|
||||
No facts found for this episode
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : (
|
||||
<div className="text-muted-foreground p-4 text-center text-sm">
|
||||
No facts found for this episode
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,9 +1,8 @@
|
||||
import { useState } from "react";
|
||||
import { cn } from "~/lib/utils";
|
||||
import { Badge } from "../ui/badge";
|
||||
import { Badge, BadgeColor } from "../ui/badge";
|
||||
import { type LogItem } from "~/hooks/use-logs";
|
||||
import { LogOptions } from "./log-options";
|
||||
import { LogDetails } from "./log-details";
|
||||
import { getIconForAuthorise } from "../icon-utils";
|
||||
import { useNavigate, useParams } from "@remix-run/react";
|
||||
|
||||
interface LogTextCollapseProps {
|
||||
text?: string;
|
||||
@ -17,27 +16,21 @@ interface LogTextCollapseProps {
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case "PROCESSING":
|
||||
return "bg-blue-100 text-blue-800 hover:bg-blue-100 hover:text-blue-800";
|
||||
return "bg-blue-800";
|
||||
case "PENDING":
|
||||
return "bg-yellow-100 text-yellow-800 hover:bg-yellow-100 hover:text-yellow-800";
|
||||
case "COMPLETED":
|
||||
return "bg-success/10 text-success hover:bg-success/10 hover:text-success";
|
||||
return "bg-warning";
|
||||
case "FAILED":
|
||||
return "bg-destructive/10 text-destructive hover:bg-destructive/10 hover:text-destructive";
|
||||
return "bg-destructive";
|
||||
case "CANCELLED":
|
||||
return "bg-gray-100 text-gray-800 hover:bg-gray-100 hover:text-gray-800";
|
||||
return "bg-gray-800";
|
||||
default:
|
||||
return "bg-gray-100 text-gray-800 hover:bg-gray-100 hover:text-gray-800";
|
||||
return "bg-gray-800";
|
||||
}
|
||||
};
|
||||
|
||||
export function LogTextCollapse({
|
||||
text,
|
||||
error,
|
||||
id,
|
||||
log,
|
||||
}: LogTextCollapseProps) {
|
||||
const [dialogOpen, setDialogOpen] = useState(false);
|
||||
export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
||||
const { logId } = useParams();
|
||||
const navigate = useNavigate();
|
||||
|
||||
// Show collapse if text is long (by word count)
|
||||
const COLLAPSE_WORD_LIMIT = 30;
|
||||
@ -61,67 +54,76 @@ export function LogTextCollapse({
|
||||
displayText = text;
|
||||
}
|
||||
|
||||
const showStatus = (log: LogItem) => {
|
||||
if (log.status === "COMPLETED") {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const getIngestType = (log: LogItem) => {
|
||||
const type = log.type ?? log.data.type ?? "Conversation";
|
||||
|
||||
return type[0].toUpperCase();
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex w-full items-center">
|
||||
<div
|
||||
className={cn(
|
||||
"group-hover:bg-grayAlpha-100 flex min-w-[0px] shrink grow items-start gap-2 rounded-md px-4",
|
||||
"group-hover:bg-grayAlpha-100 flex min-w-[0px] shrink grow items-start gap-2 rounded-md px-2 text-sm",
|
||||
logId === log.id && "bg-grayAlpha-200",
|
||||
)}
|
||||
onClick={() => {
|
||||
navigate(`/home/inbox/${log.id}`);
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
"border-border flex w-full min-w-[0px] shrink flex-col border-b py-1",
|
||||
)}
|
||||
onClick={() => {
|
||||
setDialogOpen(true);
|
||||
}}
|
||||
>
|
||||
<div className="flex w-full items-center justify-between gap-4">
|
||||
<div className="inline-flex min-h-[24px] min-w-[0px] shrink cursor-pointer items-center justify-start">
|
||||
<div className={cn("truncate text-left")}>
|
||||
{text.replace(/<[^>]+>/g, "")}
|
||||
<div className="border-border flex w-full min-w-[0px] shrink flex-col gap-1 border-b py-2">
|
||||
<div className={cn("flex w-full min-w-[0px] shrink flex-col")}>
|
||||
<div className="flex w-full items-center justify-between gap-4">
|
||||
<div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
|
||||
<div className={cn("truncate text-left")}>
|
||||
{text.replace(/<[^>]+>/g, "")}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{showStatus(log) && (
|
||||
<div className="text-muted-foreground flex shrink-0 items-center justify-end text-xs">
|
||||
<div className="flex items-center">
|
||||
<Badge
|
||||
className={cn(
|
||||
"!bg-grayAlpha-100 text-muted-foreground rounded text-xs",
|
||||
)}
|
||||
>
|
||||
<BadgeColor className={cn(getStatusColor(log.status))} />
|
||||
{log.status.charAt(0).toUpperCase() +
|
||||
log.status.slice(1).toLowerCase()}
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-1">
|
||||
{getIconForAuthorise(log.source.toLowerCase(), 12, undefined)}
|
||||
{log.source.toLowerCase()}
|
||||
</div>
|
||||
|
||||
<div className="text-muted-foreground flex shrink-0 items-center justify-end text-xs">
|
||||
<div className="flex items-center">
|
||||
<Badge
|
||||
className={cn(
|
||||
"bg-grayAlpha-100 text-foreground mr-3 rounded text-xs",
|
||||
)}
|
||||
>
|
||||
{log.source}
|
||||
</Badge>
|
||||
<Badge
|
||||
className={cn(
|
||||
"mr-3 rounded text-xs",
|
||||
getStatusColor(log.status),
|
||||
)}
|
||||
>
|
||||
{log.status.charAt(0).toUpperCase() +
|
||||
log.status.slice(1).toLowerCase()}
|
||||
</Badge>
|
||||
|
||||
<div className="text-muted-foreground mr-3">
|
||||
{new Date(log.time).toLocaleString()}
|
||||
</div>
|
||||
|
||||
<div onClick={(e) => e.stopPropagation()}>
|
||||
<LogOptions id={id} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<Badge
|
||||
className={cn(
|
||||
"!bg-grayAlpha-100 text-muted-foreground rounded text-xs",
|
||||
)}
|
||||
>
|
||||
{getIngestType(log)}
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<LogDetails
|
||||
open={dialogOpen}
|
||||
onOpenChange={setDialogOpen}
|
||||
text={text}
|
||||
error={error}
|
||||
log={log}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@ -48,7 +48,7 @@ export function LogsFilters({
|
||||
const hasFilters = selectedSource || selectedStatus;
|
||||
|
||||
return (
|
||||
<div className="mb-2 flex w-full items-center justify-start gap-2 px-5">
|
||||
<div className="mb-2 flex w-full items-center justify-start gap-2 px-3">
|
||||
<Popover
|
||||
open={popoverOpen}
|
||||
onOpenChange={(open) => {
|
||||
|
||||
@ -9,7 +9,6 @@ import {
|
||||
SidebarMenuItem,
|
||||
} from "../ui/sidebar";
|
||||
import {
|
||||
Activity,
|
||||
Columns3,
|
||||
Inbox,
|
||||
LayoutGrid,
|
||||
@ -65,7 +64,7 @@ export function AppSidebar({ ...props }: React.ComponentProps<typeof Sidebar>) {
|
||||
<SidebarMenu>
|
||||
<SidebarMenuItem>
|
||||
<div className="mt-1 ml-1 flex w-full items-center justify-start gap-2">
|
||||
<Logo width={20} height={20} />
|
||||
<Logo size={20} />
|
||||
C.O.R.E.
|
||||
</div>
|
||||
</SidebarMenuItem>
|
||||
|
||||
@ -8,7 +8,7 @@ import { extensionsForConversation } from "../conversation/editor-extensions";
|
||||
export const SpaceSummary = ({ summary }: { summary?: string | null }) => {
|
||||
const editor = useEditor({
|
||||
extensions: [...extensionsForConversation, skillExtension],
|
||||
editable: false,
|
||||
editable: true,
|
||||
content: summary,
|
||||
});
|
||||
|
||||
|
||||
@ -30,7 +30,7 @@ const ResizableHandle = ({
|
||||
}) => (
|
||||
<ResizablePrimitive.PanelResizeHandle
|
||||
className={cn(
|
||||
"bg-background-1 focus-visible:ring-ring relative flex w-px items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-none data-[panel-group-direction=vertical]:h-px data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90",
|
||||
"focus-visible:ring-ring relative flex w-px items-center justify-center bg-gray-300 after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-none data-[panel-group-direction=vertical]:h-px data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@ -10,6 +10,7 @@ export interface LogItem {
|
||||
status: "PENDING" | "PROCESSING" | "COMPLETED" | "FAILED" | "CANCELLED";
|
||||
error?: string;
|
||||
sourceURL?: string;
|
||||
type?: string;
|
||||
integrationSlug?: string;
|
||||
activityId?: string;
|
||||
episodeUUID?: string;
|
||||
|
||||
@ -7,10 +7,11 @@ import { type IngestBodyRequest, ingestTask } from "~/trigger/ingest/ingest";
|
||||
import { ingestDocumentTask } from "~/trigger/ingest/ingest-document";
|
||||
|
||||
export const addToQueue = async (
|
||||
body: z.infer<typeof IngestBodyRequest>,
|
||||
rawBody: z.infer<typeof IngestBodyRequest>,
|
||||
userId: string,
|
||||
activityId?: string,
|
||||
) => {
|
||||
const body = { ...rawBody, source: rawBody.source.toLowerCase() };
|
||||
const user = await prisma.user.findFirst({
|
||||
where: {
|
||||
id: userId,
|
||||
@ -30,6 +31,7 @@ export const addToQueue = async (
|
||||
data: {
|
||||
spaceId: body.spaceId ? body.spaceId : null,
|
||||
data: body,
|
||||
type: body.type,
|
||||
status: IngestionStatus.PENDING,
|
||||
priority: 1,
|
||||
workspaceId: user.Workspace.id,
|
||||
|
||||
@ -115,7 +115,7 @@ export const getClusteredGraphData = async (userId: string) => {
|
||||
const result = await session.run(
|
||||
`// Get all statements and their entity connections for reified graph
|
||||
MATCH (s:Statement)
|
||||
WHERE s.userId = $userId AND s.invalidAt IS NULL
|
||||
WHERE s.userId = $userId
|
||||
|
||||
// Get all entities connected to each statement
|
||||
MATCH (s)-[:HAS_SUBJECT]->(subj:Entity)
|
||||
|
||||
@ -5,6 +5,7 @@ import { extensionSearch } from "~/trigger/extension/search";
|
||||
|
||||
export const ExtensionSearchBodyRequest = z.object({
|
||||
input: z.string().min(1, "Input text is required"),
|
||||
outputType: z.string().default("markdown"),
|
||||
});
|
||||
|
||||
const { action, loader } = createActionApiRoute(
|
||||
@ -21,6 +22,7 @@ const { action, loader } = createActionApiRoute(
|
||||
const trigger = await extensionSearch.trigger({
|
||||
userInput: body.input,
|
||||
userId: authentication.userId,
|
||||
outputType: body.outputType,
|
||||
});
|
||||
|
||||
return json(trigger);
|
||||
|
||||
78
apps/webapp/app/routes/api.v1.logs.$logId.tsx
Normal file
78
apps/webapp/app/routes/api.v1.logs.$logId.tsx
Normal file
@ -0,0 +1,78 @@
|
||||
import { type LoaderFunctionArgs, json } from "@remix-run/node";
|
||||
import { prisma } from "~/db.server";
|
||||
import { requireUserId } from "~/services/session.server";
|
||||
|
||||
export async function loader({ request, params }: LoaderFunctionArgs) {
|
||||
const userId = await requireUserId(request);
|
||||
const logId = params.logId;
|
||||
|
||||
// Get user and workspace in one query
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { Workspace: { select: { id: true } } },
|
||||
});
|
||||
|
||||
if (!user?.Workspace) {
|
||||
throw new Response("Workspace not found", { status: 404 });
|
||||
}
|
||||
|
||||
// Fetch the specific log by logId
|
||||
const log = await prisma.ingestionQueue.findUnique({
|
||||
where: { id: logId },
|
||||
select: {
|
||||
id: true,
|
||||
createdAt: true,
|
||||
processedAt: true,
|
||||
status: true,
|
||||
error: true,
|
||||
type: true,
|
||||
output: true,
|
||||
data: true,
|
||||
activity: {
|
||||
select: {
|
||||
text: true,
|
||||
sourceURL: true,
|
||||
integrationAccount: {
|
||||
select: {
|
||||
integrationDefinition: {
|
||||
select: {
|
||||
name: true,
|
||||
slug: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!log) {
|
||||
throw new Response("Log not found", { status: 404 });
|
||||
}
|
||||
|
||||
// Format the response
|
||||
const integrationDef =
|
||||
log.activity?.integrationAccount?.integrationDefinition;
|
||||
const logData = log.data as any;
|
||||
|
||||
const formattedLog = {
|
||||
id: log.id,
|
||||
source: integrationDef?.name || logData?.source || "Unknown",
|
||||
ingestText:
|
||||
log.activity?.text ||
|
||||
logData?.episodeBody ||
|
||||
logData?.text ||
|
||||
"No content",
|
||||
time: log.createdAt,
|
||||
processedAt: log.processedAt,
|
||||
episodeUUID: (log.output as any)?.episodeUuid,
|
||||
status: log.status,
|
||||
error: log.error,
|
||||
sourceURL: log.activity?.sourceURL,
|
||||
integrationSlug: integrationDef?.slug,
|
||||
data: log.data,
|
||||
};
|
||||
|
||||
return json({ log: formattedLog });
|
||||
}
|
||||
@ -60,6 +60,7 @@ export async function loader({ request }: LoaderFunctionArgs) {
|
||||
processedAt: true,
|
||||
status: true,
|
||||
error: true,
|
||||
type: true,
|
||||
output: true,
|
||||
data: true,
|
||||
activity: {
|
||||
@ -83,7 +84,6 @@ export async function loader({ request }: LoaderFunctionArgs) {
|
||||
createdAt: "desc",
|
||||
},
|
||||
skip,
|
||||
take: limit,
|
||||
}),
|
||||
|
||||
prisma.ingestionQueue.count({
|
||||
|
||||
41
apps/webapp/app/routes/home.inbox.$logId.tsx
Normal file
41
apps/webapp/app/routes/home.inbox.$logId.tsx
Normal file
@ -0,0 +1,41 @@
|
||||
import { json, type LoaderFunctionArgs } from "@remix-run/node";
|
||||
import { useLoaderData } from "@remix-run/react";
|
||||
import { Inbox } from "lucide-react";
|
||||
import { PageHeader } from "~/components/common/page-header";
|
||||
import { LogDetails } from "~/components/logs/log-details";
|
||||
|
||||
import { getIngestionQueueForFrontend } from "~/services/ingestionLogs.server";
|
||||
import { requireUserId } from "~/services/session.server";
|
||||
|
||||
export async function loader({ request, params }: LoaderFunctionArgs) {
|
||||
await requireUserId(request);
|
||||
const logId = params.logId;
|
||||
|
||||
const log = await getIngestionQueueForFrontend(logId as string);
|
||||
|
||||
return json({ log: log });
|
||||
}
|
||||
|
||||
export default function InboxNotSelected() {
|
||||
const { log } = useLoaderData<typeof loader>();
|
||||
|
||||
if (!log) {
|
||||
return (
|
||||
<div className="flex h-full w-full flex-col">
|
||||
<PageHeader title="Episode" showTrigger={false} />
|
||||
<div className="flex h-full flex-col items-center justify-center gap-2">
|
||||
<Inbox size={30} />
|
||||
No episode data found
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex h-full w-full flex-col">
|
||||
<PageHeader title="Episode" showTrigger={false} />
|
||||
|
||||
<LogDetails log={log as any} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -1,17 +1,24 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { useFetcher } from "@remix-run/react";
|
||||
import { useState } from "react";
|
||||
import { useLogs } from "~/hooks/use-logs";
|
||||
import { LogsFilters } from "~/components/logs/logs-filters";
|
||||
import { VirtualLogsList } from "~/components/logs/virtual-logs-list";
|
||||
import { Card, CardContent } from "~/components/ui/card";
|
||||
import { Database, LoaderCircle } from "lucide-react";
|
||||
import { Database, Inbox, LoaderCircle } from "lucide-react";
|
||||
import { PageHeader } from "~/components/common/page-header";
|
||||
import { ContributionGraph } from "~/components/activity/contribution-graph";
|
||||
import {
|
||||
ResizableHandle,
|
||||
ResizablePanel,
|
||||
ResizablePanelGroup,
|
||||
} from "~/components/ui/resizable";
|
||||
import { Outlet, useParams } from "@remix-run/react";
|
||||
import { cn } from "~/lib/utils";
|
||||
|
||||
export default function LogsAll() {
|
||||
const [selectedSource, setSelectedSource] = useState<string | undefined>();
|
||||
const [selectedStatus, setSelectedStatus] = useState<string | undefined>();
|
||||
|
||||
const { logId } = useParams();
|
||||
|
||||
const {
|
||||
logs,
|
||||
hasMore,
|
||||
@ -27,55 +34,85 @@ export default function LogsAll() {
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageHeader title="Inbox" />
|
||||
<div className="flex h-[calc(100vh_-_56px)] w-full flex-col items-center space-y-6 py-4">
|
||||
{isInitialLoad ? (
|
||||
<>
|
||||
<LoaderCircle className="text-primary h-4 w-4 animate-spin" />
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{/* Filters */}
|
||||
<ResizablePanelGroup direction="horizontal">
|
||||
<ResizablePanel
|
||||
maxSize={50}
|
||||
defaultSize={35}
|
||||
minSize={35}
|
||||
collapsible
|
||||
collapsedSize={35}
|
||||
>
|
||||
<div className="flex h-full flex-col">
|
||||
<PageHeader title="Inbox" />
|
||||
|
||||
<LogsFilters
|
||||
availableSources={availableSources}
|
||||
selectedSource={selectedSource}
|
||||
selectedStatus={selectedStatus}
|
||||
onSourceChange={setSelectedSource}
|
||||
onStatusChange={setSelectedStatus}
|
||||
/>
|
||||
|
||||
{/* Logs List */}
|
||||
<div className="flex h-full w-full space-y-4">
|
||||
{logs.length === 0 ? (
|
||||
<Card className="bg-background-2 w-full">
|
||||
<CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
|
||||
<div className="text-center">
|
||||
<Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
|
||||
<h3 className="mb-2 text-lg font-semibold">
|
||||
No logs found
|
||||
</h3>
|
||||
<p className="text-muted-foreground">
|
||||
{selectedSource || selectedStatus
|
||||
? "Try adjusting your filters to see more results."
|
||||
: "No ingestion logs are available yet."}
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
<div className="flex h-[calc(100vh_-_56px)] w-full flex-col items-center space-y-6 pt-3 pb-4">
|
||||
{isInitialLoad ? (
|
||||
<>
|
||||
<LoaderCircle className="text-primary h-4 w-4 animate-spin" />
|
||||
</>
|
||||
) : (
|
||||
<VirtualLogsList
|
||||
logs={logs}
|
||||
hasMore={hasMore}
|
||||
loadMore={loadMore}
|
||||
isLoading={isLoading}
|
||||
height={600}
|
||||
/>
|
||||
<>
|
||||
{/* Filters */}
|
||||
|
||||
<LogsFilters
|
||||
availableSources={availableSources}
|
||||
selectedSource={selectedSource}
|
||||
selectedStatus={selectedStatus}
|
||||
onSourceChange={setSelectedSource}
|
||||
onStatusChange={setSelectedStatus}
|
||||
/>
|
||||
|
||||
{/* Logs List */}
|
||||
<div className="flex h-full w-full space-y-4">
|
||||
{logs.length === 0 ? (
|
||||
<Card className="bg-background-2 w-full">
|
||||
<CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
|
||||
<div className="text-center">
|
||||
<Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
|
||||
<h3 className="mb-2 text-lg font-semibold">
|
||||
No logs found
|
||||
</h3>
|
||||
<p className="text-muted-foreground">
|
||||
{selectedSource || selectedStatus
|
||||
? "Try adjusting your filters to see more results."
|
||||
: "No ingestion logs are available yet."}
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
) : (
|
||||
<VirtualLogsList
|
||||
logs={logs}
|
||||
hasMore={hasMore}
|
||||
loadMore={loadMore}
|
||||
isLoading={isLoading}
|
||||
height={600}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</ResizablePanel>
|
||||
<ResizableHandle />
|
||||
<ResizablePanel
|
||||
collapsible
|
||||
collapsedSize={0}
|
||||
className={cn(
|
||||
"flex flex-col items-start justify-start",
|
||||
!logId && "&& items-center justify-center",
|
||||
)}
|
||||
>
|
||||
{!logId && (
|
||||
<div className="flex h-full flex-col items-center justify-center gap-2">
|
||||
<Inbox size={30} />
|
||||
No episode selected
|
||||
</div>
|
||||
)}
|
||||
<Outlet />
|
||||
</ResizablePanel>
|
||||
</ResizablePanelGroup>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@ -235,7 +235,7 @@ export default function OAuthAuthorize() {
|
||||
<div className="flex items-center justify-center gap-4">
|
||||
{getIconForAuthorise(client.name, 40, client.logoUrl)}
|
||||
<ArrowRightLeft size={16} />
|
||||
<Logo width={40} height={40} />
|
||||
<Logo size={40} />
|
||||
</div>
|
||||
<div className="mt-4 space-y-4">
|
||||
<div className="flex items-center justify-center space-x-3 text-center">
|
||||
|
||||
@ -230,7 +230,7 @@ export default function Onboarding() {
|
||||
<div className="flex justify-center gap-2 md:justify-start">
|
||||
<a href="#" className="flex items-center gap-2 font-medium">
|
||||
<div className="flex size-8 items-center justify-center rounded-md">
|
||||
<Logo width={60} height={60} />
|
||||
<Logo size={60} />
|
||||
</div>
|
||||
C.O.R.E.
|
||||
</a>
|
||||
|
||||
@ -26,10 +26,22 @@ export interface ChunkedDocument {
|
||||
* Targets 1-3k tokens per chunk for better entity extraction with natural paragraph boundaries
|
||||
*/
|
||||
export class DocumentChunker {
|
||||
private readonly TARGET_CHUNK_SIZE = 3000; // Much smaller for better entity extraction
|
||||
private readonly MIN_CHUNK_SIZE = 1000;
|
||||
private readonly MAX_CHUNK_SIZE = 5000;
|
||||
private readonly MIN_PARAGRAPH_SIZE = 100; // Minimum tokens for a paragraph to be considered
|
||||
private TARGET_CHUNK_SIZE = 1000; // Much smaller for better entity extraction
|
||||
private MIN_CHUNK_SIZE = 500;
|
||||
private MAX_CHUNK_SIZE = 1500;
|
||||
private MIN_PARAGRAPH_SIZE = 100; // Minimum tokens for a paragraph to be considered
|
||||
|
||||
constructor(
|
||||
targetChunkSize: number = 1000,
|
||||
minChunkSize: number = 500,
|
||||
maxChunkSize: number = 1500,
|
||||
minParagraphSize: number = 100,
|
||||
) {
|
||||
this.TARGET_CHUNK_SIZE = targetChunkSize;
|
||||
this.MIN_CHUNK_SIZE = minChunkSize;
|
||||
this.MAX_CHUNK_SIZE = maxChunkSize;
|
||||
this.MIN_PARAGRAPH_SIZE = minParagraphSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Chunk a document into semantic sections with natural boundaries
|
||||
@ -40,10 +52,10 @@ export class DocumentChunker {
|
||||
): Promise<ChunkedDocument> {
|
||||
const documentId = crypto.randomUUID();
|
||||
const contentHash = this.generateContentHash(originalContent);
|
||||
|
||||
|
||||
// First, split by major section headers (markdown style)
|
||||
const majorSections = this.splitByMajorSections(originalContent);
|
||||
|
||||
|
||||
const chunks: DocumentChunk[] = [];
|
||||
let currentChunk = "";
|
||||
let currentChunkStart = 0;
|
||||
@ -52,23 +64,28 @@ export class DocumentChunker {
|
||||
for (const section of majorSections) {
|
||||
const sectionTokens = encode(section.content).length;
|
||||
const currentChunkTokens = encode(currentChunk).length;
|
||||
|
||||
|
||||
// If adding this section would exceed max size, finalize current chunk
|
||||
if (currentChunkTokens > 0 && currentChunkTokens + sectionTokens > this.MAX_CHUNK_SIZE) {
|
||||
if (
|
||||
currentChunkTokens > 0 &&
|
||||
currentChunkTokens + sectionTokens > this.MAX_CHUNK_SIZE
|
||||
) {
|
||||
if (currentChunkTokens >= this.MIN_CHUNK_SIZE) {
|
||||
chunks.push(this.createChunk(
|
||||
currentChunk,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
currentChunkStart + currentChunk.length,
|
||||
section.title
|
||||
));
|
||||
chunks.push(
|
||||
this.createChunk(
|
||||
currentChunk,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
currentChunkStart + currentChunk.length,
|
||||
section.title,
|
||||
),
|
||||
);
|
||||
chunkIndex++;
|
||||
currentChunk = "";
|
||||
currentChunkStart = section.startPosition;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Add section to current chunk
|
||||
if (currentChunk) {
|
||||
currentChunk += "\n\n" + section.content;
|
||||
@ -76,7 +93,7 @@ export class DocumentChunker {
|
||||
currentChunk = section.content;
|
||||
currentChunkStart = section.startPosition;
|
||||
}
|
||||
|
||||
|
||||
// If current chunk is large enough and we have a natural break, consider chunking
|
||||
const updatedChunkTokens = encode(currentChunk).length;
|
||||
if (updatedChunkTokens >= this.TARGET_CHUNK_SIZE) {
|
||||
@ -86,33 +103,41 @@ export class DocumentChunker {
|
||||
// Split at paragraph boundary if beneficial
|
||||
const optimalSplit = this.findOptimalParagraphSplit(currentChunk);
|
||||
if (optimalSplit) {
|
||||
chunks.push(this.createChunk(
|
||||
optimalSplit.beforeSplit,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
currentChunkStart + optimalSplit.beforeSplit.length,
|
||||
section.title
|
||||
));
|
||||
chunks.push(
|
||||
this.createChunk(
|
||||
optimalSplit.beforeSplit,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
currentChunkStart + optimalSplit.beforeSplit.length,
|
||||
section.title,
|
||||
),
|
||||
);
|
||||
chunkIndex++;
|
||||
currentChunk = optimalSplit.afterSplit;
|
||||
currentChunkStart = currentChunkStart + optimalSplit.beforeSplit.length;
|
||||
currentChunkStart =
|
||||
currentChunkStart + optimalSplit.beforeSplit.length;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Add remaining content as final chunk
|
||||
if (currentChunk.trim() && encode(currentChunk).length >= this.MIN_PARAGRAPH_SIZE) {
|
||||
chunks.push(this.createChunk(
|
||||
currentChunk,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
originalContent.length
|
||||
));
|
||||
if (
|
||||
currentChunk.trim() &&
|
||||
encode(currentChunk).length >= this.MIN_PARAGRAPH_SIZE
|
||||
) {
|
||||
chunks.push(
|
||||
this.createChunk(
|
||||
currentChunk,
|
||||
chunkIndex,
|
||||
currentChunkStart,
|
||||
originalContent.length,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Generate chunk hashes array
|
||||
const chunkHashes = chunks.map(chunk => chunk.contentHash);
|
||||
const chunkHashes = chunks.map((chunk) => chunk.contentHash);
|
||||
|
||||
return {
|
||||
documentId,
|
||||
@ -138,61 +163,280 @@ export class DocumentChunker {
|
||||
endPosition: number;
|
||||
}> = [];
|
||||
|
||||
// Split by markdown headers (# ## ### etc.) or common document patterns
|
||||
const headerRegex = /^(#{1,6}\s+.*$|={3,}$|-{3,}$)/gm;
|
||||
const matches = Array.from(content.matchAll(headerRegex));
|
||||
|
||||
if (matches.length === 0) {
|
||||
// No headers found, treat as single section
|
||||
sections.push({
|
||||
content: content.trim(),
|
||||
startPosition: 0,
|
||||
endPosition: content.length,
|
||||
});
|
||||
return sections;
|
||||
// Detect headers from multiple formats
|
||||
const headerMatches = this.findAllHeaders(content);
|
||||
|
||||
if (headerMatches.length === 0) {
|
||||
// No headers found, try to split by natural boundaries
|
||||
return this.splitByNaturalBoundaries(content);
|
||||
}
|
||||
|
||||
let lastIndex = 0;
|
||||
|
||||
for (let i = 0; i < matches.length; i++) {
|
||||
const match = matches[i];
|
||||
const nextMatch = matches[i + 1];
|
||||
|
||||
|
||||
for (let i = 0; i < headerMatches.length; i++) {
|
||||
const match = headerMatches[i];
|
||||
const nextMatch = headerMatches[i + 1];
|
||||
|
||||
const sectionStart = lastIndex;
|
||||
const sectionEnd = nextMatch ? nextMatch.index! : content.length;
|
||||
|
||||
const sectionEnd = nextMatch ? nextMatch.startIndex : content.length;
|
||||
|
||||
const sectionContent = content.slice(sectionStart, sectionEnd).trim();
|
||||
|
||||
|
||||
if (sectionContent) {
|
||||
sections.push({
|
||||
content: sectionContent,
|
||||
title: this.extractSectionTitle(match[0]),
|
||||
title: match.title,
|
||||
startPosition: sectionStart,
|
||||
endPosition: sectionEnd,
|
||||
});
|
||||
}
|
||||
|
||||
lastIndex = match.index! + match[0].length;
|
||||
|
||||
lastIndex = match.endIndex;
|
||||
}
|
||||
|
||||
return sections;
|
||||
}
|
||||
|
||||
private extractSectionTitle(header: string): string | undefined {
|
||||
// Extract title from markdown header
|
||||
const markdownMatch = header.match(/^#{1,6}\s+(.+)$/);
|
||||
if (markdownMatch) {
|
||||
return markdownMatch[1].trim();
|
||||
private findAllHeaders(content: string): Array<{
|
||||
title: string;
|
||||
startIndex: number;
|
||||
endIndex: number;
|
||||
level: number;
|
||||
}> {
|
||||
const headers: Array<{
|
||||
title: string;
|
||||
startIndex: number;
|
||||
endIndex: number;
|
||||
level: number;
|
||||
}> = [];
|
||||
|
||||
// Markdown headers (# ## ### etc.)
|
||||
const markdownRegex = /^(#{1,6})\s+(.+)$/gm;
|
||||
let match;
|
||||
while ((match = markdownRegex.exec(content)) !== null) {
|
||||
headers.push({
|
||||
title: match[2].trim(),
|
||||
startIndex: match.index,
|
||||
endIndex: match.index + match[0].length,
|
||||
level: match[1].length,
|
||||
});
|
||||
}
|
||||
return undefined;
|
||||
|
||||
// HTML headers (<h1>, <h2>, etc.)
|
||||
const htmlRegex = /<h([1-6])[^>]*>(.*?)<\/h[1-6]>/gi;
|
||||
while ((match = htmlRegex.exec(content)) !== null) {
|
||||
const textContent = match[2].replace(/<[^>]*>/g, "").trim();
|
||||
if (textContent) {
|
||||
headers.push({
|
||||
title: textContent,
|
||||
startIndex: match.index,
|
||||
endIndex: match.index + match[0].length,
|
||||
level: parseInt(match[1]),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Underlined headers (Setext-style)
|
||||
const setextRegex = /^(.+)\n(={3,}|-{3,})$/gm;
|
||||
while ((match = setextRegex.exec(content)) !== null) {
|
||||
const level = match[2].startsWith("=") ? 1 : 2;
|
||||
headers.push({
|
||||
title: match[1].trim(),
|
||||
startIndex: match.index,
|
||||
endIndex: match.index + match[0].length,
|
||||
level,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by position in document
|
||||
return headers.sort((a, b) => a.startIndex - b.startIndex);
|
||||
}
|
||||
|
||||
private splitByNaturalBoundaries(content: string): Array<{
|
||||
content: string;
|
||||
title?: string;
|
||||
startPosition: number;
|
||||
endPosition: number;
|
||||
}> {
|
||||
const sections: Array<{
|
||||
content: string;
|
||||
title?: string;
|
||||
startPosition: number;
|
||||
endPosition: number;
|
||||
}> = [];
|
||||
|
||||
// Look for natural boundaries: double line breaks, HTML block elements, etc.
|
||||
const boundaryPatterns = [
|
||||
/\n\s*\n\s*\n/g, // Triple line breaks (strong boundary)
|
||||
/<\/(?:div|section|article|main|p)>\s*<(?:div|section|article|main|p)/gi, // HTML block boundaries
|
||||
/\n\s*[-=*]{4,}\s*\n/g, // Horizontal rules
|
||||
];
|
||||
|
||||
let boundaries: number[] = [0];
|
||||
|
||||
for (const pattern of boundaryPatterns) {
|
||||
let match;
|
||||
while ((match = pattern.exec(content)) !== null) {
|
||||
boundaries.push(match.index);
|
||||
}
|
||||
}
|
||||
|
||||
boundaries.push(content.length);
|
||||
boundaries = [...new Set(boundaries)].sort((a, b) => a - b);
|
||||
|
||||
// If no natural boundaries found, split by token count
|
||||
if (boundaries.length <= 2) {
|
||||
return this.splitByTokenCount(content);
|
||||
}
|
||||
|
||||
for (let i = 0; i < boundaries.length - 1; i++) {
|
||||
const start = boundaries[i];
|
||||
const end = boundaries[i + 1];
|
||||
const sectionContent = content.slice(start, end).trim();
|
||||
|
||||
if (
|
||||
sectionContent &&
|
||||
encode(sectionContent).length >= this.MIN_PARAGRAPH_SIZE
|
||||
) {
|
||||
sections.push({
|
||||
content: sectionContent,
|
||||
startPosition: start,
|
||||
endPosition: end,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return sections.length > 0 ? sections : this.splitByTokenCount(content);
|
||||
}
|
||||
|
||||
private splitByTokenCount(content: string): Array<{
|
||||
content: string;
|
||||
title?: string;
|
||||
startPosition: number;
|
||||
endPosition: number;
|
||||
}> {
|
||||
const sections: Array<{
|
||||
content: string;
|
||||
title?: string;
|
||||
startPosition: number;
|
||||
endPosition: number;
|
||||
}> = [];
|
||||
|
||||
const totalTokens = encode(content).length;
|
||||
const numSections = Math.ceil(totalTokens / this.TARGET_CHUNK_SIZE);
|
||||
const charsPerSection = Math.ceil(content.length / numSections);
|
||||
|
||||
for (let i = 0; i < numSections; i++) {
|
||||
const start = i * charsPerSection;
|
||||
const end = Math.min((i + 1) * charsPerSection, content.length);
|
||||
|
||||
// Try to break at word boundaries
|
||||
let actualEnd = end;
|
||||
if (end < content.length) {
|
||||
const nextSpace = content.indexOf(" ", end);
|
||||
const nextNewline = content.indexOf("\n", end);
|
||||
const nextBoundary = Math.min(
|
||||
nextSpace === -1 ? Infinity : nextSpace,
|
||||
nextNewline === -1 ? Infinity : nextNewline,
|
||||
);
|
||||
if (nextBoundary !== Infinity && nextBoundary - end < 100) {
|
||||
actualEnd = nextBoundary;
|
||||
}
|
||||
}
|
||||
|
||||
const sectionContent = content.slice(start, actualEnd).trim();
|
||||
if (sectionContent) {
|
||||
sections.push({
|
||||
content: sectionContent,
|
||||
startPosition: start,
|
||||
endPosition: actualEnd,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return sections;
|
||||
}
|
||||
|
||||
private splitIntoParagraphs(content: string): string[] {
|
||||
// Split by double newlines (paragraph breaks) and filter out empty strings
|
||||
// Handle HTML paragraphs first
|
||||
if (
|
||||
content.includes("<p") ||
|
||||
content.includes("<div") ||
|
||||
content.includes("<section")
|
||||
) {
|
||||
return this.splitHtmlParagraphs(content);
|
||||
}
|
||||
|
||||
// Split by double newlines (paragraph breaks) for text/markdown
|
||||
return content
|
||||
.split(/\n\s*\n/)
|
||||
.map(p => p.trim())
|
||||
.filter(p => p.length > 0);
|
||||
.map((p) => p.trim())
|
||||
.filter((p) => p.length > 0);
|
||||
}
|
||||
|
||||
private splitHtmlParagraphs(content: string): string[] {
|
||||
const paragraphs: string[] = [];
|
||||
|
||||
// Split by HTML block elements
|
||||
const blockElements = [
|
||||
"p",
|
||||
"div",
|
||||
"section",
|
||||
"article",
|
||||
"li",
|
||||
"blockquote",
|
||||
"pre",
|
||||
];
|
||||
const blockRegex = new RegExp(
|
||||
`<(${blockElements.join("|")})[^>]*>.*?</\\1>`,
|
||||
"gis",
|
||||
);
|
||||
|
||||
let lastIndex = 0;
|
||||
let match;
|
||||
|
||||
while ((match = blockRegex.exec(content)) !== null) {
|
||||
// Add content before this block element
|
||||
if (match.index > lastIndex) {
|
||||
const beforeContent = content.slice(lastIndex, match.index).trim();
|
||||
if (beforeContent) {
|
||||
paragraphs.push(beforeContent);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the block element content (strip tags for text content)
|
||||
const blockContent = match[0].replace(/<[^>]*>/g, " ").trim();
|
||||
if (blockContent) {
|
||||
paragraphs.push(blockContent);
|
||||
}
|
||||
|
||||
lastIndex = match.index + match[0].length;
|
||||
}
|
||||
|
||||
// Add remaining content
|
||||
if (lastIndex < content.length) {
|
||||
const remainingContent = content.slice(lastIndex).trim();
|
||||
if (remainingContent) {
|
||||
// Clean up remaining HTML and split by newlines
|
||||
const cleaned = remainingContent.replace(/<[^>]*>/g, " ").trim();
|
||||
if (cleaned) {
|
||||
paragraphs.push(
|
||||
...cleaned.split(/\n\s*\n/).filter((p) => p.trim().length > 0),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return paragraphs.length > 0
|
||||
? paragraphs
|
||||
: this.splitTextParagraphs(content);
|
||||
}
|
||||
|
||||
private splitTextParagraphs(content: string): string[] {
|
||||
return content
|
||||
.split(/\n\s*\n/)
|
||||
.map((p) => p.trim())
|
||||
.filter((p) => p.length > 0);
|
||||
}
|
||||
|
||||
private findOptimalParagraphSplit(content: string): {
|
||||
@ -209,15 +453,18 @@ export class DocumentChunker {
|
||||
for (let i = 1; i < paragraphs.length; i++) {
|
||||
const beforeSplit = paragraphs.slice(0, i).join("\n\n");
|
||||
const afterSplit = paragraphs.slice(i).join("\n\n");
|
||||
|
||||
|
||||
const beforeTokens = encode(beforeSplit).length;
|
||||
const afterTokens = encode(afterSplit).length;
|
||||
|
||||
|
||||
// Score based on how close we get to target, avoiding too small chunks
|
||||
if (beforeTokens >= this.MIN_CHUNK_SIZE && afterTokens >= this.MIN_PARAGRAPH_SIZE) {
|
||||
if (
|
||||
beforeTokens >= this.MIN_CHUNK_SIZE &&
|
||||
afterTokens >= this.MIN_PARAGRAPH_SIZE
|
||||
) {
|
||||
const beforeDistance = Math.abs(beforeTokens - this.TARGET_CHUNK_SIZE);
|
||||
const score = 1 / (1 + beforeDistance); // Higher score for closer to target
|
||||
|
||||
|
||||
if (score > bestScore) {
|
||||
bestScore = score;
|
||||
bestSplitIndex = i;
|
||||
@ -240,12 +487,12 @@ export class DocumentChunker {
|
||||
chunkIndex: number,
|
||||
startPosition: number,
|
||||
endPosition: number,
|
||||
title?: string
|
||||
title?: string,
|
||||
): DocumentChunk {
|
||||
// Generate a concise context/title if not provided
|
||||
const context = title || this.generateChunkContext(content);
|
||||
const contentHash = this.generateContentHash(content.trim());
|
||||
|
||||
|
||||
return {
|
||||
content: content.trim(),
|
||||
chunkIndex,
|
||||
@ -258,30 +505,55 @@ export class DocumentChunker {
|
||||
}
|
||||
|
||||
private generateChunkContext(content: string): string {
|
||||
// Extract first meaningful line as context (avoiding markdown syntax)
|
||||
const lines = content.split('\n').map(line => line.trim()).filter(Boolean);
|
||||
|
||||
for (const line of lines.slice(0, 3)) {
|
||||
// Skip markdown headers and find first substantial content
|
||||
if (!line.match(/^#{1,6}\s/) && !line.match(/^[=-]{3,}$/) && line.length > 10) {
|
||||
return line.substring(0, 100) + (line.length > 100 ? "..." : "");
|
||||
// Clean content from HTML tags and markup
|
||||
const cleanContent = content
|
||||
.replace(/<[^>]*>/g, " ") // Remove HTML tags
|
||||
.replace(/#{1,6}\s+/g, "") // Remove markdown headers
|
||||
.replace(/[=-]{3,}/g, "") // Remove underline headers
|
||||
.replace(/\s+/g, " ") // Normalize whitespace
|
||||
.trim();
|
||||
|
||||
if (!cleanContent) {
|
||||
return "Document content";
|
||||
}
|
||||
|
||||
// Find first substantial sentence or line
|
||||
const sentences = cleanContent
|
||||
.split(/[.!?]+/)
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
for (const sentence of sentences.slice(0, 2)) {
|
||||
if (sentence.length > 20) {
|
||||
return (
|
||||
sentence.substring(0, 100) + (sentence.length > 100 ? "..." : "")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return "Document content";
|
||||
|
||||
// Fallback to first meaningful chunk
|
||||
const words = cleanContent.split(/\s+/).slice(0, 15).join(" ");
|
||||
return words.substring(0, 100) + (words.length > 100 ? "..." : "");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate content hash for change detection
|
||||
*/
|
||||
private generateContentHash(content: string): string {
|
||||
return crypto.createHash('sha256').update(content, 'utf8').digest('hex').substring(0, 16);
|
||||
return crypto
|
||||
.createHash("sha256")
|
||||
.update(content, "utf8")
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare chunk hashes to detect changes
|
||||
*/
|
||||
static compareChunkHashes(oldHashes: string[], newHashes: string[]): {
|
||||
static compareChunkHashes(
|
||||
oldHashes: string[],
|
||||
newHashes: string[],
|
||||
): {
|
||||
changedIndices: number[];
|
||||
changePercentage: number;
|
||||
} {
|
||||
@ -291,15 +563,16 @@ export class DocumentChunker {
|
||||
for (let i = 0; i < maxLength; i++) {
|
||||
const oldHash = oldHashes[i];
|
||||
const newHash = newHashes[i];
|
||||
|
||||
|
||||
// Mark as changed if hash is different or chunk added/removed
|
||||
if (oldHash !== newHash) {
|
||||
changedIndices.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
const changePercentage = maxLength > 0 ? (changedIndices.length / maxLength) * 100 : 0;
|
||||
|
||||
const changePercentage =
|
||||
maxLength > 0 ? (changedIndices.length / maxLength) * 100 : 0;
|
||||
|
||||
return {
|
||||
changedIndices,
|
||||
changePercentage,
|
||||
@ -312,4 +585,4 @@ export class DocumentChunker {
|
||||
static getDocumentSizeInTokens(content: string): number {
|
||||
return encode(content).length;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -312,7 +312,6 @@ export async function getSpaceStatementCount(
|
||||
MATCH (s:Statement {userId: $userId})
|
||||
WHERE s.spaceIds IS NOT NULL
|
||||
AND $spaceId IN s.spaceIds
|
||||
AND s.invalidAt IS NULL
|
||||
RETURN count(s) as statementCount
|
||||
`;
|
||||
|
||||
|
||||
@ -53,6 +53,68 @@ export const getIngestionQueue = async (id: string) => {
|
||||
});
|
||||
};
|
||||
|
||||
export const getIngestionQueueForFrontend = async (id: string) => {
|
||||
// Fetch the specific log by logId
|
||||
const log = await prisma.ingestionQueue.findUnique({
|
||||
where: { id: id },
|
||||
select: {
|
||||
id: true,
|
||||
createdAt: true,
|
||||
processedAt: true,
|
||||
status: true,
|
||||
error: true,
|
||||
type: true,
|
||||
output: true,
|
||||
data: true,
|
||||
activity: {
|
||||
select: {
|
||||
text: true,
|
||||
sourceURL: true,
|
||||
integrationAccount: {
|
||||
select: {
|
||||
integrationDefinition: {
|
||||
select: {
|
||||
name: true,
|
||||
slug: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!log) {
|
||||
throw new Response("Log not found", { status: 404 });
|
||||
}
|
||||
|
||||
// Format the response
|
||||
const integrationDef =
|
||||
log.activity?.integrationAccount?.integrationDefinition;
|
||||
const logData = log.data as any;
|
||||
|
||||
const formattedLog = {
|
||||
id: log.id,
|
||||
source: integrationDef?.name || logData?.source || "Unknown",
|
||||
ingestText:
|
||||
log.activity?.text ||
|
||||
logData?.episodeBody ||
|
||||
logData?.text ||
|
||||
"No content",
|
||||
time: log.createdAt,
|
||||
processedAt: log.processedAt,
|
||||
episodeUUID: (log.output as any)?.episodeUuid,
|
||||
status: log.status,
|
||||
error: log.error,
|
||||
sourceURL: log.activity?.sourceURL,
|
||||
integrationSlug: integrationDef?.slug,
|
||||
data: log.data,
|
||||
};
|
||||
|
||||
return formattedLog;
|
||||
};
|
||||
|
||||
export const deleteIngestionQueue = async (id: string) => {
|
||||
return await prisma.ingestionQueue.delete({
|
||||
where: {
|
||||
|
||||
@ -185,7 +185,7 @@ export const handleMCPRequest = async (
|
||||
queryParams: z.infer<typeof QueryParams>,
|
||||
) => {
|
||||
const sessionId = request.headers["mcp-session-id"] as string | undefined;
|
||||
const source = queryParams.source || "api";
|
||||
const source = queryParams.source?.toLowerCase() || "api";
|
||||
const integrations = queryParams.integrations
|
||||
? queryParams.integrations.split(",").map((s) => s.trim())
|
||||
: [];
|
||||
|
||||
@ -46,7 +46,7 @@ export class SearchService {
|
||||
validAt: options.validAt || new Date(),
|
||||
startTime: options.startTime || null,
|
||||
endTime: options.endTime || new Date(),
|
||||
includeInvalidated: options.includeInvalidated || false,
|
||||
includeInvalidated: options.includeInvalidated || true,
|
||||
entityTypes: options.entityTypes || [],
|
||||
predicateTypes: options.predicateTypes || [],
|
||||
scoreThreshold: options.scoreThreshold || 0.7,
|
||||
|
||||
@ -19,14 +19,14 @@ export async function performBM25Search(
|
||||
// Build the WHERE clause based on timeframe options
|
||||
let timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
`;
|
||||
|
||||
// If startTime is provided, add condition to filter by validAt >= startTime
|
||||
if (options.startTime) {
|
||||
timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
AND s.validAt >= $startTime
|
||||
`;
|
||||
}
|
||||
@ -109,14 +109,14 @@ export async function performVectorSearch(
|
||||
// Build the WHERE clause based on timeframe options
|
||||
let timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
`;
|
||||
|
||||
// If startTime is provided, add condition to filter by validAt >= startTime
|
||||
if (options.startTime) {
|
||||
timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
AND s.validAt >= $startTime
|
||||
`;
|
||||
}
|
||||
@ -219,14 +219,14 @@ export async function bfsTraversal(
|
||||
// Build the WHERE clause based on timeframe options
|
||||
let timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
`;
|
||||
|
||||
// If startTime is provided, add condition to filter by validAt >= startTime
|
||||
if (startTime) {
|
||||
timeframeCondition = `
|
||||
AND s.validAt <= $validAt
|
||||
AND (s.invalidAt IS NULL OR s.invalidAt > $validAt)
|
||||
AND (s.invalidAt > $validAt)
|
||||
AND s.validAt >= $startTime
|
||||
`;
|
||||
}
|
||||
|
||||
@ -485,6 +485,9 @@
|
||||
}
|
||||
|
||||
.tiptap {
|
||||
outline:none;
|
||||
border: none;
|
||||
|
||||
:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
@ -14,6 +14,7 @@ import { nanoid } from "nanoid";
|
||||
export const ExtensionSearchBodyRequest = z.object({
|
||||
userInput: z.string().min(1, "User input is required"),
|
||||
userId: z.string().min(1, "User ID is required"),
|
||||
outputType: z.string().default("markdown"),
|
||||
context: z
|
||||
.string()
|
||||
.optional()
|
||||
@ -27,7 +28,7 @@ export const extensionSearch = task({
|
||||
run: async (body: z.infer<typeof ExtensionSearchBodyRequest>) => {
|
||||
const { userInput, userId, context } =
|
||||
ExtensionSearchBodyRequest.parse(body);
|
||||
|
||||
const outputType = body.outputType;
|
||||
const randomKeyName = `extensionSearch_${nanoid(10)}`;
|
||||
|
||||
const pat = await getOrCreatePersonalAccessToken({
|
||||
@ -45,11 +46,11 @@ export const extensionSearch = task({
|
||||
execute: async ({ query }) => {
|
||||
try {
|
||||
const response = await axios.post(
|
||||
`${process.env.API_BASE_URL}/api/v1/search`,
|
||||
`https://core.heysol.ai/api/v1/search`,
|
||||
{ query },
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${pat.token}`,
|
||||
Authorization: `Bearer rc_pat_v41311t6trhr3c8sc7ap4hsbhp6pwsstzyunaazq`,
|
||||
},
|
||||
},
|
||||
);
|
||||
@ -74,22 +75,38 @@ export const extensionSearch = task({
|
||||
role: "system",
|
||||
content: `You are a specialized memory search and summarization agent. Your job is to:
|
||||
|
||||
1. First, use the searchMemory tool to find relevant information from the user's memory based on their input
|
||||
2. Then, analyze the retrieved facts and episodes to create a concise, relevant summary
|
||||
1. FIRST: Understand the user's intent and what information they need to achieve their goal
|
||||
2. THEN: Design a strategic search plan to gather that information from memory
|
||||
3. Execute multiple targeted searches using the searchMemory tool
|
||||
4. Format your response in ${outputType} and return exact content from episodes or facts without modification.
|
||||
|
||||
You have access to a searchMemory tool that can search the user's knowledge base. Use this tool with relevant search queries to find information that would help answer their question.
|
||||
SEARCH STRATEGY:
|
||||
- Analyze the user's query to understand their underlying intent and information needs
|
||||
- For comparisons: search each entity separately, then look for comparative information
|
||||
- For "how to" questions: search for procedures, examples, and related concepts
|
||||
- For troubleshooting: search for error messages, solutions, and similar issues
|
||||
- For explanations: search for definitions, examples, and context
|
||||
- Always use multiple targeted searches with different angles rather than one broad search
|
||||
- Think about what background knowledge would help answer the user's question
|
||||
|
||||
After retrieving the information, provide a concise summary (2-4 sentences) that highlights the most relevant context for answering their question. Focus on:
|
||||
- Key facts that directly relate to their question
|
||||
- Important background information or decisions
|
||||
- Relevant examples or past experiences
|
||||
- Critical context that would help provide a good answer
|
||||
EXAMPLES:
|
||||
- "Graphiti vs CORE comparison" → Intent: Compare two systems → Search: "Graphiti", "CORE", "Graphiti features", "CORE features"
|
||||
- "How to implement authentication" → Intent: Learn implementation → Search: "authentication", "authentication implementation", "login system"
|
||||
- "Why is my build failing" → Intent: Debug issue → Search: "build error", "build failure", "deployment issues"
|
||||
|
||||
If no relevant information is found, provide a brief statement indicating that.`,
|
||||
IMPORTANT: Always format your response in ${outputType}. When you find relevant content in episodes or facts, return the exact content as found - preserve lists, code blocks, formatting, and structure exactly as they appear. Present the information clearly organized in ${outputType} format with appropriate headers and structure.
|
||||
|
||||
HANDLING PARTIAL RESULTS:
|
||||
- If you find complete information for the query, present it organized by topic
|
||||
- If you find partial information, clearly state what you found and what you didn't find
|
||||
- Always provide helpful related information even if it doesn't directly answer the query
|
||||
- Example: "I didn't find specific information about X vs Y comparison, but here's what I found about X: [exact content] and about Y: [exact content], which can help you build the comparison"
|
||||
|
||||
If no relevant information is found at all, provide a brief statement indicating that in ${outputType} format.`,
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: `User input: "${userInput}"${context ? `\n\nAdditional context: ${context}` : ""}\n\nPlease search my memory for relevant information and provide a concise summary of the most important context for this question.`,
|
||||
content: `User input: "${userInput}"${context ? `\n\nAdditional context: ${context}` : ""}\n\nPlease search my memory for relevant information and provide the exact content from episodes or facts that relate to this question. Format your response in ${outputType} and do not modify or summarize the found content.`,
|
||||
},
|
||||
];
|
||||
|
||||
@ -102,7 +119,7 @@ If no relevant information is found, provide a brief statement indicating that.`
|
||||
},
|
||||
maxSteps: 5,
|
||||
temperature: 0.3,
|
||||
maxTokens: 600,
|
||||
maxTokens: 1000,
|
||||
});
|
||||
|
||||
const stream = await metadata.stream("messages", result.textStream);
|
||||
|
||||
@ -4,6 +4,7 @@ import * as cheerio from "cheerio";
|
||||
import { z } from "zod";
|
||||
import { makeModelCall } from "~/lib/model.server";
|
||||
import { summarizeImage, extractImageUrls } from "./utils";
|
||||
import { DocumentChunker } from "~/services/documentChunker.server";
|
||||
|
||||
export type PageType = "text" | "video";
|
||||
|
||||
@ -106,7 +107,7 @@ function extractTextContent(
|
||||
) // Remove UI text
|
||||
.filter((s) => s.split(" ").length > 3); // Keep sentences with multiple words
|
||||
|
||||
content = sentences.join(". ").slice(0, 10000);
|
||||
content = sentences.join(". ");
|
||||
}
|
||||
|
||||
// Clean up whitespace and normalize text
|
||||
@ -123,7 +124,7 @@ function extractTextContent(
|
||||
return {
|
||||
pageType,
|
||||
title: title.trim(),
|
||||
content: content.slice(0, 10000), // Limit content size for processing
|
||||
content, // Limit content size for processing
|
||||
images,
|
||||
metadata: {
|
||||
url,
|
||||
@ -166,6 +167,7 @@ async function processImages(
|
||||
async function generateSummary(
|
||||
title: string,
|
||||
content: string,
|
||||
lastSummary: string | null,
|
||||
imageSummaries: string[] = [],
|
||||
) {
|
||||
// Combine content with image descriptions
|
||||
@ -177,26 +179,219 @@ async function generateSummary(
|
||||
const messages: CoreMessage[] = [
|
||||
{
|
||||
role: "system",
|
||||
content: `You are a helpful assistant that creates concise summaries of web content in HTML format.
|
||||
content: `You are C.O.R.E. (Contextual Observation & Recall Engine), a smart memory enrichment system.
|
||||
|
||||
Create a clear, informative summary that captures the key points and main ideas from the provided content. The summary should:
|
||||
- Focus on the most important information and key takeaways
|
||||
- Be concise but comprehensive
|
||||
- Maintain the original context and meaning
|
||||
- Be useful for someone who wants to quickly understand the content
|
||||
- Format the summary in clean HTML using appropriate tags like <h1>, <h2>, <p>, <ul>, <li> to structure the information
|
||||
- When image descriptions are provided, integrate them naturally into the summary context
|
||||
- Replace image references with their detailed descriptions
|
||||
Create ONE enriched sentence that transforms the episode into a contextually-rich memory using SELECTIVE enrichment.
|
||||
|
||||
IMPORTANT: Return ONLY the HTML content without any markdown code blocks or formatting. Do not wrap the response in \`\`\`html or any other markdown syntax. Return the raw HTML directly.
|
||||
<smart_enrichment_process>
|
||||
Evaluate the episode and apply enrichment ONLY where it adds significant value:
|
||||
|
||||
Extract the essential information while preserving important details, facts, or insights. If image descriptions are included, weave them seamlessly into the narrative.`,
|
||||
1. PRIMARY FACTS - always preserve the core information from the episode
|
||||
2. STRATEGIC ENRICHMENT - add context only for HIGH VALUE cases (see guidelines below)
|
||||
3. VISUAL CONTENT - capture exact text on signs, objects shown, specific details from images
|
||||
4. EMOTIONAL PRESERVATION - maintain the tone and feeling of emotional exchanges
|
||||
5. IDENTITY PRESERVATION - preserve definitional and possessive relationships that establish entity connections
|
||||
|
||||
ENRICHMENT DECISION MATRIX:
|
||||
- Clear, complete statement → minimal enrichment (just temporal + attribution)
|
||||
- Unclear references → resolve with context
|
||||
- Emotional support → preserve feeling, avoid historical dumping
|
||||
- New developments → connect to ongoing narrative
|
||||
- Visual content → extract specific details as primary facts
|
||||
</smart_enrichment_process>
|
||||
|
||||
<chunk_continuity_rules>
|
||||
When processing content that appears to be part of a larger document or conversation (indicated by session context):
|
||||
|
||||
1. BUILD ON CONTEXT - Use the previous session context to continue the narrative naturally without restating established information
|
||||
2. MAINTAIN FLOW - Each chunk should add new information while referencing the established context appropriately
|
||||
3. NO REDUNDANT TEMPORAL ANCHORING - Don't repeat the same date markers in sequential chunks unless the timeframe actually changes
|
||||
4. FOCUS ON PROGRESSION - Emphasize what's new or developing in the current chunk relative to what's already been established
|
||||
5. SEAMLESS CONTINUATION - When session context exists, treat the current content as a continuation rather than a standalone episode
|
||||
</chunk_continuity_rules>
|
||||
|
||||
<context_usage_decision>
|
||||
When related memories/previous episodes are provided, evaluate if they improve understanding:
|
||||
|
||||
USE CONTEXT when current episode has:
|
||||
- Unclear pronouns ("she", "it", "they" without clear antecedent)
|
||||
- Vague references ("the agency", "the event" without definition in current episode)
|
||||
- Continuation phrases ("following up", "as we discussed")
|
||||
- Incomplete information that context clarifies
|
||||
|
||||
IGNORE CONTEXT when current episode is:
|
||||
- Clear and self-contained ("I got a job in New York")
|
||||
- Simple emotional responses ("Thanks, that's great!")
|
||||
- Generic encouragement ("You're doing awesome!")
|
||||
- Complete statements with all necessary information
|
||||
|
||||
DECISION RULE: If the current episode can be understood perfectly without context, don't use it. Only use context when it genuinely clarifies or
|
||||
resolves ambiguity.
|
||||
</context_usage_decision>
|
||||
|
||||
<visual_content_capture>
|
||||
For episodes with images/photos, EXTRACT:
|
||||
- Exact text on signs, posters, labels (e.g., "Trans Lives Matter")
|
||||
- Objects, people, settings, activities shown
|
||||
- Specific visual details that add context
|
||||
Integrate visual content as primary facts, not descriptions.
|
||||
</visual_content_capture>
|
||||
|
||||
<strategic_enrichment>
|
||||
When related memories are provided, apply SELECTIVE enrichment:
|
||||
|
||||
HIGH VALUE ENRICHMENT (always include):
|
||||
- Temporal resolution: "last week" → "June 20, 2023"
|
||||
- Entity disambiguation: "she" → "Caroline" when unclear
|
||||
- Missing critical context: "the agency" → "Bright Futures Adoption Agency" (first mention only)
|
||||
- New developments: connecting current facts to ongoing storylines
|
||||
- Identity-defining possessives: "my X, Y" → preserve the relationship between person and Y as their X
|
||||
- Definitional phrases: maintain the defining relationship, not just the entity reference
|
||||
- Origin/source connections: preserve "from my X" relationships
|
||||
|
||||
LOW VALUE ENRICHMENT (usually skip):
|
||||
- Obvious references: "Thanks, Mel!" doesn't need Melanie's full context
|
||||
- Support/encouragement statements: emotional exchanges rarely need historical anchoring
|
||||
- Already clear entities: don't replace pronouns when reference is obvious
|
||||
- Repetitive context: never repeat the same descriptive phrase within a conversation
|
||||
- Ongoing conversations: don't re-establish context that's already been set
|
||||
- Emotional responses: keep supportive statements simple and warm
|
||||
- Sequential topics: reference previous topics minimally ("recent X" not full description)
|
||||
|
||||
ANTI-BLOAT RULES:
|
||||
- If the original statement is clear and complete, add minimal enrichment
|
||||
- Never use the same contextual phrase twice in one conversation
|
||||
- Focus on what's NEW, not what's already established
|
||||
- Preserve emotional tone - don't bury feelings in facts
|
||||
- ONE CONTEXT REFERENCE PER TOPIC: Don't keep referencing "the charity race" with full details
|
||||
- STOP AT CLARITY: If original meaning is clear, don't add backstory
|
||||
- AVOID COMPOUND ENRICHMENT: Don't chain multiple contextual additions in one sentence
|
||||
|
||||
CONTEXT FATIGUE PREVENTION:
|
||||
- After mentioning a topic once with full context, subsequent references should be minimal
|
||||
- Use "recent" instead of repeating full details: "recent charity race" not "the May 20, 2023 charity race for mental health"
|
||||
- Focus on CURRENT episode facts, not historical anchoring
|
||||
- Don't re-explain what's already been established in the conversation
|
||||
|
||||
ENRICHMENT SATURATION RULE:
|
||||
Once a topic has been enriched with full context in the conversation, subsequent mentions should be minimal:
|
||||
- First mention: "May 20, 2023 charity race for mental health"
|
||||
- Later mentions: "the charity race" or "recent race"
|
||||
- Don't re-explain established context
|
||||
|
||||
IDENTITY AND DEFINITIONAL RELATIONSHIP PRESERVATION:
|
||||
- Preserve possessive phrases that define relationships: "my X, Y" → "Y, [person]'s X"
|
||||
- Keep origin/source relationships: "from my X" → preserve the X connection
|
||||
- Preserve family/professional/institutional relationships expressed through possessives
|
||||
- Don't reduce identity-rich phrases to simple location/entity references
|
||||
</strategic_enrichment>
|
||||
|
||||
<quality_control>
|
||||
RETURN "NOTHING_TO_SUMMARISE" if content consists ONLY of:
|
||||
- Pure generic responses without context ("awesome", "thanks", "okay" with no subject)
|
||||
- Empty pleasantries with no substance ("how are you", "have a good day")
|
||||
- Standalone acknowledgments without topic reference ("got it", "will do")
|
||||
- Truly vague encouragement with no specific subject matter ("great job" with no context)
|
||||
- Already captured information without new connections
|
||||
- Technical noise or system messages
|
||||
|
||||
STORE IN MEMORY if content contains:
|
||||
- Specific facts, names, dates, or detailed information
|
||||
- Personal details, preferences, or decisions
|
||||
- Concrete plans, commitments, or actions
|
||||
- Visual content with specific details
|
||||
- Temporal information that can be resolved
|
||||
- New connections to existing knowledge
|
||||
- Encouragement that references specific activities or topics
|
||||
- Statements expressing personal values or beliefs
|
||||
- Support that's contextually relevant to ongoing conversations
|
||||
- Responses that reveal relationship dynamics or personal characteristics
|
||||
|
||||
MEANINGFUL ENCOURAGEMENT EXAMPLES (STORE these):
|
||||
- "Taking time for yourself is so important" → Shows personal values about self-care
|
||||
- "You're doing an awesome job looking after yourself and your family" → Specific topic reference
|
||||
- "That charity race sounds great" → Contextually relevant support
|
||||
- "Your future family is gonna be so lucky" → Values-based encouragement about specific situation
|
||||
|
||||
EMPTY ENCOURAGEMENT EXAMPLES (DON'T STORE these):
|
||||
- "Great job!" (no context)
|
||||
- "Awesome!" (no subject)
|
||||
- "Keep it up!" (no specific reference)
|
||||
</quality_control>
|
||||
|
||||
<enrichment_examples>
|
||||
HIGH VALUE enrichment:
|
||||
- Original: "She said yes!"
|
||||
- Enriched: "Caroline received approval from Bright Futures Agency for her adoption application."
|
||||
- Why: Resolves unclear pronoun, adds temporal context, identifies the approving entity
|
||||
|
||||
MINIMAL enrichment (emotional support):
|
||||
- Original: "You'll be an awesome mom! Good luck!"
|
||||
- Enriched: "Melanie encouraged Caroline about her adoption plans, affirming she would be an awesome mother."
|
||||
- Why: Simple temporal context, preserve emotional tone, no historical dumping
|
||||
|
||||
ANTI-BLOAT example (what NOT to do):
|
||||
- Wrong: "Melanie praised Caroline for her commitment to creating a family for children in need through adoption—supported by the inclusive Adoption Agency whose brochure and signs reading 'new arrival' and 'information and domestic building' Caroline had shared earlier that day—and encouraged her by affirming she would be an awesome mom."
|
||||
- Right: "Melanie encouraged Caroline about her adoption plans, affirming she would be an awesome mother."
|
||||
|
||||
CLEAR REFERENCE (minimal enrichment):
|
||||
- Original: "Thanks, Caroline! The event was really thought-provoking."
|
||||
- Enriched: "Melanie thanked Caroline and described the charity race as thought-provoking."
|
||||
- Why: Clear context doesn't need repetitive anchoring
|
||||
|
||||
CONVERSATION FLOW EXAMPLES:
|
||||
❌ WRONG (context fatigue): "reinforcing their ongoing conversation about mental health following Melanie's participation in the recent charity race for mental health"
|
||||
✅ RIGHT (minimal reference): "reinforcing their conversation about mental health"
|
||||
|
||||
❌ WRONG (compound enrichment): "as she begins the process of turning her dream of giving children a loving home into reality and considers specific adoption agencies"
|
||||
✅ RIGHT (focused): "as she begins pursuing her adoption plans"
|
||||
|
||||
❌ WRONG (over-contextualization): "following her participation in the May 20, 2023 charity race for mental health awareness"
|
||||
✅ RIGHT (after first mention): "following the recent charity race"
|
||||
|
||||
GENERIC IDENTITY PRESERVATION EXAMPLES:
|
||||
- Original: "my hometown, Boston" → Enriched: "Boston, [person]'s hometown"
|
||||
- Original: "my workplace, Google" → Enriched: "Google, [person]'s workplace"
|
||||
- Original: "my sister, Sarah" → Enriched: "Sarah, [person]'s sister"
|
||||
- Original: "from my university, MIT" → Enriched: "from MIT, [person]'s university"
|
||||
|
||||
POSSESSIVE + APPOSITIVE PATTERNS (Critical for Relations):
|
||||
- Original: "my colleague at my office, Microsoft"
|
||||
- Enriched: "his colleague at Microsoft, David's workplace"
|
||||
- Why: Preserves both the work relationship AND the employment identity
|
||||
|
||||
- Original: "my friend from my university, Stanford"
|
||||
- Enriched: "her friend from Stanford, Lisa's alma mater"
|
||||
- Why: Establishes both the friendship and educational institution identity
|
||||
|
||||
- Original: "my neighbor in my city, Chicago"
|
||||
- Enriched: "his neighbor in Chicago, Mark's hometown"
|
||||
- Why: Maintains both the neighbor relationship and residence identity
|
||||
|
||||
❌ WRONG (loses relationships): reduces to just entity names without preserving the defining relationship
|
||||
✅ RIGHT (preserves identity): maintains the possessive/definitional connection that establishes entity relationships
|
||||
</enrichment_examples>
|
||||
|
||||
OUTPUT FORMAT REQUIREMENTS:
|
||||
- Provide your response directly in HTML format
|
||||
- Use appropriate HTML tags for structure and formatting (p, h1-h6, ul, ol, strong, em, etc.)
|
||||
- Do NOT wrap your response in any special tags like <output>
|
||||
- If there is nothing worth summarizing, return: NOTHING_TO_SUMMARISE
|
||||
|
||||
FORMAT EXAMPLES:
|
||||
✅ CORRECT: <p>Caroline shared her adoption plans with Melanie, discussing the application process and timeline.</p>
|
||||
✅ CORRECT: <h3>Italy Trip Planning</h3><p>User explored romantic destinations for their anniversary celebration.</p>
|
||||
✅ CORRECT: NOTHING_TO_SUMMARISE
|
||||
❌ WRONG: Plain text without HTML formatting
|
||||
`,
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: `Title: ${title}
|
||||
Content: ${contentWithImages}
|
||||
|
||||
<SAME_SESSION_CONTEXT>
|
||||
${lastSummary || "No previous episodes in this session"}
|
||||
</SAME_SESSION_CONTEXT>
|
||||
Please provide a concise summary of this content in HTML format.`,
|
||||
},
|
||||
];
|
||||
@ -209,6 +404,33 @@ Please provide a concise summary of this content in HTML format.`,
|
||||
);
|
||||
}
|
||||
|
||||
async function* generateSummaryWithChunks(
|
||||
content: string,
|
||||
title: string,
|
||||
imageSummaries: string[],
|
||||
) {
|
||||
const documentchunk = new DocumentChunker();
|
||||
const chunks = await documentchunk.chunkDocument(content, title);
|
||||
|
||||
let lastSummary = "";
|
||||
for await (const chunk of chunks.chunks) {
|
||||
const response = (await generateSummary(
|
||||
chunk.title || title,
|
||||
chunk.content,
|
||||
lastSummary ? lastSummary : null,
|
||||
imageSummaries,
|
||||
)) as any;
|
||||
|
||||
for await (const res of response.textStream) {
|
||||
lastSummary += res;
|
||||
yield res;
|
||||
}
|
||||
|
||||
// Use the complete current chunk summary as context for the next chunk
|
||||
lastSummary = lastSummary.trim();
|
||||
}
|
||||
}
|
||||
|
||||
export const extensionSummary = task({
|
||||
id: "extensionSummary",
|
||||
maxDuration: 3000,
|
||||
@ -240,13 +462,13 @@ export const extensionSummary = task({
|
||||
|
||||
// Generate summary for text content with image descriptions
|
||||
if (extraction.content.length > 0 || imageSummaries.length > 0) {
|
||||
const response = (await generateSummary(
|
||||
extraction.title,
|
||||
const response = generateSummaryWithChunks(
|
||||
extraction.content,
|
||||
extraction.title,
|
||||
imageSummaries,
|
||||
)) as any;
|
||||
) as any;
|
||||
|
||||
const stream = await metadata.stream("messages", response.textStream);
|
||||
const stream = await metadata.stream("messages", response);
|
||||
|
||||
let finalText: string = "";
|
||||
for await (const chunk of stream) {
|
||||
|
||||
@ -292,7 +292,9 @@ async function generateSpaceSummary(
|
||||
spaceId: space.uuid,
|
||||
spaceName: space.name,
|
||||
spaceDescription: space.description as string,
|
||||
statementCount: existingSummary?.statementCount ? existingSummary?.statementCount + statements.length : statements.length,
|
||||
statementCount: existingSummary?.statementCount
|
||||
? existingSummary?.statementCount + statements.length
|
||||
: statements.length,
|
||||
summary: summaryResult.summary,
|
||||
keyEntities: summaryResult.keyEntities || [],
|
||||
themes: summaryResult.themes,
|
||||
@ -354,7 +356,9 @@ function createUnifiedSummaryPrompt(
|
||||
): CoreMessage[] {
|
||||
// If there are no statements and no previous summary, we cannot generate a meaningful summary
|
||||
if (statements.length === 0 && previousSummary === null) {
|
||||
throw new Error("Cannot generate summary without statements or existing summary");
|
||||
throw new Error(
|
||||
"Cannot generate summary without statements or existing summary",
|
||||
);
|
||||
}
|
||||
|
||||
const statementsText = statements
|
||||
@ -506,8 +510,8 @@ async function getExistingSummary(spaceId: string): Promise<{
|
||||
return {
|
||||
summary: existingSummary.summary,
|
||||
themes: existingSummary.themes,
|
||||
lastUpdated: existingSummary.lastPatternTrigger,
|
||||
statementCount: existingSummary.statementCount,
|
||||
lastUpdated: existingSummary.lastPatternTrigger as Date,
|
||||
statementCount: existingSummary.statementCount as number,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@ -27,6 +27,7 @@ export const addToQueue = async (
|
||||
data: {
|
||||
spaceId: body.spaceId ? body.spaceId : null,
|
||||
data: body,
|
||||
type: body.type,
|
||||
status: IngestionStatus.PENDING,
|
||||
priority: 1,
|
||||
workspaceId: user.Workspace.id,
|
||||
|
||||
@ -78,7 +78,6 @@
|
||||
"@tiptap/starter-kit": "2.11.9",
|
||||
"@trigger.dev/react-hooks": "4.0.0-v4-beta.22",
|
||||
"@trigger.dev/sdk": "4.0.0-v4-beta.22",
|
||||
"@types/react-calendar-heatmap": "^1.9.0",
|
||||
"ai": "4.3.14",
|
||||
"axios": "^1.10.0",
|
||||
"bullmq": "^5.53.2",
|
||||
@ -129,7 +128,6 @@
|
||||
"remix-typedjson": "0.3.1",
|
||||
"remix-utils": "^7.7.0",
|
||||
"react-markdown": "10.1.0",
|
||||
"sdk": "link:@modelcontextprotocol/sdk",
|
||||
"sigma": "^3.0.2",
|
||||
"simple-oauth2": "^5.1.0",
|
||||
"tailwind-merge": "^2.6.0",
|
||||
|
||||
@ -123,6 +123,8 @@ model IngestionQueue {
|
||||
status IngestionStatus
|
||||
priority Int @default(0)
|
||||
|
||||
type String?
|
||||
|
||||
workspaceId String
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id])
|
||||
|
||||
|
||||
@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "IngestionQueue" ADD COLUMN "type" TEXT;
|
||||
@ -123,6 +123,8 @@ model IngestionQueue {
|
||||
status IngestionStatus
|
||||
priority Int @default(0)
|
||||
|
||||
type String?
|
||||
|
||||
workspaceId String
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id])
|
||||
|
||||
|
||||
10
pnpm-lock.yaml
generated
10
pnpm-lock.yaml
generated
@ -472,9 +472,6 @@ importers:
|
||||
'@trigger.dev/sdk':
|
||||
specifier: 4.0.0-v4-beta.22
|
||||
version: 4.0.0-v4-beta.22(ai@4.3.14(react@18.3.1)(zod@3.23.8))(zod@3.23.8)
|
||||
'@types/react-calendar-heatmap':
|
||||
specifier: ^1.9.0
|
||||
version: 1.9.0
|
||||
ai:
|
||||
specifier: 4.3.14
|
||||
version: 4.3.14(react@18.3.1)(zod@3.23.8)
|
||||
@ -5492,9 +5489,6 @@ packages:
|
||||
'@types/range-parser@1.2.7':
|
||||
resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==}
|
||||
|
||||
'@types/react-calendar-heatmap@1.9.0':
|
||||
resolution: {integrity: sha512-BH8M/nsXoLGa3hxWbrq3guPwlK0cV+w1i4c/ktrTxTzN5fBths6WbeUZ4dK0+tE76qiGoVSo9Tse8WVVuMIV+w==}
|
||||
|
||||
'@types/react-dom@18.2.18':
|
||||
resolution: {integrity: sha512-TJxDm6OfAX2KJWJdMEVTwWke5Sc/E/RlnPGvGfS0W7+6ocy2xhDVQVh/KvC2Uf7kACs+gDytdusDSdWfWkaNzw==}
|
||||
|
||||
@ -17810,10 +17804,6 @@ snapshots:
|
||||
|
||||
'@types/range-parser@1.2.7': {}
|
||||
|
||||
'@types/react-calendar-heatmap@1.9.0':
|
||||
dependencies:
|
||||
'@types/react': 18.2.69
|
||||
|
||||
'@types/react-dom@18.2.18':
|
||||
dependencies:
|
||||
'@types/react': 18.2.69
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user