Mirror of https://github.com/eliasstepanik/core.git, synced 2026-01-25 03:38:27 +00:00

Commit: fix: telemetry and trigger deployment
parent 2030cebdc0
commit 17b8f9520b
@@ -1,10 +1,4 @@
-import { EllipsisVertical, Trash, Copy } from "lucide-react";
-import {
-  DropdownMenu,
-  DropdownMenuContent,
-  DropdownMenuItem,
-  DropdownMenuTrigger,
-} from "../ui/dropdown-menu";
+import { Trash, Copy, RotateCw } from "lucide-react";
 import { Button } from "../ui/button";
 import {
   AlertDialog,
@@ -22,11 +16,13 @@ import { toast } from "~/hooks/use-toast";
 interface LogOptionsProps {
   id: string;
+  status?: string;
 }

-export const LogOptions = ({ id }: LogOptionsProps) => {
+export const LogOptions = ({ id, status }: LogOptionsProps) => {
   const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
   const deleteFetcher = useFetcher<{ success: boolean }>();
+  const retryFetcher = useFetcher<{ success: boolean }>();
   const navigate = useNavigate();

   const handleDelete = () => {
@@ -58,22 +54,54 @@ export const LogOptions = ({ id }: LogOptionsProps) => {
     }
   };

+  const handleRetry = () => {
+    retryFetcher.submit(
+      {},
+      {
+        method: "POST",
+        action: `/api/v1/logs/${id}/retry`,
+      },
+    );
+  };
+
   useEffect(() => {
     if (deleteFetcher.state === "idle" && deleteFetcher.data?.success) {
       navigate(`/home/inbox`);
     }
   }, [deleteFetcher.state, deleteFetcher.data]);

+  useEffect(() => {
+    if (retryFetcher.state === "idle" && retryFetcher.data?.success) {
+      toast({
+        title: "Success",
+        description: "Episode retry initiated",
+      });
+      // Reload the page to reflect the new status
+      window.location.reload();
+    }
+  }, [retryFetcher.state, retryFetcher.data]);
+
   return (
     <>
       <div className="flex items-center gap-2">
+        {status === "FAILED" && (
+          <Button
+            variant="secondary"
+            size="sm"
+            className="gap-2 rounded"
+            onClick={handleRetry}
+            disabled={retryFetcher.state !== "idle"}
+          >
+            <RotateCw size={15} /> Retry
+          </Button>
+        )}
         <Button
           variant="secondary"
           size="sm"
           className="gap-2 rounded"
           onClick={handleCopy}
         >
-          <Copy size={15} /> Copy ID
+          <Copy size={15} /> Copy Id
         </Button>
         <Button
           variant="secondary"
@@ -74,7 +74,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
     <div className={cn("flex w-full min-w-[0px] shrink flex-col")}>
       <div className="flex w-full items-center justify-between gap-4">
         <div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
-          <div className={cn("truncate text-left")}>
+          <div className={cn("truncate text-left text-base")}>
             {text.replace(/<[^>]+>/g, "")}
           </div>
         </div>
@@ -97,7 +97,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
       </div>

       <div className="flex items-center justify-between">
-        <div className="flex items-center gap-1">
+        <div className="flex items-center gap-1 font-light">
           {getIconForAuthorise(log.source.toLowerCase(), 12, undefined)}
           {log.source.toLowerCase()}
         </div>
@@ -99,7 +99,7 @@ export const SpaceOptions = ({ id, name, description }: SpaceOptionsProps) => {
       <DropdownMenuContent align="end">
         <DropdownMenuItem onClick={handleCopy}>
           <Button variant="link" size="sm" className="gap-2 rounded">
-            <Copy size={15} /> Copy ID
+            <Copy size={15} /> Copy Id
           </Button>
         </DropdownMenuItem>
         <DropdownMenuItem onClick={() => setEditDialogOpen(true)}>
@@ -17,6 +17,7 @@ import { renderToPipeableStream } from "react-dom/server";
 import { initializeStartupServices } from "./utils/startup";
 import { handleMCPRequest, handleSessionRequest } from "~/services/mcp.server";
 import { authenticateHybridRequest } from "~/services/routeBuilders/apiBuilder.server";
+import { trackError } from "~/services/telemetry.server";

 const ABORT_DELAY = 5_000;

@@ -27,6 +28,42 @@ async function init() {

 init();

+/**
+ * Global error handler for all server-side errors
+ * This catches errors from loaders, actions, and rendering
+ * Automatically tracks all errors to telemetry
+ */
+export function handleError(
+  error: unknown,
+  { request }: { request: Request },
+): void {
+  // Don't track 404s or aborted requests as errors
+  if (
+    error instanceof Response &&
+    (error.status === 404 || error.status === 304)
+  ) {
+    return;
+  }
+
+  // Track error to telemetry
+  if (error instanceof Error) {
+    const url = new URL(request.url);
+    trackError(error, {
+      url: request.url,
+      path: url.pathname,
+      method: request.method,
+      userAgent: request.headers.get("user-agent") || "unknown",
+      referer: request.headers.get("referer") || undefined,
+    }).catch((trackingError) => {
+      // If telemetry tracking fails, just log it - don't break the app
+      console.error("Failed to track error:", trackingError);
+    });
+  }
+
+  // Always log to console for development/debugging
+  console.error(error);
+}
+
 export default function handleRequest(
   request: Request,
   responseStatusCode: number,
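For orientation (not part of the diff itself): `handleError` is the export Remix invokes for unhandled loader, action, and render errors, so the telemetry hook above needs no per-route wiring. A minimal sketch of how it behaves; the import path, URLs, and errors below are illustrative only:

```typescript
import { handleError } from "./entry.server"; // assumed relative path for illustration

// Thrown 404/304 Responses are ignored - no telemetry event is emitted.
handleError(new Response("Not Found", { status: 404 }), {
  request: new Request("https://example.com/home/inbox"),
});

// Real Errors are forwarded to trackError with the request context
// (url, path, method, user-agent, referer) and are still logged to the console.
handleError(new Error("boom"), {
  request: new Request("https://example.com/api/v1/search", { method: "POST" }),
});
```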
@@ -3,102 +3,126 @@ import { isValidDatabaseUrl } from "./utils/db";
 import { isValidRegex } from "./utils/regex";
 import { LLMModelEnum } from "@core/types";

-const EnvironmentSchema = z.object({
+const EnvironmentSchema = z
+  .object({
     NODE_ENV: z.union([
       z.literal("development"),
       z.literal("production"),
       z.literal("test"),
     ]),
     POSTGRES_DB: z.string(),
     DATABASE_URL: z
       .string()
       .refine(
         isValidDatabaseUrl,
         "DATABASE_URL is invalid, for details please check the additional output above this message.",
       ),
     DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10),
     DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60),
     DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20),
     DIRECT_URL: z
       .string()
       .refine(
         isValidDatabaseUrl,
         "DIRECT_URL is invalid, for details please check the additional output above this message.",
       ),
     DATABASE_READ_REPLICA_URL: z.string().optional(),
     SESSION_SECRET: z.string(),
     ENCRYPTION_KEY: z.string(),
     MAGIC_LINK_SECRET: z.string(),
     WHITELISTED_EMAILS: z
       .string()
       .refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.")
       .optional(),
     ADMIN_EMAILS: z
       .string()
       .refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.")
       .optional(),

     APP_ENV: z.string().default(process.env.NODE_ENV),
     LOGIN_ORIGIN: z.string().default("http://localhost:5173"),
     APP_ORIGIN: z.string().default("http://localhost:5173"),
-  POSTHOG_PROJECT_KEY: z.string().default(""),

-  //storage
+    // Telemetry
+    POSTHOG_PROJECT_KEY: z
+      .string()
+      .default("phc_SwfGIzzX5gh5bazVWoRxZTBhkr7FwvzArS0NRyGXm1a"),
+    TELEMETRY_ENABLED: z.coerce.boolean().default(true),
+    TELEMETRY_ANONYMOUS: z.coerce.boolean().default(false),
+
+    //storage
     ACCESS_KEY_ID: z.string().optional(),
     SECRET_ACCESS_KEY: z.string().optional(),
     BUCKET: z.string().optional(),

     // google auth
     AUTH_GOOGLE_CLIENT_ID: z.string().optional(),
     AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(),

     ENABLE_EMAIL_LOGIN: z.coerce.boolean().default(true),

     //Redis
     REDIS_HOST: z.string().default("localhost"),
     REDIS_PORT: z.coerce.number().default(6379),
     REDIS_TLS_DISABLED: z.coerce.boolean().default(true),

     //Neo4j
     NEO4J_URI: z.string(),
     NEO4J_USERNAME: z.string(),
     NEO4J_PASSWORD: z.string(),

     //OpenAI
     OPENAI_API_KEY: z.string(),
     ANTHROPIC_API_KEY: z.string().optional(),

     EMAIL_TRANSPORT: z.string().optional(),
     FROM_EMAIL: z.string().optional(),
     REPLY_TO_EMAIL: z.string().optional(),
     RESEND_API_KEY: z.string().optional(),
     SMTP_HOST: z.string().optional(),
     SMTP_PORT: z.coerce.number().optional(),
     SMTP_SECURE: z.coerce.boolean().optional(),
     SMTP_USER: z.string().optional(),
     SMTP_PASSWORD: z.string().optional(),

     //Trigger
-  TRIGGER_PROJECT_ID: z.string(),
-  TRIGGER_SECRET_KEY: z.string(),
-  TRIGGER_API_URL: z.string(),
+    TRIGGER_PROJECT_ID: z.string().optional(),
+    TRIGGER_SECRET_KEY: z.string().optional(),
+    TRIGGER_API_URL: z.string().optional(),
     TRIGGER_DB: z.string().default("trigger"),

     // Model envs
     MODEL: z.string().default(LLMModelEnum.GPT41),
     EMBEDDING_MODEL: z.string().default("mxbai-embed-large"),
     EMBEDDING_MODEL_SIZE: z.string().default("1024"),
     OLLAMA_URL: z.string().optional(),
     COHERE_API_KEY: z.string().optional(),
     COHERE_SCORE_THRESHOLD: z.string().default("0.3"),

     AWS_ACCESS_KEY_ID: z.string().optional(),
     AWS_SECRET_ACCESS_KEY: z.string().optional(),
     AWS_REGION: z.string().optional(),

     // Queue provider
     QUEUE_PROVIDER: z.enum(["trigger", "bullmq"]).default("trigger"),
-});
+  })
+  .refine(
+    (data) => {
+      // If QUEUE_PROVIDER is "trigger", then Trigger.dev variables must be present
+      if (data.QUEUE_PROVIDER === "trigger") {
+        return !!(
+          data.TRIGGER_PROJECT_ID &&
+          data.TRIGGER_SECRET_KEY &&
+          data.TRIGGER_API_URL
+        );
+      }
+      return true;
+    },
+    {
+      message:
+        "TRIGGER_PROJECT_ID, TRIGGER_SECRET_KEY, and TRIGGER_API_URL are required when QUEUE_PROVIDER=trigger",
+    },
+  );

 export type Environment = z.infer<typeof EnvironmentSchema>;
 export const env = EnvironmentSchema.parse(process.env);
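A small sketch (not part of the commit) of how the new cross-field `.refine` behaves at parse time: with `QUEUE_PROVIDER=trigger` the three Trigger.dev variables must be present, while `QUEUE_PROVIDER=bullmq` may omit them. The schema below is a reduced stand-in with placeholder values:

```typescript
import { z } from "zod";

// Reduced stand-in for the EnvironmentSchema above, to illustrate the refine rule.
const Schema = z
  .object({
    QUEUE_PROVIDER: z.enum(["trigger", "bullmq"]).default("trigger"),
    TRIGGER_PROJECT_ID: z.string().optional(),
    TRIGGER_SECRET_KEY: z.string().optional(),
    TRIGGER_API_URL: z.string().optional(),
  })
  .refine(
    (data) =>
      data.QUEUE_PROVIDER !== "trigger" ||
      !!(data.TRIGGER_PROJECT_ID && data.TRIGGER_SECRET_KEY && data.TRIGGER_API_URL),
    { message: "TRIGGER_* variables are required when QUEUE_PROVIDER=trigger" },
  );

Schema.parse({ QUEUE_PROVIDER: "bullmq" }); // ok: Trigger vars may be omitted
Schema.parse({ QUEUE_PROVIDER: "trigger" }); // throws ZodError with the message above
```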
@@ -6,6 +6,7 @@ import { useOptionalUser, useUserChanged } from "./useUser";

 export const usePostHog = (
   apiKey?: string,
+  telemetryEnabled = true,
   logging = false,
   debug = false,
 ): void => {
@@ -15,6 +16,8 @@ export const usePostHog = (

   //start PostHog once
   useEffect(() => {
+    // Respect telemetry settings
+    if (!telemetryEnabled) return;
     if (apiKey === undefined || apiKey === "") return;
     if (postHogInitialized.current === true) return;
     if (logging) console.log("Initializing PostHog");
@@ -27,19 +30,26 @@ export const usePostHog = (
         if (logging) console.log("PostHog loaded");
         if (user !== undefined) {
           if (logging) console.log("Loaded: Identifying user", user);
-          posthog.identify(user.id, { email: user.email });
+          posthog.identify(user.id, {
+            email: user.email,
+            name: user.name,
+          });
         }
       },
     });
     postHogInitialized.current = true;
-  }, [apiKey, logging, user]);
+  }, [apiKey, telemetryEnabled, logging, user]);

   useUserChanged((user) => {
     if (postHogInitialized.current === false) return;
+    if (!telemetryEnabled) return;
     if (logging) console.log("User changed");
     if (user) {
       if (logging) console.log("Identifying user", user);
-      posthog.identify(user.id, { email: user.email });
+      posthog.identify(user.id, {
+        email: user.email,
+        name: user.name,
+      });
     } else {
       if (logging) console.log("Resetting user");
       posthog.reset();
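Because `telemetryEnabled` is inserted as the second positional parameter, any existing caller that previously passed `logging` in that slot has to be updated (the root route change further down does exactly that). A hedged usage sketch; the wrapper component is illustrative, not from the commit:

```typescript
import { usePostHog } from "~/hooks/usePostHog";

// Illustrative wrapper: the hook's signature is now
// usePostHog(apiKey?, telemetryEnabled = true, logging = false, debug = false).
export function AnalyticsGate(props: { apiKey?: string; telemetryEnabled: boolean }) {
  // With telemetryEnabled = false, both the one-time init effect and the
  // user-changed identify/reset handlers return early, so PostHog never loads.
  usePostHog(props.apiKey, props.telemetryEnabled, /* logging */ false);
  return null;
}
```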
@@ -7,6 +7,7 @@ import { prisma } from "~/trigger/utils/prisma";
 import { EpisodeType } from "@core/types";
 import { deductCredits, hasCredits } from "~/trigger/utils/utils";
 import { assignEpisodesToSpace } from "~/services/graphModels/space";
+import { trackEvent, trackError } from "~/services/telemetry.server";

 export const IngestBodyRequest = z.object({
   episodeBody: z.string(),
@@ -9,11 +9,13 @@ import {
   enqueueIngestDocument,
   enqueueIngestEpisode,
 } from "~/lib/queue-adapter.server";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const addToQueue = async (
   rawBody: z.infer<typeof IngestBodyRequest>,
   userId: string,
   activityId?: string,
+  ingestionQueueId?: string,
 ) => {
   const body = { ...rawBody, source: rawBody.source.toLowerCase() };
   const user = await prisma.user.findFirst({
@@ -41,8 +43,18 @@ export const addToQueue = async (
     throw new Error("no credits");
   }

-  const queuePersist = await prisma.ingestionQueue.create({
-    data: {
+  // Upsert: update existing or create new ingestion queue entry
+  const queuePersist = await prisma.ingestionQueue.upsert({
+    where: {
+      id: ingestionQueueId || "non-existent-id", // Use provided ID or dummy ID to force create
+    },
+    update: {
+      data: body,
+      type: body.type,
+      status: IngestionStatus.PENDING,
+      error: null,
+    },
+    create: {
       data: body,
       type: body.type,
       status: IngestionStatus.PENDING,
@@ -60,6 +72,9 @@ export const addToQueue = async (
       workspaceId: user.Workspace.id,
       queueId: queuePersist.id,
     });
+
+    // Track document ingestion
+    trackFeatureUsage("document_ingested", userId).catch(console.error);
   } else if (body.type === EpisodeType.CONVERSATION) {
     handler = await enqueueIngestEpisode({
       body,
@@ -67,6 +82,9 @@ export const addToQueue = async (
       workspaceId: user.Workspace.id,
       queueId: queuePersist.id,
     });
+
+    // Track episode ingestion
+    trackFeatureUsage("episode_ingested", userId).catch(console.error);
   }

   return { id: handler?.id, publicAccessToken: handler?.token };
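The upsert above relies on Prisma's behaviour that a `where` clause matching no row falls through to the `create` branch. A minimal sketch of the two call paths; the body and ids are placeholders, not values from the commit:

```typescript
import { addToQueue } from "~/lib/ingest.server";

// Placeholder inputs for illustration only.
const body = { episodeBody: "example text", type: "CONVERSATION", source: "api" } as any;
const userId = "user_123";

// First-time ingestion: no ingestionQueueId is supplied, so the dummy
// "non-existent-id" in the upsert's `where` matches nothing and Prisma
// takes the `create` branch.
await addToQueue(body, userId);

// Retry path (see the new retry route further down): the existing queue
// row's id is passed through, the `where` clause matches, and the `update`
// branch resets the row to PENDING and clears the previous error instead
// of inserting a duplicate.
await addToQueue(body, userId, undefined, "existing-queue-id");
```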
@@ -194,3 +194,7 @@ export async function enqueueSpaceAssignment(payload: {
     console.warn("Space assignment not implemented for BullMQ yet");
   }
 }
+
+export const isTriggerDeployment = () => {
+  return env.QUEUE_PROVIDER === "trigger";
+};
@@ -3,6 +3,7 @@ import type { GoogleProfile } from "@coji/remix-auth-google";
 import { prisma } from "~/db.server";
 import { env } from "~/env.server";
 import { runQuery } from "~/lib/neo4j.server";
+import { trackFeatureUsage } from "~/services/telemetry.server";
 export type { User } from "@core/database";

 type FindOrCreateMagicLink = {
@@ -72,9 +73,16 @@ export async function findOrCreateMagicLinkUser(
     },
   });

+  const isNewUser = !existingUser;
+
+  // Track new user registration
+  if (isNewUser) {
+    trackFeatureUsage("user_registered", user.id).catch(console.error);
+  }
+
   return {
     user,
-    isNewUser: !existingUser,
+    isNewUser,
   };
 }

@@ -160,9 +168,16 @@ export async function findOrCreateGoogleUser({
     },
   });

+  const isNewUser = !existingUser;
+
+  // Track new user registration
+  if (isNewUser) {
+    trackFeatureUsage("user_registered", user.id).catch(console.error);
+  }
+
   return {
     user,
-    isNewUser: !existingUser,
+    isNewUser,
   };
 }

@@ -51,6 +51,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
   const { getTheme } = await themeSessionResolver(request);

   const posthogProjectKey = env.POSTHOG_PROJECT_KEY;
+  const telemetryEnabled = env.TELEMETRY_ENABLED;
   const user = await getUser(request);
   const usageSummary = await getUsageSummary(user?.Workspace?.id as string);

@@ -62,6 +63,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
       toastMessage,
       theme: getTheme(),
       posthogProjectKey,
+      telemetryEnabled,
       appEnv: env.APP_ENV,
       appOrigin: env.APP_ORIGIN,
     },
@@ -113,8 +115,9 @@ export function ErrorBoundary() {
 }

 function App() {
-  const { posthogProjectKey } = useTypedLoaderData<typeof loader>();
-  usePostHog(posthogProjectKey);
+  const { posthogProjectKey, telemetryEnabled } =
+    useTypedLoaderData<typeof loader>();
+  usePostHog(posthogProjectKey, telemetryEnabled);
   const [theme] = useTheme();

   return (
@@ -3,6 +3,7 @@ import { json } from "@remix-run/node";
 import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
 import { enqueueDeepSearch } from "~/lib/queue-adapter.server";
 import { runs } from "@trigger.dev/sdk";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 const DeepSearchBodySchema = z.object({
   content: z.string().min(1, "Content is required"),
@@ -28,6 +29,9 @@ const { action, loader } = createActionApiRoute(
     corsStrategy: "all",
   },
   async ({ body, authentication }) => {
+    // Track deep search
+    trackFeatureUsage("deep_search_performed", authentication.userId).catch(console.error);
+
     let trigger;
     if (!body.stream) {
       trigger = await enqueueDeepSearch({
@@ -8,6 +8,7 @@ import { logger } from "~/services/logger.service";
 import { getWorkspaceByUser } from "~/models/workspace.server";
 import { tasks } from "@trigger.dev/sdk";
 import { type scheduler } from "~/trigger/integrations/scheduler";
+import { isTriggerDeployment } from "~/lib/queue-adapter.server";

 // Schema for creating an integration account with API key
 const IntegrationAccountBodySchema = z.object({
@@ -63,6 +64,13 @@ const { action, loader } = createHybridActionApiRoute(
       );
     }

+    if (!isTriggerDeployment()) {
+      return json(
+        { error: "Integrations don't work in non trigger deployment" },
+        { status: 400 },
+      );
+    }
+
     await tasks.trigger<typeof scheduler>("scheduler", {
       integrationAccountId: setupResult?.account?.id,
     });
apps/webapp/app/routes/api.v1.logs.$logId.retry.tsx (new file, 88 lines)
@@ -0,0 +1,88 @@
import { json } from "@remix-run/node";
import { z } from "zod";
import { IngestionStatus } from "@core/database";
import { getIngestionQueue } from "~/services/ingestionLogs.server";
import { createHybridActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { addToQueue } from "~/lib/ingest.server";

// Schema for log ID parameter
const LogParamsSchema = z.object({
  logId: z.string(),
});

const { action } = createHybridActionApiRoute(
  {
    params: LogParamsSchema,
    allowJWT: true,
    method: "POST",
    authorization: {
      action: "update",
    },
    corsStrategy: "all",
  },
  async ({ params, authentication }) => {
    try {
      const ingestionQueue = await getIngestionQueue(params.logId);

      if (!ingestionQueue) {
        return json(
          {
            error: "Ingestion log not found",
            code: "not_found",
          },
          { status: 404 },
        );
      }

      // Only allow retry for FAILED status
      if (ingestionQueue.status !== IngestionStatus.FAILED) {
        return json(
          {
            error: "Only failed ingestion logs can be retried",
            code: "invalid_status",
          },
          { status: 400 },
        );
      }

      // Get the original ingestion data
      const originalData = ingestionQueue.data as any;

      // Re-enqueue the job with the existing queue ID (will upsert)
      await addToQueue(
        originalData,
        authentication.userId,
        ingestionQueue.activityId || undefined,
        ingestionQueue.id, // Pass the existing queue ID for upsert
      );

      return json({
        success: true,
        message: "Ingestion retry initiated successfully",
      });
    } catch (error) {
      console.error("Error retrying ingestion:", error);

      // Handle specific error cases
      if (error instanceof Error && error.message === "no credits") {
        return json(
          {
            error: "Insufficient credits to retry ingestion",
            code: "no_credits",
          },
          { status: 402 },
        );
      }

      return json(
        {
          error: "Failed to retry ingestion",
          code: "internal_error",
        },
        { status: 500 },
      );
    }
  },
);

export { action };
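A hedged sketch of how a client can call this new endpoint; the log id is a placeholder, authentication (JWT or API key, handled by the hybrid route builder) is omitted, and the response shape follows the handler above:

```typescript
// POST /api/v1/logs/:logId/retry re-enqueues a FAILED ingestion log.
const logId = "log_123"; // placeholder id
const res = await fetch(`/api/v1/logs/${logId}/retry`, { method: "POST" });
const data = (await res.json()) as { success?: boolean; error?: string; code?: string };

if (res.status === 402) {
  // code === "no_credits": the workspace has no credits left for the retry.
} else if (res.ok && data.success) {
  // "Ingestion retry initiated successfully" - refresh the log to pick up PENDING status.
}
```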
@@ -5,6 +5,7 @@ import {
 } from "~/services/routeBuilders/apiBuilder.server";
 import { SearchService } from "~/services/search.server";
 import { json } from "@remix-run/node";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const SearchBodyRequest = z.object({
   query: z.string(),
@@ -51,6 +52,10 @@ const { action, loader } = createHybridActionApiRoute(
         structured: body.structured,
       },
     );
+
+    // Track search
+    trackFeatureUsage("search_performed", authentication.userId).catch(console.error);
+
     return json(results);
   },
 );
@@ -7,6 +7,7 @@ import { SpaceService } from "~/services/space.server";
 import { json } from "@remix-run/node";
 import { prisma } from "~/db.server";
 import { apiCors } from "~/utils/apiCors";
+import { isTriggerDeployment } from "~/lib/queue-adapter.server";

 const spaceService = new SpaceService();

@@ -40,6 +41,13 @@ const { action } = createHybridActionApiRoute(
       },
     });

+    if (!isTriggerDeployment()) {
+      return json(
+        { error: "Spaces don't work in non trigger deployment" },
+        { status: 400 },
+      );
+    }
+
     if (!user?.Workspace?.id) {
       throw new Error(
         "Workspace ID is required to create an ingestion queue entry.",
@@ -40,7 +40,7 @@ export default function InboxNotSelected() {
       <PageHeader
         title="Episode"
         showTrigger={false}
-        actionsNode={<LogOptions id={log.id} />}
+        actionsNode={<LogOptions id={log.id} status={log.status} />}
       />

       <LogDetails log={log as any} />
@@ -6,6 +6,7 @@ import { enqueueCreateConversationTitle } from "~/lib/queue-adapter.server";

 import { z } from "zod";
 import { type ConversationHistory } from "@prisma/client";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const CreateConversationSchema = z.object({
   message: z.string(),
@@ -55,6 +56,9 @@ export async function createConversation(
       { tags: [conversationHistory.id, workspaceId, conversationId] },
     );

+    // Track conversation message
+    trackFeatureUsage("conversation_message_sent", userId).catch(console.error);
+
     return {
       id: handler.id,
       token: handler.publicAccessToken,
@@ -102,6 +106,9 @@ export async function createConversation(
       { tags: [conversationHistory.id, workspaceId, conversation.id] },
     );

+    // Track new conversation creation
+    trackFeatureUsage("conversation_created", userId).catch(console.error);
+
     return {
       id: handler.id,
       token: handler.publicAccessToken,
@@ -17,6 +17,7 @@ import {
   updateSpace,
 } from "./graphModels/space";
 import { prisma } from "~/trigger/utils/prisma";
+import { trackFeatureUsage } from "./telemetry.server";

 export class SpaceService {
   /**
@@ -63,6 +64,9 @@ export class SpaceService {

     logger.info(`Created space ${space.id} successfully`);

+    // Track space creation
+    trackFeatureUsage("space_created", params.userId).catch(console.error);
+
     // Trigger automatic LLM assignment for the new space
     try {
       await triggerSpaceAssignment({
@@ -192,6 +196,10 @@ export class SpaceService {
     } catch (e) {
       logger.info(`Nothing to update to graph`);
     }

+    // Track space update
+    trackFeatureUsage("space_updated", userId).catch(console.error);
+
     logger.info(`Updated space ${spaceId} successfully`);
     return space;
   }
apps/webapp/app/services/telemetry.server.ts (new file, 274 lines)
@@ -0,0 +1,274 @@
import { PostHog } from "posthog-node";
import { env } from "~/env.server";
import { prisma } from "~/db.server";

// Server-side PostHog client for backend tracking
let posthogClient: PostHog | null = null;

function getPostHogClient(): PostHog | null {
  if (!env.TELEMETRY_ENABLED || !env.POSTHOG_PROJECT_KEY) {
    return null;
  }

  if (!posthogClient) {
    posthogClient = new PostHog(env.POSTHOG_PROJECT_KEY, {
      host: "https://us.posthog.com",
    });
  }

  return posthogClient;
}

/**
 * Get user email from userId, or return "anonymous" if TELEMETRY_ANONYMOUS is enabled
 */
async function getUserIdentifier(userId?: string): Promise<string> {
  if (env.TELEMETRY_ANONYMOUS || !userId) {
    return "anonymous";
  }

  try {
    const user = await prisma.user.findUnique({
      where: { id: userId },
      select: { email: true },
    });
    return user?.email || "anonymous";
  } catch (error) {
    return "anonymous";
  }
}

// Telemetry event types
export type TelemetryEvent =
  | "episode_ingested"
  | "document_ingested"
  | "search_performed"
  | "deep_search_performed"
  | "conversation_created"
  | "conversation_message_sent"
  | "space_created"
  | "space_updated"
  | "user_registered"
  | "error_occurred"
  | "queue_job_started"
  | "queue_job_completed"
  | "queue_job_failed";

// Common properties for all events
interface BaseEventProperties {
  userId?: string;
  workspaceId?: string;
  email?: string;
  name?: string;
  queueProvider?: "trigger" | "bullmq";
  modelProvider?: string;
  embeddingModel?: string;
  appEnv?: string;
}

// Event-specific properties
interface EpisodeIngestedProperties extends BaseEventProperties {
  spaceId?: string;
  documentCount?: number;
  processingTimeMs?: number;
}

interface SearchPerformedProperties extends BaseEventProperties {
  query: string;
  resultsCount: number;
  searchType: "basic" | "deep";
  spaceIds?: string[];
}

interface ConversationProperties extends BaseEventProperties {
  conversationId: string;
  messageLength?: number;
  model?: string;
}

interface ErrorProperties extends BaseEventProperties {
  errorType: string;
  errorMessage: string;
  stackTrace?: string;
  context?: Record<string, any>;
}

interface QueueJobProperties extends BaseEventProperties {
  jobId: string;
  jobType: string;
  queueName: string;
  durationMs?: number;
}

type EventProperties =
  | EpisodeIngestedProperties
  | SearchPerformedProperties
  | ConversationProperties
  | ErrorProperties
  | QueueJobProperties
  | BaseEventProperties;

/**
 * Track telemetry events to PostHog
 */
export async function trackEvent(
  event: TelemetryEvent,
  properties: EventProperties,
): Promise<void> {
  const client = getPostHogClient();
  if (!client) return;

  try {
    const userId = properties.userId || "anonymous";

    // Add common properties to all events
    const enrichedProperties = {
      ...properties,
      queueProvider: env.QUEUE_PROVIDER,
      modelProvider: getModelProvider(),
      embeddingModel: env.EMBEDDING_MODEL,
      appEnv: env.APP_ENV,
      timestamp: new Date().toISOString(),
    };

    client.capture({
      distinctId: userId,
      event,
      properties: enrichedProperties,
    });

    // Identify user if we have their info
    if (properties.email || properties.name) {
      client.identify({
        distinctId: userId,
        properties: {
          email: properties.email,
          name: properties.name,
        },
      });
    }
  } catch (error) {
    // Silently fail - don't break the app if telemetry fails
    console.error("Telemetry error:", error);
  }
}

/**
 * Track feature usage - simplified API
 * @param feature - Feature name (e.g., "episode_ingested", "search_performed")
 * @param userId - User ID (will be converted to email internally)
 * @param properties - Additional properties (optional)
 */
export async function trackFeatureUsage(
  feature: string,
  userId?: string,
  properties?: Record<string, any>,
): Promise<void> {
  const client = getPostHogClient();
  if (!client) return;

  try {
    const email = await getUserIdentifier(userId);

    client.capture({
      distinctId: email,
      event: feature,
      properties: {
        ...properties,
        timestamp: new Date().toISOString(),
      },
    });
  } catch (error) {
    // Silently fail - don't break the app if telemetry fails
    console.error("Telemetry error:", error);
  }
}

/**
 * Track system configuration once at startup
 * Tracks queue provider, model provider, embedding model, etc.
 */
export async function trackConfig(): Promise<void> {
  const client = getPostHogClient();
  if (!client) return;

  try {
    client.capture({
      distinctId: "system",
      event: "system_config",
      properties: {
        queueProvider: env.QUEUE_PROVIDER,
        modelProvider: getModelProvider(),
        model: env.MODEL,
        embeddingModel: env.EMBEDDING_MODEL,
        appEnv: env.APP_ENV,
        nodeEnv: env.NODE_ENV,
        timestamp: new Date().toISOString(),
      },
    });
  } catch (error) {
    console.error("Failed to track config:", error);
  }
}

/**
 * Track errors
 */
export async function trackError(
  error: Error,
  context?: Record<string, any>,
  userId?: string,
): Promise<void> {
  const client = getPostHogClient();
  if (!client) return;

  try {
    const email = await getUserIdentifier(userId);

    client.capture({
      distinctId: email,
      event: "error_occurred",
      properties: {
        errorType: error.name,
        errorMessage: error.message,
        stackTrace: error.stack,
        ...context,
        timestamp: new Date().toISOString(),
      },
    });
  } catch (trackingError) {
    console.error("Failed to track error:", trackingError);
  }
}

/**
 * Flush pending events (call on shutdown)
 */
export async function flushTelemetry(): Promise<void> {
  const client = getPostHogClient();
  if (client) {
    await client.shutdown();
  }
}

/**
 * Helper to determine model provider from MODEL env variable
 */
function getModelProvider(): string {
  const model = env.MODEL.toLowerCase();
  if (model.includes("gpt") || model.includes("openai")) return "openai";
  if (model.includes("claude") || model.includes("anthropic"))
    return "anthropic";
  if (env.OLLAMA_URL) return "ollama";
  return "unknown";
}

// Export types for use in other files
export type {
  BaseEventProperties,
  EpisodeIngestedProperties,
  SearchPerformedProperties,
  ConversationProperties,
  ErrorProperties,
  QueueJobProperties,
};
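A short usage sketch for the service above; the user id and extra properties are illustrative. Callers in this commit follow the same fire-and-forget pattern with `.catch(console.error)` so telemetry can never break a request:

```typescript
import { trackFeatureUsage, trackError, flushTelemetry } from "~/services/telemetry.server";

const userId = "user_123"; // placeholder

// Fire-and-forget feature event; resolves to a no-op when telemetry is disabled.
trackFeatureUsage("search_performed", userId, { searchType: "basic" }).catch(console.error);

// Error tracking with arbitrary context.
trackError(new Error("example failure"), { path: "/api/v1/search" }, userId).catch(console.error);

// On graceful shutdown, flush the buffered PostHog events.
await flushTelemetry();
```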
@@ -3,6 +3,7 @@ import { fetchAndSaveStdioIntegrations } from "~/trigger/utils/mcp";
 import { initNeo4jSchemaOnce } from "~/lib/neo4j.server";
 import { env } from "~/env.server";
 import { startWorkers } from "~/bullmq/start-workers";
+import { trackConfig } from "~/services/telemetry.server";

 // Global flag to ensure startup only runs once per server process
 let startupInitialized = false;
@@ -44,13 +45,16 @@ export async function initializeStartupServices() {
   if (env.QUEUE_PROVIDER === "trigger") {
     try {
       const triggerApiUrl = env.TRIGGER_API_URL;
-      if (triggerApiUrl) {
-        await waitForTriggerLogin(triggerApiUrl);
-        await addEnvVariablesInTrigger();
-      } else {
-        console.error("TRIGGER_API_URL is not set in environment variables.");
+      // At this point, env validation should have already ensured these are present
+      // But we add a runtime check for safety
+      if (!triggerApiUrl || !env.TRIGGER_PROJECT_ID || !env.TRIGGER_SECRET_KEY) {
+        console.error(
+          "TRIGGER_API_URL, TRIGGER_PROJECT_ID, and TRIGGER_SECRET_KEY must be set when QUEUE_PROVIDER=trigger",
+        );
         process.exit(1);
       }
+      await waitForTriggerLogin(triggerApiUrl);
+      await addEnvVariablesInTrigger();
     } catch (e) {
       console.error(e);
       console.error("Trigger is not configured");
@@ -70,6 +74,10 @@ export async function initializeStartupServices() {
     await fetchAndSaveStdioIntegrations();
     logger.info("Stdio integrations initialization completed");

+    // Track system configuration once at startup
+    await trackConfig();
+    logger.info("System configuration tracked");
+
     startupInitialized = true;
     logger.info("Application initialization completed successfully");
   } catch (error) {
@@ -126,6 +134,14 @@ export async function addEnvVariablesInTrigger() {
     TRIGGER_SECRET_KEY,
   } = env;

+  // These should always be present when this function is called
+  // but we add a runtime check for type safety
+  if (!TRIGGER_PROJECT_ID || !TRIGGER_API_URL || !TRIGGER_SECRET_KEY) {
+    throw new Error(
+      "TRIGGER_PROJECT_ID, TRIGGER_API_URL, and TRIGGER_SECRET_KEY are required",
+    );
+  }
+
   const DATABASE_URL = getDatabaseUrl(POSTGRES_DB);

   // Map of key to value from env, replacing 'localhost' as needed
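The new `flushTelemetry` helper defined in telemetry.server.ts above has no caller in this diff; a hedged sketch of how a process shutdown hook could drain the PostHog buffer, assuming plain Node signal handlers:

```typescript
import { flushTelemetry } from "~/services/telemetry.server";

// Illustrative only: flush buffered PostHog events before the process exits.
async function shutdown(signal: string) {
  console.log(`Received ${signal}, flushing telemetry before exit`);
  await flushTelemetry();
  process.exit(0);
}

process.once("SIGINT", () => void shutdown("SIGINT"));
process.once("SIGTERM", () => void shutdown("SIGTERM"));
```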
@@ -120,6 +120,7 @@
     "ollama-ai-provider": "1.2.0",
     "openai": "^5.12.2",
     "posthog-js": "^1.116.6",
+    "posthog-node": "^5.10.3",
     "react": "^18.2.0",
     "react-calendar-heatmap": "^1.10.0",
     "react-dom": "^18.2.0",
docker/Dockerfile.neo4j (new file, 28 lines)
@@ -0,0 +1,28 @@
FROM neo4j:5

# Set environment variables for plugin versions
# GDS 2.13 is compatible with Neo4j 5.26
# APOC 5.26.14 is the latest for Neo4j 5.x
ENV GDS_VERSION=2.13.0
ENV APOC_VERSION=5.26.0

# Install GDS and APOC plugins
RUN apt-get update && apt-get install -y curl && \
    curl -L https://github.com/neo4j/graph-data-science/releases/download/${GDS_VERSION}/neo4j-graph-data-science-${GDS_VERSION}.jar \
      -o /var/lib/neo4j/plugins/neo4j-graph-data-science-${GDS_VERSION}.jar && \
    curl -L https://github.com/neo4j/apoc/releases/download/${APOC_VERSION}/apoc-${APOC_VERSION}-core.jar \
      -o /var/lib/neo4j/plugins/apoc-${APOC_VERSION}-core.jar && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Set proper permissions
RUN chown -R neo4j:neo4j /var/lib/neo4j/plugins

# Default configuration for GDS and APOC
ENV NEO4J_dbms_security_procedures_unrestricted=gds.*,apoc.*
ENV NEO4J_dbms_security_procedures_allowlist=gds.*,apoc.*
ENV NEO4J_apoc_export_file_enabled=true
ENV NEO4J_apoc_import_file_enabled=true
ENV NEO4J_apoc_import_file_use_neo4j_config=true

EXPOSE 7474 7687
243
docs/TELEMETRY.md
Normal file
243
docs/TELEMETRY.md
Normal file
@ -0,0 +1,243 @@
|
|||||||
|
# Telemetry in Core
|
||||||
|
|
||||||
|
Core collects anonymous usage data to help us understand how the product is being used and to make data-driven improvements. This document explains what we collect, why we collect it, and how to opt-out.
|
||||||
|
|
||||||
|
## Our Commitment to Privacy
|
||||||
|
|
||||||
|
We take your privacy seriously. Telemetry is designed to be:
|
||||||
|
|
||||||
|
- **Transparent**: You can see exactly what we collect (listed below)
|
||||||
|
- **Respectful**: Easy to disable at any time
|
||||||
|
- **Minimal**: We only collect what helps improve the product
|
||||||
|
- **Secure**: Data is transmitted securely to PostHog
|
||||||
|
|
||||||
|
## What We Collect
|
||||||
|
|
||||||
|
### User Information
|
||||||
|
|
||||||
|
- **Email address only**: Used to identify unique users (can be anonymized - see below)
|
||||||
|
- No other personal information is collected
|
||||||
|
|
||||||
|
### Feature Usage Events
|
||||||
|
|
||||||
|
We track when these features are used (event name only, no additional data):
|
||||||
|
|
||||||
|
- **episode_ingested**: When you add a conversation episode
|
||||||
|
- **document_ingested**: When you add a document
|
||||||
|
- **search_performed**: When you perform a search
|
||||||
|
- **deep_search_performed**: When you use deep search
|
||||||
|
- **conversation_created**: When you start a new AI conversation
|
||||||
|
- **conversation_message_sent**: When you send a message in a conversation
|
||||||
|
- **space_created**: When you create a new space
|
||||||
|
- **space_updated**: When you update a space
|
||||||
|
- **user_registered**: When a new user signs up
|
||||||
|
|
||||||
|
### System Configuration (Tracked Once at Startup)
|
||||||
|
|
||||||
|
- **Queue provider**: Whether you're using Trigger.dev or BullMQ
|
||||||
|
- **Model provider**: Which LLM you're using (OpenAI, Anthropic, Ollama, etc.)
|
||||||
|
- **Model name**: The specific model configured
|
||||||
|
- **Embedding model**: Which embedding model is configured
|
||||||
|
- **App environment**: Development, production, or test
|
||||||
|
- **Node environment**: Runtime environment
|
||||||
|
|
||||||
|
### Errors (Automatic)
|
||||||
|
|
||||||
|
- **Error type**: The type of error that occurred
|
||||||
|
- **Error message**: Brief description of the error
|
||||||
|
- **Error stack trace**: Technical details for debugging
|
||||||
|
- **Request context**: URL, method, user agent (for server errors)
|
||||||
|
|
||||||
|
### Page Views (Client-Side)
|
||||||
|
|
||||||
|
- **Page navigation**: Which pages are visited
|
||||||
|
- **Session information**: Basic session tracking
|
||||||
|
|
||||||
|
## What We DON'T Collect
|
||||||
|
|
||||||
|
We explicitly **do not** collect:
|
||||||
|
|
||||||
|
- ❌ **Your document content**: None of your ingested documents or notes
|
||||||
|
- ❌ **Space content**: Your space data remains private
|
||||||
|
- ❌ **Search queries**: We track that searches happen, not what you searched for
|
||||||
|
- ❌ **Conversation content**: We never collect the actual messages or responses
|
||||||
|
- ❌ **User names**: Only email addresses are collected (can be anonymized)
|
||||||
|
- ❌ **Workspace IDs**: Not tracked
|
||||||
|
- ❌ **Space IDs**: Not tracked
|
||||||
|
- ❌ **Conversation IDs**: Not tracked
|
||||||
|
- ❌ **API keys or secrets**: No sensitive credentials
|
||||||
|
- ❌ **IP addresses**: Not tracked
|
||||||
|
- ❌ **File paths or system details**: No filesystem information
|
||||||
|
- ❌ **Environment variables**: Configuration remains private
|
||||||
|
|
||||||
|
**Privacy-First Approach**: We only track the event name and user email. No metadata, no additional properties, no detailed analytics.
|
||||||
|
|
||||||
|
## Why We Collect This Data
|
||||||
|
|
||||||
|
### Product Improvement
|
||||||
|
|
||||||
|
- Understand which features are most valuable
|
||||||
|
- Identify features that need improvement
|
||||||
|
- Prioritize development based on actual usage
|
||||||
|
|
||||||
|
### Reliability & Performance
|
||||||
|
|
||||||
|
- Detect and fix errors before they affect many users
|
||||||
|
- Identify performance bottlenecks
|
||||||
|
- Monitor system health across different configurations
|
||||||
|
|
||||||
|
### Usage Patterns
|
||||||
|
|
||||||
|
- Understand how different deployment types (Docker, manual, cloud) are used
|
||||||
|
- See which queue providers and models are popular
|
||||||
|
- Make informed decisions about which integrations to prioritize
|
||||||
|
|
||||||
|
## How to Opt-Out

We respect your choice to disable telemetry. There are several ways to control it:

### Option 1: Disable Telemetry Completely

Add to your `.env` file:

```bash
TELEMETRY_ENABLED=false
```

### Option 2: Anonymous Mode

Keep telemetry enabled but send "anonymous" instead of your email:

```bash
TELEMETRY_ANONYMOUS=true
```

### Option 3: Remove the PostHog Key

Set the PostHog key to an empty value:

```bash
POSTHOG_PROJECT_KEY=
```

After making any of these changes, restart your Core instance.

## Environment Variables

```bash
# PostHog project key
POSTHOG_PROJECT_KEY=phc_your_key_here

# Enable/disable telemetry (default: true)
TELEMETRY_ENABLED=true

# Send "anonymous" instead of email (default: false)
TELEMETRY_ANONYMOUS=false

# Industry standard opt-out
DO_NOT_TRACK=1
```

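As a sketch of how these flags fit together (the exact logic in `telemetry.server.ts` may differ, but the documented defaults imply roughly this):

```typescript
// Sketch only: combining the documented flags. Defaults follow the comments above.
const telemetryEnabled =
  process.env.TELEMETRY_ENABLED !== "false" &&   // default: true
  process.env.DO_NOT_TRACK !== "1" &&            // industry standard opt-out
  Boolean(process.env.POSTHOG_PROJECT_KEY);      // no key, no telemetry (Option 3)

// Anonymous mode swaps the email for a fixed "anonymous" identifier.
const distinctId =
  process.env.TELEMETRY_ANONYMOUS === "true" ? "anonymous" : "user@example.com";
```
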
## For Self-Hosted Deployments

### Default Behavior

- Telemetry is **enabled by default**, with an easy opt-out
- Data is sent to our PostHog instance
- Simple to disable (see the options above)

### Using Your Own PostHog Instance

If you prefer to keep all data in-house, you can:

1. Deploy your own PostHog instance (https://posthog.com/docs/self-host)
2. Set `POSTHOG_PROJECT_KEY` to your self-hosted instance's key
3. All telemetry data then stays on your infrastructure (see the sketch below)

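Note that pointing a PostHog client at a self-hosted deployment typically involves a host URL in addition to the project key. The sketch below uses `posthog-node` for illustration; `POSTHOG_HOST` and the example URL are assumptions, not variables Core necessarily reads, so check the telemetry service for the option that is actually supported.

```typescript
import { PostHog } from "posthog-node";

// Illustration only: sending telemetry to a self-hosted PostHog deployment.
// POSTHOG_HOST and the fallback URL are hypothetical values used for this example.
const posthog = new PostHog(process.env.POSTHOG_PROJECT_KEY ?? "", {
  host: process.env.POSTHOG_HOST ?? "https://posthog.internal.example.com",
});

posthog.capture({ distinctId: "user@example.com", event: "space_created" });
await posthog.shutdown();
```
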
### Completely Disable Telemetry

For maximum privacy in self-hosted deployments, set `TELEMETRY_ENABLED=false` in your `.env` (or set `DO_NOT_TRACK=1`). No telemetry data will be sent.

### Anonymous Mode

If you want to contribute usage data without identifying yourself, set `TELEMETRY_ANONYMOUS=true` in your `.env`. All events are then attributed to "anonymous" instead of your email, which helps us improve the product while preserving your privacy.

## Transparency

### Open Source

Core's telemetry code is completely open source. You can inspect exactly what is being tracked:

**Server-Side Tracking:**

- `apps/webapp/app/services/telemetry.server.ts` - Core telemetry service
- `apps/webapp/app/entry.server.tsx` - Global error tracking
- `apps/webapp/app/lib/ingest.server.ts:66,76` - Episode/document ingestion
- `apps/webapp/app/routes/api.v1.search.tsx:57` - Search tracking
- `apps/webapp/app/routes/api.v1.deep-search.tsx:33` - Deep search tracking
- `apps/webapp/app/services/conversation.server.ts:60,110` - Conversation tracking
- `apps/webapp/app/services/space.server.ts:68,201` - Space tracking
- `apps/webapp/app/models/user.server.ts:80,175` - User registration tracking
- `apps/webapp/app/utils/startup.ts:78` - System config tracking (once at startup)

**Client-Side Tracking:**

- `apps/webapp/app/hooks/usePostHog.ts` - Page views and user identification
- `apps/webapp/app/root.tsx:118-119` - PostHog initialization

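For orientation, client-side page-view tracking with `posthog-js` generally looks like the generic sketch below; the actual initialization in `root.tsx` and `usePostHog.ts` may be configured differently, and the host value is an assumption.

```typescript
import posthog from "posthog-js";

// Generic posthog-js sketch of initialization, identification, and page-view capture.
// The real setup lives in root.tsx / usePostHog.ts and may differ.
posthog.init("phc_your_key_here", {
  api_host: "https://us.i.posthog.com", // assumed host
  capture_pageview: false,              // capture manually on route changes instead
});

// Identify by email (or "anonymous" in anonymous mode) ...
posthog.identify("user@example.com");

// ... and record a page view when the route changes.
posthog.capture("$pageview");
```
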
### PostHog Key Security

- The PostHog project key (`phc_*`) is safe to expose publicly
- It can only **send** events, not read existing data
- This is standard practice for client-side analytics

### Data Minimization

Our approach prioritizes minimal data collection:

- **Event name only**: Just the feature name (e.g., "search_performed")
- **Email only**: Single identifier (can be anonymized)
- **No metadata**: No counts, times, IDs, or other properties
- **Config once**: System configuration tracked only at startup, not per-event

## Questions?

If you have questions about telemetry:

- Open an issue on GitHub: https://github.com/redplanethq/core/issues
- Review the source code to see exactly what's tracked
- Check PostHog's privacy policy: https://posthog.com/privacy

## Summary

- **What we track**: Event names + email (e.g., "search_performed" by "user@example.com")
- **What we don't track**: Content, queries, messages, IDs, counts, times, or any other metadata
- **How to opt out**: `TELEMETRY_ENABLED=false` or `DO_NOT_TRACK=1`
- **Anonymous mode**: `TELEMETRY_ANONYMOUS=true` (sends "anonymous" instead of email)
- **Default**: Enabled, with an easy opt-out

### Events Tracked

| Event                       | Location                             | When It Fires                    |
| --------------------------- | ------------------------------------ | -------------------------------- |
| `episode_ingested`          | lib/ingest.server.ts:76              | Conversation episode added       |
| `document_ingested`         | lib/ingest.server.ts:66              | Document added                   |
| `search_performed`          | routes/api.v1.search.tsx:57          | Basic search executed            |
| `deep_search_performed`     | routes/api.v1.deep-search.tsx:33     | Deep search executed             |
| `conversation_created`      | services/conversation.server.ts:110  | New conversation started         |
| `conversation_message_sent` | services/conversation.server.ts:60   | Message sent in conversation     |
| `space_created`             | services/space.server.ts:68          | New space created                |
| `space_updated`             | services/space.server.ts:201         | Space updated                    |
| `user_registered`           | models/user.server.ts:80,175         | New user signs up                |
| `error_occurred`            | entry.server.tsx:36                  | Server error (auto-tracked)      |
| `system_config`             | utils/startup.ts:78                  | App starts (config tracked once) |

We believe in building in public and being transparent about data collection. Thank you for helping make Core better!

@ -84,7 +84,7 @@ services:
  neo4j:
    container_name: core-neo4j
    image: neo4j:5
    image: core-neo4j:0.1.0
    environment:
      - NEO4J_AUTH=${NEO4J_AUTH}
      - NEO4J_dbms_security_procedures_unrestricted=gds.*,apoc.*

16 pnpm-lock.yaml generated
@ -598,6 +598,9 @@ importers:
      posthog-js:
        specifier: ^1.116.6
        version: 1.250.2
      posthog-node:
        specifier: ^5.10.3
        version: 5.10.3
      react:
        specifier: ^18.2.0
        version: 18.3.1
@ -3161,6 +3164,9 @@ packages:
  '@popperjs/core@2.11.8':
    resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==}

  '@posthog/core@1.3.1':
    resolution: {integrity: sha512-sGKVHituJ8L/bJxVV4KamMFp+IBWAZyCiYunFawJZ4cc59PCtLnKFIMEV6kn7A4eZQcQ6EKV5Via4sF3Z7qMLQ==}

  '@prisma/client@5.4.1':
    resolution: {integrity: sha512-xyD0DJ3gRNfLbPsC+YfMBBuLJtZKQfy1OD2qU/PZg+HKrr7SO+09174LMeTlWP0YF2wca9LxtVd4HnAiB5ketQ==}
    engines: {node: '>=16.13'}
@ -10273,6 +10279,10 @@ packages:
      rrweb-snapshot:
        optional: true

  posthog-node@5.10.3:
    resolution: {integrity: sha512-pe0P/4MfTSBgM4PWRTeg2iKDSSX6nxnlxAyW+v2+acpCSU50KM2YE5UFJ1Vkq/PtwcJgrt2Ydj66IzuRn2uwFQ==}
    engines: {node: '>=20'}

  preact@10.26.9:
    resolution: {integrity: sha512-SSjF9vcnF27mJK1XyFMNJzFd5u3pQiATFqoaDy03XuN00u4ziveVVEGt5RKJrDR8MHE/wJo9Nnad56RLzS2RMA==}
@ -15584,6 +15594,8 @@ snapshots:
  '@popperjs/core@2.11.8': {}

  '@posthog/core@1.3.1': {}

  '@prisma/client@5.4.1(prisma@5.4.1)':
    dependencies:
      '@prisma/engines-version': 5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f
@ -24116,6 +24128,10 @@ snapshots:
      preact: 10.26.9
      web-vitals: 4.2.4

  posthog-node@5.10.3:
    dependencies:
      '@posthog/core': 1.3.1

  preact@10.26.9: {}

  preferred-pm@3.1.4: