Feat: webhooks for oauth apps (#32)

* Fix: integration account webhooks

* Fix: activity webhook

* Feat: add integration credentials API

* Fix: user rules for integrations

* Feat: make self-hosting simple

* Fix: add init container functionality

---------

Co-authored-by: Manoj K <saimanoj58@gmail.com>
Harshith Mullapudi 2025-08-01 10:34:16 +05:30 committed by GitHub
parent 0dad877166
commit 2a6acaf899
82 changed files with 2050 additions and 2457 deletions

View File

@ -1,6 +1,4 @@
VERSION=0.1.12
VERSION=0.1.13
# If Nest runs in Docker, change the host to the database container name
DB_HOST=localhost

View File

@ -7,6 +7,32 @@ on:
workflow_dispatch:
jobs:
build-init:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: main
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Registry
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
- name: Build and Push Frontend Docker Image
uses: docker/build-push-action@v2
with:
context: .
file: ./apps/init/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: redplanethq/init:${{ github.ref_name }}
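# Note: github.ref_name resolves to the branch or tag that triggered the run
# (e.g. "main"), so init images are tagged per ref.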
build-webapp:
runs-on: ubuntu-latest

View File

@ -1,4 +1,3 @@
<div align="right">
<details>
<summary>🌐 Language</summary>
@ -63,7 +62,7 @@ C.O.R.E is a portable memory graph built from your LLM interactions and personal
<img width="7480" height="4672" alt="core-memory-graph" src="https://github.com/user-attachments/assets/2b159e87-38ce-4cc6-ac16-047f645c3a4b" />
## 🧩 Key Features
- **Memory Graph**: Visualise how your facts and preferences link together
- **Chat with Memory**: Ask questions about memory for instant insights and understanding
@ -85,14 +84,19 @@ Check out our [docs](https://docs.heysol.ai/self-hosting/docker) for modular dep
## Documentation
Explore our documentation to get the most out of CORE
- [Basic Concepts](https://docs.heysol.ai/concepts/memory_graph)
- [Self Hosting](https://docs.heysol.ai/self-hosting/overview)
- [Connect Core MCP with Claude](https://docs.heysol.ai/providers/claude)
- [Connect Core MCP with Cursor](https://docs.heysol.ai/providers/cursor)
- [Basic Concepts](https://docs.heysol.ai/overview)
- [API Reference](https://docs.heysol.ai/local-setup)
## 🧑‍💻 Support
Have questions or feedback? We're here to help:
- Discord: [Join core-support channel](https://discord.gg/YGUZcvDjUa)
- Documentation: [docs.heysol.ai](https://docs.heysol.ai)
- Email: manik@poozle.dev

apps/init/.gitignore vendored Normal file
View File

@ -0,0 +1,51 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# Dependencies
node_modules
.pnp
.pnp.js
# Local env files
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Testing
coverage
# Turbo
.turbo
# Vercel
.vercel
# Build Outputs
.next/
out/
build
dist
.tshy/
.tshy-build/
# Debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Misc
.DS_Store
*.pem
docker-compose.dev.yaml
clickhouse/
.vscode/
registry/
.cursor
CLAUDE.md
.claude

apps/init/Dockerfile Normal file
View File

@ -0,0 +1,70 @@
ARG NODE_IMAGE=node:20.11.1-bullseye-slim@sha256:5a5a92b3a8d392691c983719dbdc65d9f30085d6dcd65376e7a32e6fe9bf4cbe
FROM ${NODE_IMAGE} AS pruner
WORKDIR /core
COPY --chown=node:node . .
RUN npx -q turbo@2.5.3 prune --scope=@redplanethq/init --docker
RUN find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
# Base strategy to have layer caching
FROM ${NODE_IMAGE} AS base
RUN apt-get update && apt-get install -y openssl dumb-init postgresql-client
WORKDIR /core
COPY --chown=node:node .gitignore .gitignore
COPY --from=pruner --chown=node:node /core/out/json/ .
COPY --from=pruner --chown=node:node /core/out/pnpm-lock.yaml ./pnpm-lock.yaml
COPY --from=pruner --chown=node:node /core/out/pnpm-workspace.yaml ./pnpm-workspace.yaml
## Dev deps
FROM base AS dev-deps
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
ENV NODE_ENV development
RUN pnpm install --ignore-scripts --no-frozen-lockfile
## Production deps
FROM base AS production-deps
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
ENV NODE_ENV production
RUN pnpm install --prod --no-frozen-lockfile
## Builder (builds the init CLI)
FROM base AS builder
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
COPY --from=pruner --chown=node:node /core/out/full/ .
COPY --from=dev-deps --chown=node:node /core/ .
COPY --chown=node:node turbo.json turbo.json
COPY --chown=node:node .configs/tsconfig.base.json .configs/tsconfig.base.json
RUN pnpm run build --filter=@redplanethq/init...
# Runner
FROM ${NODE_IMAGE} AS runner
RUN apt-get update && apt-get install -y openssl postgresql-client ca-certificates
WORKDIR /core
RUN corepack enable
ENV NODE_ENV production
COPY --from=base /usr/bin/dumb-init /usr/bin/dumb-init
COPY --from=pruner --chown=node:node /core/out/full/ .
COPY --from=production-deps --chown=node:node /core .
COPY --from=builder --chown=node:node /core/apps/init/dist ./apps/init/dist
# Copy the trigger dump file
COPY --chown=node:node apps/init/trigger.dump ./apps/init/trigger.dump
# Copy and set up entrypoint script
COPY --chown=node:node apps/init/entrypoint.sh ./apps/init/entrypoint.sh
RUN chmod +x ./apps/init/entrypoint.sh
USER node
WORKDIR /core/apps/init
ENTRYPOINT ["dumb-init", "--"]
CMD ["./entrypoint.sh"]

apps/init/entrypoint.sh Normal file
View File

@ -0,0 +1,22 @@
#!/bin/sh
# Exit on any error
set -e
echo "Starting init CLI..."
# Wait for database to be ready
echo "Waiting for database connection..."
until pg_isready -h "${DB_HOST:-localhost}" -p "${DB_PORT:-5432}" -U "${POSTGRES_USER:-docker}"; do
echo "Database is unavailable - sleeping"
sleep 2
done
echo "Database is ready!"
# Run the init command
echo "Running init command..."
node ./dist/esm/index.js init
echo "Init completed successfully!"
exit 0
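# Usage sketch (image tag and env values are illustrative, not from this repo's docs):
#   docker run --rm -e DB_HOST=postgres -e DB_PORT=5432 redplanethq/init:latest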

View File

@ -1,13 +1,13 @@
{
"name": "@redplanethq/core",
"version": "0.1.8",
"description": "A Command-Line Interface for Core",
"name": "@redplanethq/init",
"version": "0.1.0",
"description": "A init service to create trigger instance",
"type": "module",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/redplanethq/core",
"directory": "packages/core-cli"
"directory": "apps/init"
},
"publishConfig": {
"access": "public"
@ -16,7 +16,8 @@
"typescript"
],
"files": [
"dist"
"dist",
"trigger.dump"
],
"bin": {
"core": "./dist/esm/index.js"

View File

@ -1,7 +1,5 @@
import { Command } from "commander";
import { initCommand } from "../commands/init.js";
import { startCommand } from "../commands/start.js";
import { stopCommand } from "../commands/stop.js";
import { VERSION } from "./version.js";
const program = new Command();
@ -13,8 +11,4 @@ program
.description("Initialize Core development environment (run once)")
.action(initCommand);
program.command("start").description("Start Core development environment").action(startCommand);
program.command("stop").description("Stop Core development environment").action(stopCommand);
program.parse(process.argv);

View File

@ -0,0 +1,3 @@
import { env } from "../utils/env.js";
export const VERSION = env.VERSION;

View File

@ -0,0 +1,36 @@
import { intro, outro, note } from "@clack/prompts";
import { printCoreBrainLogo } from "../utils/ascii.js";
import { initTriggerDatabase, updateWorkerImage } from "../utils/trigger.js";
export async function initCommand() {
// Display the CORE brain logo
printCoreBrainLogo();
intro("🚀 Core Development Environment Setup");
try {
await initTriggerDatabase();
await updateWorkerImage();
note(
[
"Your services will start running:",
"",
"• Core Application: http://localhost:3033",
"• Trigger.dev: http://localhost:8030",
"• PostgreSQL: localhost:5432",
"",
"You can now start developing with Core!",
"",
" When logging in to the Core Application, you can find the login URL in the Docker container logs:",
" docker logs core-app --tail 50",
].join("\n"),
"🚀 Services Running"
);
outro("🎉 Setup Complete!");
process.exit(0);
} catch (error: any) {
outro(`❌ Setup failed: ${error.message}`);
process.exit(1);
}
}

apps/init/src/index.ts Normal file
View File

@ -0,0 +1,3 @@
#!/usr/bin/env node
import "./cli/index.js";

View File

@ -0,0 +1,24 @@
import { z } from "zod";
const EnvironmentSchema = z.object({
// Version
VERSION: z.string().default("0.1.13"),
// Database
DB_HOST: z.string().default("localhost"),
DB_PORT: z.string().default("5432"),
TRIGGER_DB: z.string().default("trigger"),
POSTGRES_USER: z.string().default("docker"),
POSTGRES_PASSWORD: z.string().default("docker"),
// Trigger database
TRIGGER_TASKS_IMAGE: z.string().default("redplanethq/proj_core:latest"),
// Node environment
NODE_ENV: z
.union([z.literal("development"), z.literal("production"), z.literal("test")])
.default("development"),
});
export type Environment = z.infer<typeof EnvironmentSchema>;
export const env = EnvironmentSchema.parse(process.env);
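// Sketch of the fallback behavior: with no DB_* variables set, parsing
// process.env yields env.DB_HOST === "localhost" and env.DB_PORT === "5432".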

View File

@ -0,0 +1,182 @@
import Knex from "knex";
import path from "path";
import { fileURLToPath } from "url";
import { env } from "./env.js";
import { spinner, note, log } from "@clack/prompts";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
* Returns a PostgreSQL database URL for the given database name.
* Throws if required environment variables are missing.
*/
export function getDatabaseUrl(dbName: string): string {
const { POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT } = env;
if (!POSTGRES_USER || !POSTGRES_PASSWORD || !DB_HOST || !DB_PORT || !dbName) {
throw new Error(
"One or more required environment variables are missing: POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT, dbName"
);
}
return `postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}:${DB_PORT}/${dbName}`;
}
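// Example (sketch, using the defaults from env.ts): getDatabaseUrl("trigger")
// => "postgresql://docker:docker@localhost:5432/trigger"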
/**
* Checks if the database specified by TRIGGER_DB exists, and creates it if it does not.
* Returns { exists: boolean, created: boolean } - exists indicates success, created indicates if database was newly created.
*/
export async function ensureDatabaseExists(): Promise<{ exists: boolean; created: boolean }> {
const { TRIGGER_DB } = env;
if (!TRIGGER_DB) {
throw new Error("TRIGGER_DB environment variable is missing");
}
// Build a connection string to the default 'postgres' database
const adminDbUrl = getDatabaseUrl("postgres");
// Create a Knex instance for the admin connection
const adminKnex = Knex({
client: "pg",
connection: adminDbUrl,
});
const s = spinner();
s.start("Checking for Trigger.dev database...");
try {
// Check if the database exists
const result = await adminKnex.select(1).from("pg_database").where("datname", TRIGGER_DB);
if (result.length === 0) {
s.message("Database not found. Creating...");
// Database does not exist, create it
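// CREATE DATABASE cannot take bind parameters or run inside a transaction,
// hence the raw statement with a quoted identifier.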
await adminKnex.raw(`CREATE DATABASE "${TRIGGER_DB}"`);
s.stop("Database created.");
return { exists: true, created: true };
} else {
s.stop("Database exists.");
return { exists: true, created: false };
}
} catch (err) {
s.stop("Failed to ensure database exists.");
log.warning("Failed to ensure database exists: " + (err as Error).message);
return { exists: false, created: false };
} finally {
await adminKnex.destroy();
}
}
// Main initialization function
export async function initTriggerDatabase() {
const { TRIGGER_DB } = env;
if (!TRIGGER_DB) {
throw new Error("TRIGGER_DB environment variable is missing");
}
// Ensure the database exists
const { exists, created } = await ensureDatabaseExists();
if (!exists) {
throw new Error("Failed to create or verify database exists");
}
// Only run pg_restore if the database was newly created
if (!created) {
note("Database already exists, skipping restore from trigger.dump");
return;
}
// Run pg_restore with the trigger.dump file
const dumpFilePath = path.join(__dirname, "../../../trigger.dump");
const connectionString = getDatabaseUrl(TRIGGER_DB);
const s = spinner();
s.start("Restoring database from trigger.dump...");
try {
// Spawn pg_restore and stream its stdout/stderr into the spinner message
const { spawn } = await import("child_process");
await new Promise<void>((resolve, reject) => {
const child = spawn(
"pg_restore",
["--verbose", "--no-acl", "--no-owner", "-d", connectionString, dumpFilePath],
{ stdio: ["ignore", "pipe", "pipe"] }
);
child.stdout.on("data", (data) => {
s.message(data.toString());
});
child.stderr.on("data", (data) => {
s.message(data.toString());
});
child.on("close", (code) => {
if (code === 0) {
s.stop("Database restored successfully from trigger.dump");
resolve();
} else {
s.stop("Failed to restore database.");
log.warning(`Failed to restore database: pg_restore exited with code ${code}`);
reject(new Error(`Database restore failed: pg_restore exited with code ${code}`));
}
});
child.on("error", (err) => {
s.stop("Failed to restore database.");
log.warning("Failed to restore database: " + err.message);
reject(new Error(`Database restore failed: ${err.message}`));
});
});
} catch (error: any) {
s.stop("Failed to restore database.");
log.warning("Failed to restore database: " + error.message);
throw new Error(`Database restore failed: ${error.message}`);
}
}
export async function updateWorkerImage() {
const { TRIGGER_DB, TRIGGER_TASKS_IMAGE } = env;
if (!TRIGGER_DB) {
throw new Error("TRIGGER_DB environment variable is missing");
}
const connectionString = getDatabaseUrl(TRIGGER_DB);
const knex = Knex({
client: "pg",
connection: connectionString,
});
const s = spinner();
s.start("Updating worker image reference...");
try {
// Get the first record from WorkerDeployment table
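// (Assumes a freshly restored trigger.dump contains exactly one
// WorkerDeployment row; no explicit ordering is applied.)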
const firstWorkerDeployment = await knex("WorkerDeployment").select("id").first();
if (!firstWorkerDeployment) {
s.stop("No WorkerDeployment records found, skipping image update");
note("No WorkerDeployment records found, skipping image update");
return;
}
// Update the imageReference column with the TRIGGER_TASKS_IMAGE value
await knex("WorkerDeployment").where("id", firstWorkerDeployment.id).update({
imageReference: TRIGGER_TASKS_IMAGE,
updatedAt: new Date(),
});
s.stop(`Successfully updated worker image reference to: ${TRIGGER_TASKS_IMAGE}`);
} catch (error: any) {
s.stop("Failed to update worker image.");
log.warning("Failed to update worker image: " + error.message);
throw new Error(`Worker image update failed: ${error.message}`);
} finally {
await knex.destroy();
}
}

apps/init/trigger.dump Normal file (binary, not shown)

View File

@ -0,0 +1,183 @@
import * as React from "react"
import {
BookOpen,
Bot,
Command,
Frame,
LifeBuoy,
Map,
PieChart,
Send,
Settings2,
SquareTerminal,
} from "lucide-react"
import { NavMain } from "~/components/nav-main"
import { NavProjects } from "~/components/nav-projects"
import { NavSecondary } from "~/components/nav-secondary"
import { NavUser } from "~/components/nav-user"
import {
Sidebar,
SidebarContent,
SidebarFooter,
SidebarHeader,
SidebarMenu,
SidebarMenuButton,
SidebarMenuItem,
} from "~/components/ui/sidebar"
const data = {
user: {
name: "shadcn",
email: "m@example.com",
avatar: "/avatars/shadcn.jpg",
},
navMain: [
{
title: "Playground",
url: "#",
icon: SquareTerminal,
isActive: true,
items: [
{
title: "History",
url: "#",
},
{
title: "Starred",
url: "#",
},
{
title: "Settings",
url: "#",
},
],
},
{
title: "Models",
url: "#",
icon: Bot,
items: [
{
title: "Genesis",
url: "#",
},
{
title: "Explorer",
url: "#",
},
{
title: "Quantum",
url: "#",
},
],
},
{
title: "Documentation",
url: "#",
icon: BookOpen,
items: [
{
title: "Introduction",
url: "#",
},
{
title: "Get Started",
url: "#",
},
{
title: "Tutorials",
url: "#",
},
{
title: "Changelog",
url: "#",
},
],
},
{
title: "Settings",
url: "#",
icon: Settings2,
items: [
{
title: "General",
url: "#",
},
{
title: "Team",
url: "#",
},
{
title: "Billing",
url: "#",
},
{
title: "Limits",
url: "#",
},
],
},
],
navSecondary: [
{
title: "Support",
url: "#",
icon: LifeBuoy,
},
{
title: "Feedback",
url: "#",
icon: Send,
},
],
projects: [
{
name: "Design Engineering",
url: "#",
icon: Frame,
},
{
name: "Sales & Marketing",
url: "#",
icon: PieChart,
},
{
name: "Travel",
url: "#",
icon: Map,
},
],
}
export function AppSidebar({ ...props }: React.ComponentProps<typeof Sidebar>) {
return (
<Sidebar variant="inset" {...props}>
<SidebarHeader>
<SidebarMenu>
<SidebarMenuItem>
<SidebarMenuButton size="lg" asChild>
<a href="#">
<div className="bg-sidebar-primary text-sidebar-primary-foreground flex aspect-square size-8 items-center justify-center rounded-lg">
<Command className="size-4" />
</div>
<div className="grid flex-1 text-left text-sm leading-tight">
<span className="truncate font-medium">Acme Inc</span>
<span className="truncate text-xs">Enterprise</span>
</div>
</a>
</SidebarMenuButton>
</SidebarMenuItem>
</SidebarMenu>
</SidebarHeader>
<SidebarContent>
<NavMain items={data.navMain} />
<NavProjects projects={data.projects} />
<NavSecondary items={data.navSecondary} className="mt-auto" />
</SidebarContent>
<SidebarFooter>
<NavUser user={data.user} />
</SidebarFooter>
</Sidebar>
)
}

View File

@ -5,12 +5,12 @@ import {
useCallback,
useImperativeHandle,
forwardRef,
useState,
} from "react";
import Sigma from "sigma";
import GraphologyGraph from "graphology";
import forceAtlas2 from "graphology-layout-forceatlas2";
import noverlap from "graphology-layout-noverlap";
import FA2Layout from "graphology-layout-forceatlas2/worker";
import { EdgeLineProgram } from "sigma/rendering";
import colors from "tailwindcss/colors";
import type { GraphTriplet, IdValue, GraphNode } from "./type";
import {
@ -369,6 +369,10 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
const optimalParams = calculateOptimalParameters(graph);
const settings = forceAtlas2.inferSettings(graph);
const fa2Layout = new FA2Layout(graph, {
settings: settings,
});
forceAtlas2.assign(graph, {
iterations: optimalParams.iterations,
settings: {
@ -381,21 +385,25 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
},
});
noverlap.assign(graph, {
maxIterations: 200,
settings: {
margin: 10,
expansion: 1.5,
gridSize: 30,
},
});
// noverlap.assign(graph, {
// maxIterations: 200,
// settings: {
// margin: 10,
// expansion: 1.5,
// gridSize: 30,
// },
// });
}
// Create Sigma instance
const sigma = new Sigma(graph, containerRef.current, {
renderEdgeLabels: true,
defaultEdgeColor: theme.link.stroke,
defaultEdgeColor: "#0000001A",
defaultNodeColor: theme.node.fill,
defaultEdgeType: "edges-fast",
edgeProgramClasses: {
"edges-fast": EdgeLineProgram,
},
enableEdgeEvents: true,
minCameraRatio: 0.1,
maxCameraRatio: 2,
@ -526,28 +534,6 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
};
}, [nodes, edges]);
// // Theme update effect
// useEffect(() => {
// if (!sigmaRef.current || !graphRef.current || !isInitializedRef.current)
// return;
// const graph = graphRef.current;
// graph.forEachNode((node) => {
// const nodeData = graph.getNodeAttribute(node, "nodeData");
// const isHighlighted = graph.getNodeAttribute(node, "highlighted");
// if (!isHighlighted) {
// graph.setNodeAttribute(node, "color", getNodeColor(nodeData));
// }
// });
// graph.forEachEdge((edge) => {
// const isHighlighted = graph.getEdgeAttribute(edge, "highlighted");
// if (!isHighlighted) {
// graph.setEdgeAttribute(edge, "color", theme.link.stroke);
// }
// });
// sigmaRef.current.setSetting("defaultEdgeColor", theme.link.stroke);
// sigmaRef.current.setSetting("defaultNodeColor", "red");
// }, [theme, getNodeColor]);
return (
<div
ref={containerRef}

View File

@ -5,7 +5,7 @@ import { GalleryVerticalEnd } from "lucide-react";
export function LoginPageLayout({ children }: { children: React.ReactNode }) {
return (
<div className="grid min-h-svh lg:grid-cols-2">
<div className="grid h-[100vh] w-[100vw] grid-cols-1 overflow-hidden xl:grid-cols-2">
<div className="flex flex-col gap-4 p-6 md:p-10">
<div className="flex justify-center gap-2 md:justify-start">
<a href="#" className="flex items-center gap-2 font-medium">
@ -19,7 +19,7 @@ export function LoginPageLayout({ children }: { children: React.ReactNode }) {
<div className="w-full max-w-sm">{children}</div>
</div>
</div>
<div className="relative hidden lg:block">
<div className="relative hidden xl:block">
<img
src="/login.png"
alt="Image"

View File

@ -0,0 +1,78 @@
"use client"
import { ChevronRight, type LucideIcon } from "lucide-react"
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "~/components/ui/collapsible"
import {
SidebarGroup,
SidebarGroupLabel,
SidebarMenu,
SidebarMenuAction,
SidebarMenuButton,
SidebarMenuItem,
SidebarMenuSub,
SidebarMenuSubButton,
SidebarMenuSubItem,
} from "~/components/ui/sidebar"
export function NavMain({
items,
}: {
items: {
title: string
url: string
icon: LucideIcon
isActive?: boolean
items?: {
title: string
url: string
}[]
}[]
}) {
return (
<SidebarGroup>
<SidebarGroupLabel>Platform</SidebarGroupLabel>
<SidebarMenu>
{items.map((item) => (
<Collapsible key={item.title} asChild defaultOpen={item.isActive}>
<SidebarMenuItem>
<SidebarMenuButton asChild tooltip={item.title}>
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
{item.items?.length ? (
<>
<CollapsibleTrigger asChild>
<SidebarMenuAction className="data-[state=open]:rotate-90">
<ChevronRight />
<span className="sr-only">Toggle</span>
</SidebarMenuAction>
</CollapsibleTrigger>
<CollapsibleContent>
<SidebarMenuSub>
{item.items?.map((subItem) => (
<SidebarMenuSubItem key={subItem.title}>
<SidebarMenuSubButton asChild>
<a href={subItem.url}>
<span>{subItem.title}</span>
</a>
</SidebarMenuSubButton>
</SidebarMenuSubItem>
))}
</SidebarMenuSub>
</CollapsibleContent>
</>
) : null}
</SidebarMenuItem>
</Collapsible>
))}
</SidebarMenu>
</SidebarGroup>
)
}

View File

@ -0,0 +1,87 @@
import {
Folder,
MoreHorizontal,
Share,
Trash2,
type LucideIcon,
} from "lucide-react"
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "~/components/ui/dropdown-menu"
import {
SidebarGroup,
SidebarGroupLabel,
SidebarMenu,
SidebarMenuAction,
SidebarMenuButton,
SidebarMenuItem,
useSidebar,
} from "~/components/ui/sidebar"
export function NavProjects({
projects,
}: {
projects: {
name: string
url: string
icon: LucideIcon
}[]
}) {
const { isMobile } = useSidebar()
return (
<SidebarGroup className="group-data-[collapsible=icon]:hidden">
<SidebarGroupLabel>Projects</SidebarGroupLabel>
<SidebarMenu>
{projects.map((item) => (
<SidebarMenuItem key={item.name}>
<SidebarMenuButton asChild>
<a href={item.url}>
<item.icon />
<span>{item.name}</span>
</a>
</SidebarMenuButton>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<SidebarMenuAction showOnHover>
<MoreHorizontal />
<span className="sr-only">More</span>
</SidebarMenuAction>
</DropdownMenuTrigger>
<DropdownMenuContent
className="w-48"
side={isMobile ? "bottom" : "right"}
align={isMobile ? "end" : "start"}
>
<DropdownMenuItem>
<Folder className="text-muted-foreground" />
<span>View Project</span>
</DropdownMenuItem>
<DropdownMenuItem>
<Share className="text-muted-foreground" />
<span>Share Project</span>
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem>
<Trash2 className="text-muted-foreground" />
<span>Delete Project</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</SidebarMenuItem>
))}
<SidebarMenuItem>
<SidebarMenuButton>
<MoreHorizontal />
<span>More</span>
</SidebarMenuButton>
</SidebarMenuItem>
</SidebarMenu>
</SidebarGroup>
)
}

View File

@ -0,0 +1,40 @@
import * as React from "react"
import { type LucideIcon } from "lucide-react"
import {
SidebarGroup,
SidebarGroupContent,
SidebarMenu,
SidebarMenuButton,
SidebarMenuItem,
} from "~/components/ui/sidebar"
export function NavSecondary({
items,
...props
}: {
items: {
title: string
url: string
icon: LucideIcon
}[]
} & React.ComponentPropsWithoutRef<typeof SidebarGroup>) {
return (
<SidebarGroup {...props}>
<SidebarGroupContent>
<SidebarMenu>
{items.map((item) => (
<SidebarMenuItem key={item.title}>
<SidebarMenuButton asChild size="sm">
<a href={item.url}>
<item.icon />
<span>{item.title}</span>
</a>
</SidebarMenuButton>
</SidebarMenuItem>
))}
</SidebarMenu>
</SidebarGroupContent>
</SidebarGroup>
)
}

View File

@ -0,0 +1,114 @@
"use client"
import {
BadgeCheck,
Bell,
ChevronsUpDown,
CreditCard,
LogOut,
Sparkles,
} from "lucide-react"
import {
Avatar,
AvatarFallback,
AvatarImage,
} from "~/components/ui/avatar"
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuGroup,
DropdownMenuItem,
DropdownMenuLabel,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "~/components/ui/dropdown-menu"
import {
SidebarMenu,
SidebarMenuButton,
SidebarMenuItem,
useSidebar,
} from "~/components/ui/sidebar"
export function NavUser({
user,
}: {
user: {
name: string
email: string
avatar: string
}
}) {
const { isMobile } = useSidebar()
return (
<SidebarMenu>
<SidebarMenuItem>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<SidebarMenuButton
size="lg"
className="data-[state=open]:bg-sidebar-accent data-[state=open]:text-sidebar-accent-foreground"
>
<Avatar className="h-8 w-8 rounded-lg">
<AvatarImage src={user.avatar} alt={user.name} />
<AvatarFallback className="rounded-lg">CN</AvatarFallback>
</Avatar>
<div className="grid flex-1 text-left text-sm leading-tight">
<span className="truncate font-medium">{user.name}</span>
<span className="truncate text-xs">{user.email}</span>
</div>
<ChevronsUpDown className="ml-auto size-4" />
</SidebarMenuButton>
</DropdownMenuTrigger>
<DropdownMenuContent
className="w-(--radix-dropdown-menu-trigger-width) min-w-56 rounded-lg"
side={isMobile ? "bottom" : "right"}
align="end"
sideOffset={4}
>
<DropdownMenuLabel className="p-0 font-normal">
<div className="flex items-center gap-2 px-1 py-1.5 text-left text-sm">
<Avatar className="h-8 w-8 rounded-lg">
<AvatarImage src={user.avatar} alt={user.name} />
<AvatarFallback className="rounded-lg">CN</AvatarFallback>
</Avatar>
<div className="grid flex-1 text-left text-sm leading-tight">
<span className="truncate font-medium">{user.name}</span>
<span className="truncate text-xs">{user.email}</span>
</div>
</div>
</DropdownMenuLabel>
<DropdownMenuSeparator />
<DropdownMenuGroup>
<DropdownMenuItem>
<Sparkles />
Upgrade to Pro
</DropdownMenuItem>
</DropdownMenuGroup>
<DropdownMenuSeparator />
<DropdownMenuGroup>
<DropdownMenuItem>
<BadgeCheck />
Account
</DropdownMenuItem>
<DropdownMenuItem>
<CreditCard />
Billing
</DropdownMenuItem>
<DropdownMenuItem>
<Bell />
Notifications
</DropdownMenuItem>
</DropdownMenuGroup>
<DropdownMenuSeparator />
<DropdownMenuItem>
<LogOut />
Log out
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</SidebarMenuItem>
</SidebarMenu>
)
}

View File

@ -28,7 +28,7 @@ const data = {
icon: Network,
},
{
title: "Logs",
title: "Activity",
url: "/home/logs",
icon: Activity,
},

View File

@ -0,0 +1,109 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { ChevronRight, MoreHorizontal } from "lucide-react"
import { cn } from "~/lib/utils"
function Breadcrumb({ ...props }: React.ComponentProps<"nav">) {
return <nav aria-label="breadcrumb" data-slot="breadcrumb" {...props} />
}
function BreadcrumbList({ className, ...props }: React.ComponentProps<"ol">) {
return (
<ol
data-slot="breadcrumb-list"
className={cn(
"text-muted-foreground flex flex-wrap items-center gap-1.5 text-sm break-words sm:gap-2.5",
className
)}
{...props}
/>
)
}
function BreadcrumbItem({ className, ...props }: React.ComponentProps<"li">) {
return (
<li
data-slot="breadcrumb-item"
className={cn("inline-flex items-center gap-1.5", className)}
{...props}
/>
)
}
function BreadcrumbLink({
asChild,
className,
...props
}: React.ComponentProps<"a"> & {
asChild?: boolean
}) {
const Comp = asChild ? Slot : "a"
return (
<Comp
data-slot="breadcrumb-link"
className={cn("hover:text-foreground transition-colors", className)}
{...props}
/>
)
}
function BreadcrumbPage({ className, ...props }: React.ComponentProps<"span">) {
return (
<span
data-slot="breadcrumb-page"
role="link"
aria-disabled="true"
aria-current="page"
className={cn("text-foreground font-normal", className)}
{...props}
/>
)
}
function BreadcrumbSeparator({
children,
className,
...props
}: React.ComponentProps<"li">) {
return (
<li
data-slot="breadcrumb-separator"
role="presentation"
aria-hidden="true"
className={cn("[&>svg]:size-3.5", className)}
{...props}
>
{children ?? <ChevronRight />}
</li>
)
}
function BreadcrumbEllipsis({
className,
...props
}: React.ComponentProps<"span">) {
return (
<span
data-slot="breadcrumb-ellipsis"
role="presentation"
aria-hidden="true"
className={cn("flex size-9 items-center justify-center", className)}
{...props}
>
<MoreHorizontal className="size-4" />
<span className="sr-only">More</span>
</span>
)
}
export {
Breadcrumb,
BreadcrumbList,
BreadcrumbItem,
BreadcrumbLink,
BreadcrumbPage,
BreadcrumbSeparator,
BreadcrumbEllipsis,
}

View File

@ -0,0 +1,31 @@
import * as CollapsiblePrimitive from "@radix-ui/react-collapsible"
function Collapsible({
...props
}: React.ComponentProps<typeof CollapsiblePrimitive.Root>) {
return <CollapsiblePrimitive.Root data-slot="collapsible" {...props} />
}
function CollapsibleTrigger({
...props
}: React.ComponentProps<typeof CollapsiblePrimitive.CollapsibleTrigger>) {
return (
<CollapsiblePrimitive.CollapsibleTrigger
data-slot="collapsible-trigger"
{...props}
/>
)
}
function CollapsibleContent({
...props
}: React.ComponentProps<typeof CollapsiblePrimitive.CollapsibleContent>) {
return (
<CollapsiblePrimitive.CollapsibleContent
data-slot="collapsible-content"
{...props}
/>
)
}
export { Collapsible, CollapsibleTrigger, CollapsibleContent }

View File

@ -75,6 +75,7 @@ const EnvironmentSchema = z.object({
TRIGGER_PROJECT_ID: z.string(),
TRIGGER_SECRET_KEY: z.string(),
TRIGGER_API_URL: z.string(),
TRIGGER_DB: z.string().default("trigger"),
// Model envs
MODEL: z.string().default(LLMModelEnum.GPT41),

View File

@ -122,7 +122,7 @@ function App() {
<Links />
<PreventFlashOnWrongTheme ssrTheme={Boolean(theme)} />
</head>
<body className="bg-background-2 h-full overflow-hidden font-sans">
<body className="bg-background-2 h-[100vh] h-full w-[100vw] overflow-hidden font-sans">
<Outlet />
<ScrollRestoration />

View File

@ -94,6 +94,14 @@ export const action = async ({ request }: ActionFunctionArgs) => {
"openid",
// Integration scope
"integration",
"integration:read",
"integration:credentials",
"integration:manage",
"integration:webhook",
// MCP scope
"mcp",
"mcp:read",
"mcp:write",
];
const requestedScopes = Array.isArray(allowedScopes)

View File

@ -0,0 +1,114 @@
import { type LoaderFunctionArgs, json } from "@remix-run/node";
import { z } from "zod";
import { authenticateOAuthRequest } from "~/services/apiAuth.server";
import { prisma } from "~/db.server";
// Schema for the integration account ID parameter
const ParamsSchema = z.object({
id: z.string().min(1, "Integration account ID is required"),
});
/**
* API endpoint for OAuth apps to get integration account credentials
* GET /api/v1/integration_account/:id/credentials
* Authorization: Bearer <oauth_access_token>
* Required scope: integration:credentials
*/
export const loader = async ({ request, params }: LoaderFunctionArgs) => {
try {
// Authenticate OAuth request and verify integration:credentials scope
const authResult = await authenticateOAuthRequest(request, ["integration:credentials", "integration"]);
if (!authResult.success) {
return json(
{
error: "unauthorized",
error_description: authResult.error
},
{ status: 401 }
);
}
// Validate parameters
const parseResult = ParamsSchema.safeParse(params);
if (!parseResult.success) {
return json(
{
error: "invalid_request",
error_description: "Invalid integration account ID"
},
{ status: 400 }
);
}
const { id } = parseResult.data;
// Get the integration account with proper access control
const integrationAccount = await prisma.integrationAccount.findFirst({
where: {
id,
integratedById: authResult.user!.id, // Ensure user owns this integration account
isActive: true,
deleted: null,
},
include: {
integrationDefinition: {
select: {
id: true,
name: true,
slug: true,
description: true,
icon: true,
},
},
},
});
if (!integrationAccount) {
return json(
{
error: "not_found",
error_description: "Integration account not found or access denied"
},
{ status: 404 }
);
}
// Extract credentials from integrationConfiguration
const credentials = integrationAccount.integrationConfiguration as Record<string, any>;
// Return the credentials and metadata
return json({
id: integrationAccount.id,
accountId: integrationAccount.accountId,
provider: integrationAccount.integrationDefinition.slug,
name: integrationAccount.integrationDefinition.name,
icon: integrationAccount.integrationDefinition.icon,
credentials,
settings: integrationAccount.settings,
connectedAt: integrationAccount.createdAt,
isActive: integrationAccount.isActive,
});
} catch (error) {
console.error("Error fetching integration account credentials:", error);
return json(
{
error: "server_error",
error_description: "Internal server error"
},
{ status: 500 }
);
}
};
// Method not allowed for non-GET requests
export const action = async () => {
return json(
{
error: "method_not_allowed",
error_description: "Only GET requests are allowed"
},
{ status: 405 }
);
};
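// Usage sketch for an OAuth app (host, token, and account id are illustrative):
// const res = await fetch(
//   `https://core.example.com/api/v1/integration_account/${accountId}/credentials`,
//   { headers: { Authorization: `Bearer ${accessToken}` } },
// );
// const { provider, credentials } = await res.json();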

View File

@ -22,11 +22,9 @@ export async function action({ request }: ActionFunctionArgs) {
);
}
// Soft delete the integration account by setting deletedAt
const updatedAccount = await prisma.integrationAccount.delete({
where: {
id: integrationAccountId,
deleted: null,
},
});
@ -34,6 +32,7 @@ export async function action({ request }: ActionFunctionArgs) {
integrationAccountId,
userId,
"integration.disconnected",
updatedAccount.workspaceId,
);
logger.info("Integration account disconnected (soft deleted)", {

View File

@ -1,135 +0,0 @@
import { type LoaderFunctionArgs, json } from "@remix-run/node";
import { prisma } from "~/db.server";
import { requireUserId } from "~/services/session.server";
export async function loader({ request }: LoaderFunctionArgs) {
const userId = await requireUserId(request);
const url = new URL(request.url);
const page = parseInt(url.searchParams.get("page") || "1");
const limit = parseInt(url.searchParams.get("limit") || "20");
const source = url.searchParams.get("source");
const status = url.searchParams.get("status");
const skip = (page - 1) * limit;
const user = await prisma.user.findUnique({
where: { id: userId },
include: { Workspace: true },
});
if (!user?.Workspace) {
throw new Response("Workspace not found", { status: 404 });
}
// Build where clause for filtering - only items with activityId
const whereClause: any = {
workspaceId: user.Workspace.id,
activityId: {
not: null,
},
};
if (status) {
whereClause.status = status;
}
// If source filter is provided, we need to filter by integration source
if (source) {
whereClause.activity = {
integrationAccount: {
integrationDefinition: {
slug: source,
},
},
};
}
const [logs, totalCount] = await Promise.all([
prisma.ingestionQueue.findMany({
where: whereClause,
include: {
activity: {
include: {
integrationAccount: {
include: {
integrationDefinition: {
select: {
name: true,
slug: true,
},
},
},
},
},
},
},
orderBy: {
createdAt: "desc",
},
skip,
take: limit,
}),
prisma.ingestionQueue.count({
where: whereClause,
}),
]);
// Get available sources for filtering (only those with activities)
const availableSources = await prisma.integrationDefinitionV2.findMany({
where: {
IntegrationAccount: {
some: {
workspaceId: user.Workspace.id,
Activity: {
some: {
IngestionQueue: {
some: {
activityId: {
not: null,
},
},
},
},
},
},
},
},
select: {
name: true,
slug: true,
},
});
// Format the response
const formattedLogs = logs.map((log) => ({
id: log.id,
source:
log.activity?.integrationAccount?.integrationDefinition?.name ||
(log.data as any)?.source ||
"Unknown",
ingestText:
log.activity?.text ||
(log.data as any)?.episodeBody ||
(log.data as any)?.text ||
"No content",
time: log.createdAt,
processedAt: log.processedAt,
status: log.status,
error: log.error,
episodeUUID: (log.output as any)?.episodeUuid,
sourceURL: log.activity?.sourceURL,
integrationSlug:
log.activity?.integrationAccount?.integrationDefinition?.slug,
activityId: log.activityId,
data: log.data,
}));
return json({
logs: formattedLogs,
totalCount,
page,
limit,
hasMore: skip + logs.length < totalCount,
availableSources,
});
}

View File

@ -26,7 +26,8 @@ export default function Dashboard() {
setLoading(true);
try {
const res = await fetch(
"/node-links?userId=" + encodeURIComponent(userId),
"/node-links?userId=" +
encodeURIComponent("cmc0x85jv0000nu1wiu1yla73"),
);
if (!res.ok) throw new Error("Failed to fetch node links");
const data = await res.json();

View File

@ -1,110 +0,0 @@
import { useState } from "react";
import { useNavigate } from "@remix-run/react";
import { useLogs } from "~/hooks/use-logs";
import { LogsFilters } from "~/components/logs/logs-filters";
import { VirtualLogsList } from "~/components/logs/virtual-logs-list";
import { AppContainer, PageContainer } from "~/components/layout/app-layout";
import { Card, CardContent } from "~/components/ui/card";
import { Activity, LoaderCircle } from "lucide-react";
import { PageHeader } from "~/components/common/page-header";
export default function LogsActivity() {
const navigate = useNavigate();
const [selectedSource, setSelectedSource] = useState<string | undefined>();
const [selectedStatus, setSelectedStatus] = useState<string | undefined>();
const {
logs,
hasMore,
loadMore,
availableSources,
isLoading,
isInitialLoad,
} = useLogs({
endpoint: "/api/v1/logs/activity",
source: selectedSource,
status: selectedStatus,
});
if (isInitialLoad) {
return (
<AppContainer>
<PageContainer>
<div className="flex h-[calc(100vh_-_16px)] items-center justify-center">
<LoaderCircle className="text-primary h-4 w-4 animate-spin" />
</div>
</PageContainer>
</AppContainer>
);
}
return (
<div className="flex h-full flex-col">
<PageHeader
title="Logs"
tabs={[
{
label: "All",
value: "all",
isActive: false,
onClick: () => navigate("/home/logs/all"),
},
{
label: "Activity",
value: "activity",
isActive: true,
onClick: () => navigate("/home/logs/activity"),
},
]}
/>
<div className="flex h-[calc(100vh_-_56px)] flex-col items-center space-y-6 p-4 px-5">
{isInitialLoad ? (
<>
<LoaderCircle className="text-primary h-4 w-4 animate-spin" />{" "}
</>
) : (
<>
{logs.length > 0 && (
<LogsFilters
availableSources={availableSources}
selectedSource={selectedSource}
selectedStatus={selectedStatus}
onSourceChange={setSelectedSource}
onStatusChange={setSelectedStatus}
/>
)}
{/* Logs List */}
<div className="flex h-full w-full space-y-4">
{logs.length === 0 ? (
<Card className="bg-background-2 w-full">
<CardContent className="bg-background-2 flex items-center justify-center py-16">
<div className="text-center">
<Activity className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
<h3 className="mb-2 text-lg font-semibold">
No activity logs found
</h3>
<p className="text-muted-foreground">
{selectedSource || selectedStatus
? "Try adjusting your filters to see more results."
: "No activity ingestion logs are available yet."}
</p>
</div>
</CardContent>
</Card>
) : (
<VirtualLogsList
logs={logs}
hasMore={hasMore}
loadMore={loadMore}
isLoading={isLoading}
height={600}
/>
)}
</div>
</>
)}
</div>
</div>
);
}

View File

@ -28,23 +28,7 @@ export default function LogsAll() {
return (
<>
<PageHeader
title="Logs"
tabs={[
{
label: "All",
value: "all",
isActive: true,
onClick: () => navigate("/home/logs/all"),
},
{
label: "Activity",
value: "activity",
isActive: false,
onClick: () => navigate("/home/logs/activity"),
},
]}
/>
<PageHeader title="Logs" />
<div className="flex h-[calc(100vh_-_56px)] w-full flex-col items-center space-y-6 p-4 px-5">
{isInitialLoad ? (
<>

View File

@ -1135,10 +1135,25 @@ export class KnowledgeGraphService {
return null;
}
const integrationAccount = await prisma.integrationAccount.findFirst({
where: {
integrationDefinition: {
slug: source,
},
workspaceId: user.Workspace.id,
isActive: true,
deleted: null,
},
});
if (!integrationAccount) {
return null;
}
// Fetch active rules for this source
const rules = await prisma.ingestionRule.findMany({
where: {
source,
source: integrationAccount.id,
workspaceId: user.Workspace.id,
isActive: true,
deleted: null,

View File

@ -289,8 +289,17 @@ export class OAuth2Service {
// Google-style auth scopes
const authScopes = ["profile", "email", "openid"];
// Single integration scope
const integrationScopes = ["integration"];
// Integration-related scopes
const integrationScopes = [
"integration",
"integration:read",
"integration:credentials",
"integration:manage",
"integration:webhook",
];
// MCP-related scopes
const mcpScopes = ["mcp", "mcp:read", "mcp:write"];
const hasAuthScopes = scopes.some((s) => authScopes.includes(s));
const hasIntegrationScopes = scopes.some((s) =>
@ -324,6 +333,34 @@ export class OAuth2Service {
description: "Access your workspace integrations",
icon: "database",
},
"integration:read": {
description: "Read integration metadata and status",
icon: "eye",
},
"integration:credentials": {
description: "Access integration account credentials",
icon: "key",
},
"integration:manage": {
description: "Create, update, and delete integrations",
icon: "settings",
},
"integration:webhook": {
description: "Manage integration webhooks",
icon: "webhook",
},
mcp: {
description: "Access MCP endpoints",
icon: "mcp",
},
"mcp:read": {
description: "Read MCP endpoints",
icon: "eye",
},
"mcp:write": {
description: "Write to MCP endpoints",
icon: "pencil",
},
};
return scopes.map((scope) => ({
@ -560,7 +597,6 @@ export class OAuth2Service {
expiresAt: { gt: new Date() },
userId: tokenPayload.user_id,
workspaceId: tokenPayload.workspace_id,
...(scopes ? { scope: { contains: scopes.join(",") } } : {}),
},
include: {
client: true,
@ -568,6 +604,20 @@ export class OAuth2Service {
},
});
// Validate scopes separately if requested
if (scopes && accessToken) {
const tokenScopes =
accessToken.scope?.split(",").map((s) => s.trim()) || [];
const hasRequiredScope = scopes.some((requiredScope) =>
tokenScopes.some((tokenScope) => tokenScope === requiredScope),
);
if (!hasRequiredScope) {
throw new Error("Insufficient scope");
}
}
if (!accessToken) {
throw new Error("Invalid or expired token");
}

View File

@ -328,7 +328,6 @@
@apply bg-background-2 text-foreground text-base;
}
@supports (scrollbar-width: auto) {
.overflow-y-auto,
.overflow-x-auto,

View File

@ -232,6 +232,7 @@ async function handleAccountMessage(
integrationAccountId,
userId,
"mcp.connected",
workspaceId,
);
return config;
}
@ -255,6 +256,7 @@ async function handleAccountMessage(
integrationAccount.id,
userId,
"integration.connected",
workspaceId,
);
} catch (error) {
logger.error("Failed to trigger OAuth integration webhook", {

View File

@ -21,7 +21,7 @@ interface BatchResult {
export const entity = queue({
name: "entity-queue",
concurrencyLimit: 10,
concurrencyLimit: 5,
});
/**
@ -31,19 +31,30 @@ export const updateAllEntityEmbeddings = task({
id: "update-all-entity-embeddings",
machine: "large-1x",
run: async (payload: { userId?: string; batchSize?: number } = {}) => {
const { userId, batchSize = 100 } = payload;
run: async (
payload: {
userId?: string;
batchSize?: number;
forceUpdate?: boolean;
} = {},
) => {
const { userId, batchSize = 50, forceUpdate = false } = payload;
logger.info("Starting entity embeddings update with fan-out approach", {
userId,
batchSize,
forceUpdate,
targetScope: userId ? `user ${userId}` : "all users",
});
try {
// Step 1: Fetch all entities
const entities = await getAllEntities(userId);
logger.info(`Found ${entities.length} entities to update`);
// Step 1: Fetch entities (either all or only those needing updates)
const entities = forceUpdate
? await getAllEntitiesForceRefresh(userId)
: await getAllEntities(userId);
logger.info(`Found ${entities.length} entities to update`, {
strategy: forceUpdate ? "force-refresh-all" : "missing-embeddings-only",
});
if (entities.length === 0) {
return {
@ -192,9 +203,56 @@ export const updateEntityBatch = task({
});
/**
* Fetch all entities from Neo4j database
* Fetch all entities from Neo4j database that need embedding updates
*/
async function getAllEntities(userId?: string): Promise<EntityNode[]> {
try {
// Only fetch entities whose name/type embeddings are null or empty;
// a full refresh of every entity is handled by getAllEntitiesForceRefresh below.
const query = userId
? `MATCH (entity:Entity {userId: $userId})
WHERE entity.nameEmbedding IS NULL
OR entity.typeEmbedding IS NULL
OR size(entity.nameEmbedding) = 0
OR size(entity.typeEmbedding) = 0
RETURN entity ORDER BY entity.createdAt`
: `MATCH (entity:Entity)
WHERE entity.nameEmbedding IS NULL
OR entity.typeEmbedding IS NULL
OR size(entity.nameEmbedding) = 0
OR size(entity.typeEmbedding) = 0
RETURN entity ORDER BY entity.createdAt`;
const params = userId ? { userId } : {};
const records = await runQuery(query, params);
return records.map((record) => {
const entityProps = record.get("entity").properties;
return {
uuid: entityProps.uuid,
name: entityProps.name,
type: entityProps.type,
attributes: JSON.parse(entityProps.attributes || "{}"),
nameEmbedding: entityProps.nameEmbedding || [],
typeEmbedding: entityProps.typeEmbedding || [],
createdAt: new Date(entityProps.createdAt),
userId: entityProps.userId,
space: entityProps.space,
};
});
} catch (error) {
logger.error("Error fetching entities:", { error });
throw new Error(`Failed to fetch entities: ${error}`);
}
}
/**
* Fetch ALL entities from Neo4j database (for force refresh)
*/
async function getAllEntitiesForceRefresh(
userId?: string,
): Promise<EntityNode[]> {
try {
const query = userId
? `MATCH (entity:Entity {userId: $userId}) RETURN entity ORDER BY entity.createdAt`
@ -287,6 +345,7 @@ export async function triggerEntityEmbeddingsUpdate(
options: {
userId?: string;
batchSize?: number;
forceUpdate?: boolean;
} = {},
) {
try {

View File

@ -1,6 +1,8 @@
import { PrismaClient } from "@prisma/client";
import { type Message } from "@core/types";
import { addToQueue } from "./queue";
import { triggerWebhookDelivery } from "../webhooks/webhook-delivery";
import { logger } from "@trigger.dev/sdk";
const prisma = new PrismaClient();
@ -157,6 +159,23 @@ export const createActivities = async ({
activity.id,
);
if (integrationAccount?.workspaceId) {
try {
await triggerWebhookDelivery(
activity.id,
integrationAccount?.workspaceId,
);
logger.log("Webhook delivery triggered for activity", {
activityId: activity.id,
});
} catch (error) {
logger.error("Failed to trigger webhook delivery", {
activityId: activity.id,
error,
});
}
}
return {
activityId: activity.id,
queueId: queueResponse.id,

View File

@ -17,6 +17,7 @@ interface OAuthIntegrationWebhookPayload {
integrationAccountId: string;
eventType: WebhookEventType;
userId: string;
workspaceId: string;
}
export const integrationWebhookTask = task({
@ -36,11 +37,51 @@ export const integrationWebhookTask = task({
},
});
if (!integrationAccount) {
let webhookPayload: any = {};
if (
!integrationAccount &&
payload.eventType === "integration.disconnected"
) {
webhookPayload = {
event: payload.eventType,
user_id: payload.userId,
integration: {
id: payload.integrationAccountId,
},
};
} else if (!integrationAccount) {
logger.error(
`Integration account ${payload.integrationAccountId} not found`,
);
return { success: false, error: "Integration account not found" };
} else {
const integrationConfig =
integrationAccount.integrationConfiguration as any;
const integrationSpec = integrationAccount.integrationDefinition
.spec as any;
let mcpEndpoint = undefined;
// Remote and stdio MCP integrations are both served through the same proxy route
if (integrationSpec?.mcp) {
mcpEndpoint = `${process.env.API_BASE_URL}/api/v1/mcp/${integrationAccount.integrationDefinition.slug}`;
}
// Prepare webhook payload
webhookPayload = {
event: payload.eventType,
user_id: payload.userId,
integration: {
id: integrationAccount.id,
provider: integrationAccount.integrationDefinition.slug,
mcpEndpoint: mcpEndpoint,
name: integrationAccount.integrationDefinition.name,
icon: integrationAccount.integrationDefinition.icon,
},
timestamp: new Date().toISOString(),
};
}
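// Example delivered payload (sketch; field values are illustrative):
// {
//   event: "integration.connected",
//   user_id: "user_123",
//   integration: { id: "...", provider: "github", mcpEndpoint: "...", name: "GitHub", icon: "..." },
//   timestamp: "2025-08-01T05:04:16.000Z",
// }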
// Get all OAuth clients that:
@ -48,13 +89,15 @@ export const integrationWebhookTask = task({
// 2. Have webhook URLs configured
const oauthClients = await prisma.oAuthClientInstallation.findMany({
where: {
workspaceId: integrationAccount.workspaceId,
workspaceId: payload.workspaceId,
installedById: payload.userId,
isActive: true,
// Check if client has integration scope in allowedScopes
grantedScopes: {
contains: "integration",
},
oauthClient: {
clientType: "regular",
},
},
select: {
id: true,
@ -77,24 +120,6 @@ export const integrationWebhookTask = task({
return { success: true, message: "No OAuth clients to notify" };
}
const integrationConfig =
integrationAccount.integrationConfiguration as any;
// Prepare webhook payload
const webhookPayload = {
event: payload.eventType,
user_id: payload.userId,
integration: {
id: integrationAccount.id,
provider: integrationAccount.integrationDefinition.slug,
mcp_endpoint: integrationConfig.mcp
? `${process.env.API_BASE_URL}/api/v1/mcp/${integrationAccount.integrationDefinition.slug}`
: undefined,
name: integrationAccount.integrationDefinition.name,
icon: integrationAccount.integrationDefinition.icon,
},
timestamp: new Date().toISOString(),
};
// Convert OAuth clients to targets
const targets: WebhookTarget[] = oauthClients
.filter((client) => client.oauthClient?.webhookUrl)
@ -116,11 +141,6 @@ export const integrationWebhookTask = task({
logger.log(
`OAuth integration webhook delivery completed: ${successfulDeliveries}/${totalDeliveries} successful`,
{
integrationId: integrationAccount.id,
integrationProvider: integrationAccount.integrationDefinition.slug,
userId: payload.userId,
},
);
return {
@ -151,12 +171,14 @@ export async function triggerIntegrationWebhook(
integrationAccountId: string,
userId: string,
eventType: WebhookEventType,
workspaceId: string,
) {
try {
await integrationWebhookTask.trigger({
integrationAccountId,
userId,
eventType,
workspaceId,
});
logger.log(
`Triggered OAuth integration webhook delivery for integration account ${integrationAccountId}`,

View File

@ -51,9 +51,38 @@ export const webhookDeliveryTask = task({
workspaceId: payload.workspaceId,
isActive: true,
},
select: {
id: true,
url: true,
secret: true,
},
});
if (webhooks.length === 0) {
const oauthClients = await prisma.oAuthClientInstallation.findMany({
where: {
workspaceId: activity.workspaceId,
installedById: activity.workspace.userId!,
isActive: true,
grantedScopes: {
contains: "integration",
},
oauthClient: {
clientType: "regular",
},
},
select: {
id: true,
oauthClient: {
select: {
clientId: true,
webhookUrl: true,
webhookSecret: true,
},
},
},
});
if (webhooks.length === 0 && oauthClients.length === 0) {
logger.log(
`No active webhooks found for workspace ${payload.workspaceId}`,
);
@ -87,7 +116,16 @@ export const webhookDeliveryTask = task({
};
// Convert webhooks to targets using common utils
const targets = prepareWebhookTargets(webhooks);
const targets = prepareWebhookTargets(
[...webhooks, ...oauthClients].map((webhook) => ({
url: "url" in webhook ? webhook.url : webhook.oauthClient.webhookUrl!,
secret:
"secret" in webhook
? webhook.secret
: webhook.oauthClient.webhookSecret,
id: webhook.id,
})),
);
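// Direct workspace webhooks ({ url, secret }) and OAuth client installations
// ({ oauthClient: { webhookUrl, webhookSecret } }) are normalized here to a
// single { url, secret, id } target shape before delivery.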
// Use common delivery function
const result = await deliverWebhook({

View File

@ -1,6 +1,7 @@
import { logger } from "~/services/logger.service";
import { fetchAndSaveStdioIntegrations } from "~/trigger/utils/mcp";
import { initNeo4jSchemaOnce } from "~/lib/neo4j.server";
import { env } from "~/env.server";
// Global flag to ensure startup only runs once per server process
let startupInitialized = false;
@ -14,6 +15,46 @@ export async function initializeStartupServices() {
return;
}
// Wait for TRIGGER_API_URL/login to be available, up to 1 minute
async function waitForTriggerLogin(
url: string,
timeoutMs = 60000,
intervalMs = 2000,
) {
const start = Date.now();
while (Date.now() - start < timeoutMs) {
try {
const res = await fetch(`${url}/login`, { method: "GET" });
if (res.ok) {
return;
}
} catch (e) {
// ignore, will retry
}
await new Promise((resolve) => setTimeout(resolve, intervalMs));
}
// If we get here, the service is still not available
console.error(
`TRIGGER_API_URL/login is not available after ${timeoutMs / 1000} seconds. Exiting process.`,
);
process.exit(1);
}
try {
const triggerApiUrl = env.TRIGGER_API_URL;
if (triggerApiUrl) {
await waitForTriggerLogin(triggerApiUrl);
await addEnvVariablesInTrigger();
} else {
console.error("TRIGGER_API_URL is not set in environment variables.");
process.exit(1);
}
} catch (e) {
console.error(e);
console.error("Trigger is not configured");
process.exit(1);
}
try {
logger.info("Starting application initialization...");
@ -29,6 +70,169 @@ export async function initializeStartupServices() {
} catch (error) {
logger.error("Failed to initialize startup services:", { error });
// Don't mark as initialized if there was an error, allow retry
throw error;
}
}
export function getDatabaseUrl(dbName: string): string {
const { DATABASE_URL } = env;
if (!dbName) {
throw new Error("dbName is required");
}
// Parse the DATABASE_URL and replace the database name
try {
const url = new URL(DATABASE_URL);
// The pathname starts with a slash, e.g. "/echo"
url.pathname = `/${dbName}`;
return url.toString();
} catch (err) {
throw new Error(`Invalid DATABASE_URL format: ${err}`);
}
}
const Keys = [
"API_BASE_URL",
"DATABASE_URL",
"EMBEDDING_MODEL",
"MODEL",
"ENCRYPTION_KEY",
"NEO4J_PASSWORD",
"NEO4J_URI",
"NEO4J_USERNAME",
"OPENAI_API_KEY",
];
export async function addEnvVariablesInTrigger() {
const {
APP_ORIGIN,
TRIGGER_DB,
EMBEDDING_MODEL,
MODEL,
ENCRYPTION_KEY,
NEO4J_PASSWORD,
NEO4J_URI,
NEO4J_USERNAME,
OPENAI_API_KEY,
TRIGGER_PROJECT_ID,
TRIGGER_API_URL,
TRIGGER_SECRET_KEY,
} = env;
const DATABASE_URL = getDatabaseUrl(TRIGGER_DB);
// Helper to replace 'localhost' with 'host.docker.internal'
function replaceLocalhost(val: string | undefined): string | undefined {
if (typeof val !== "string") return val;
return val.replace(/localhost/g, "host.docker.internal");
}
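// Example: "bolt://localhost:7687" => "bolt://host.docker.internal:7687",
// so containers started by Trigger can reach services running on the host.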
// Map of key to value from env, replacing 'localhost' as needed
const envVars: Record<string, string> = {
API_BASE_URL: replaceLocalhost(APP_ORIGIN) ?? "",
DATABASE_URL: replaceLocalhost(DATABASE_URL) ?? "",
EMBEDDING_MODEL: replaceLocalhost(EMBEDDING_MODEL) ?? "",
MODEL: replaceLocalhost(MODEL) ?? "",
ENCRYPTION_KEY: replaceLocalhost(ENCRYPTION_KEY) ?? "",
NEO4J_PASSWORD: replaceLocalhost(NEO4J_PASSWORD) ?? "",
NEO4J_URI: replaceLocalhost(NEO4J_URI) ?? "",
NEO4J_USERNAME: replaceLocalhost(NEO4J_USERNAME) ?? "",
OPENAI_API_KEY: replaceLocalhost(OPENAI_API_KEY) ?? "",
};
const envName = "prod";
const apiBase = `${TRIGGER_API_URL}/api/v1`;
const envVarsUrl = `${apiBase}/projects/${TRIGGER_PROJECT_ID}/envvars/${envName}`;
try {
logger.info("Fetching current environment variables from Trigger...", {
envVarsUrl,
});
// Fetch current env vars
const response = await fetch(envVarsUrl, {
method: "GET",
headers: {
Authorization: `Bearer ${TRIGGER_SECRET_KEY}`,
"Content-Type": "application/json",
Accept: "application/json",
},
});
if (!response.ok) {
logger.error("Failed to fetch env vars from Trigger", {
status: response.status,
statusText: response.statusText,
});
throw new Error(
`Failed to fetch env vars: ${response.status} ${response.statusText}`,
);
}
const currentVars: Array<{ name: string; value: string }> =
await response.json();
logger.info("Fetched current env vars from Trigger", {
count: currentVars.length,
});
// Build a set of existing env var names
const existingNames = new Set(currentVars.map((v) => v.name));
// Find missing keys
const missingKeys = Keys.filter((key) => !existingNames.has(key));
if (missingKeys.length === 0) {
logger.info("No missing environment variables to add in Trigger.");
} else {
logger.info("Missing environment variables to add in Trigger", {
missingKeys,
});
}
// For each missing key, POST to create
for (const key of missingKeys) {
const value = envVars[key];
if (typeof value === "undefined") {
logger.warn(
`Environment variable ${key} is undefined in envVars, skipping.`,
);
continue;
}
logger.info(`Creating environment variable in Trigger: ${key}`);
const createRes = await fetch(envVarsUrl, {
method: "POST",
headers: {
Authorization: `Bearer ${TRIGGER_SECRET_KEY}`,
"Content-Type": "application/json",
Accept: "application/json",
},
body: JSON.stringify({
name: key,
value,
}),
});
if (!createRes.ok) {
logger.error("Failed to create env var in Trigger", {
key,
status: createRes.status,
statusText: createRes.statusText,
});
throw new Error(
`Failed to create env var ${key}: ${createRes.status} ${createRes.statusText}`,
);
} else {
logger.info(
`Successfully created environment variable in Trigger: ${key}`,
);
}
}
logger.info("addEnvVariablesInTrigger completed successfully.");
} catch (err) {
logger.error("Error in addEnvVariablesInTrigger", { error: err });
throw err;
}
}
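// Hedged sketch of how an init-container entrypoint might invoke this at
// startup (the file name and import path are assumptions, not from this diff):
//
//   import { addEnvVariablesInTrigger } from "./trigger";
//
//   addEnvVariablesInTrigger()
//     .then(() => process.exit(0)) // exit cleanly so dependent services can start
//     .catch((err) => {
//       console.error(err);
//       process.exit(1);
//     });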

View File

@ -21,27 +21,27 @@
"@conform-to/react": "^0.6.1",
"@conform-to/zod": "^0.6.1",
"@core/database": "workspace:*",
"@core/mcp-proxy": "workspace:*",
"@core/types": "workspace:*",
"@mjackson/headers": "0.11.1",
"@modelcontextprotocol/sdk": "1.13.2",
"@nichtsam/remix-auth-email-link": "3.0.0",
"@opentelemetry/api": "1.9.0",
"@core/mcp-proxy": "workspace:*",
"@prisma/client": "*",
"@radix-ui/react-accordion": "^1.1.2",
"@radix-ui/react-alert-dialog": "^1.0.5",
"@radix-ui/react-avatar": "^1.0.4",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-checkbox": "^1.0.4",
"@radix-ui/react-collapsible": "^1.0.3",
"@radix-ui/react-collapsible": "^1.1.11",
"@radix-ui/react-dialog": "^1.1.14",
"@radix-ui/react-dropdown-menu": "^2.0.6",
"@radix-ui/react-dropdown-menu": "^2.1.15",
"@radix-ui/react-icons": "^1.3.0",
"@radix-ui/react-label": "^2.0.2",
"@radix-ui/react-popover": "^1.0.7",
"@radix-ui/react-slider": "^1.3.5",
"@radix-ui/react-scroll-area": "^1.0.5",
"@radix-ui/react-select": "^2.0.0",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.5",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-tabs": "^1.0.4",

Binary image file changed; preview not shown (Before: 7.0 MiB → After: 7.3 MiB).

View File

@ -27,16 +27,16 @@ export default defineConfig({
build: {
extensions: [
syncEnvVars(() => ({
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string,
API_BASE_URL: process.env.API_BASE_URL as string,
DATABASE_URL: process.env.DATABASE_URL as string,
EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string,
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string,
MODEL: process.env.MODEL ?? "gpt-4.1-2025-04-14",
NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string,
NEO4J_URI: process.env.NEO4J_URI as string,
NEO4J_USERNAME: process.env.NEO4J_USERNAME as string,
OPENAI_API_KEY: process.env.OPENAI_API_KEY as string,
// ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string,
// API_BASE_URL: process.env.API_BASE_URL as string,
// DATABASE_URL: process.env.DATABASE_URL as string,
// EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string,
// ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string,
// MODEL: process.env.MODEL ?? "gpt-4.1-2025-04-14",
// NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string,
// NEO4J_URI: process.env.NEO4J_URI as string,
// NEO4J_USERNAME: process.env.NEO4J_USERNAME as string,
// OPENAI_API_KEY: process.env.OPENAI_API_KEY as string,
})),
prismaExtension({
schema: "prisma/schema.prisma",

View File

@ -47,9 +47,11 @@ services:
neo4j:
container_name: core-neo4j
image: neo4j:5
image: neo4j:5.25-community
environment:
- NEO4J_AUTH=${NEO4J_AUTH}
- NEO4J_dbms_security_procedures_unrestricted=gds.*
- NEO4J_dbms_security_procedures_allowlist=gds.*
ports:
- "7474:7474"
- "7687:7687"
@ -57,6 +59,9 @@ services:
- type: bind
source: /efs/neo4j
target: /data
- type: bind
source: /efs/neo4j/plugins # version - 2.13.2
target: /plugins
networks:
- core

57
hosting/docker/core/.env Normal file
View File

@ -0,0 +1,57 @@
VERSION=0.1.13
# When the Nest app runs in Docker, change the host to the database container name
DB_HOST=localhost
DB_PORT=5432
# POSTGRES
POSTGRES_USER=docker
POSTGRES_PASSWORD=docker
POSTGRES_DB=core
LOGIN_ORIGIN=http://localhost:3033
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core
# This sets the URL used for direct connections to the database and should only be needed in limited circumstances
# See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No
DIRECT_URL=${DATABASE_URL}
REMIX_APP_PORT=3033
APP_ENV=production
NODE_ENV=${APP_ENV}
APP_ORIGIN=http://localhost:3033
API_BASE_URL=${APP_ORIGIN}
SESSION_SECRET=2818143646516f6fffd707b36f334bbb
ENCRYPTION_KEY=f686147ab967943ebbe9ed3b496e465a
########### Sign.in with google ############
AUTH_GOOGLE_CLIENT_ID=
AUTH_GOOGLE_CLIENT_SECRET=
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_TLS_DISABLED=true
ENABLE_EMAIL_LOGIN=true
NEO4J_URI=bolt://neo4j:7687
NEO4J_USERNAME=neo4j
NEO4J_PASSWORD=27192e6432564f4788d55c15131bd5ac
OPENAI_API_KEY=
MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
OLLAMA_URL=http://ollama:11434
EMBEDDING_MODEL=text-embedding-3-small
MODEL=gpt-4.1-2025-04-14
## Trigger ##
TRIGGER_PROJECT_ID=proj_core
TRIGGER_SECRET_KEY=tr_prod_1yvnRh3pA1M2E67GBY7m
TRIGGER_API_URL=http://host.docker.internal:8030
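# Note: the ${VAR} references above are expanded at load time. The CLI code
# elsewhere in this diff resolves them with dotenv + dotenv-expand, e.g.:
#   const parsed = parse(readFileSync("hosting/docker/core/.env")); // path assumed for illustration
#   const env = expand({ parsed, processEnv: {} }).parsed ?? {};
#   env.DATABASE_URL // -> postgresql://docker:docker@postgres:5432/core?schema=core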

2
hosting/docker/core/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
# preserve the .env symlink
!.env

View File

@ -32,6 +32,8 @@ services:
ports:
- "3033:3000"
depends_on:
init:
condition: service_started
postgres:
condition: service_healthy
redis:
@ -74,6 +76,8 @@ services:
image: neo4j:5
environment:
- NEO4J_AUTH=${NEO4J_AUTH}
- NEO4J_dbms_security_procedures_unrestricted=gds.*
- NEO4J_dbms_security_procedures_allowlist=gds.*
ports:
- "7474:7474"
- "7687:7687"

View File

@ -16,7 +16,8 @@ MANAGED_WORKER_SECRET=447c29678f9eaf289e9c4b70d3dd8a7f
# - When running the combined stack, this is set automatically during bootstrap
# - For the split setup, you will have to set this manually. The token is available in the webapp logs but will only be shown once.
# - See the docs for more information: https://trigger.dev/docs/self-hosting/docker
# TRIGGER_WORKER_TOKEN=
TRIGGER_WORKER_TOKEN=tr_wgt_MwNm1OkMP7nZs5EaknV4LxayPfUKAieQrwh7k5Ln
TRIGGER_TASKS_IMAGE=redplanethq/proj_core:latest
# Worker URLs
# - In split setups, uncomment and set to the public URL of your webapp
@ -34,8 +35,6 @@ TRIGGER_DB=trigger
DB_HOST=host.docker.internal
DB_PORT=5432
# POSTGRES_DB=postgres
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}:${DB_PORT}/${TRIGGER_DB}?schema=public&sslmode=disable
DIRECT_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}:${DB_PORT}/${TRIGGER_DB}?schema=public&sslmode=disable
ELECTRIC_DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}/${TRIGGER_DB}
@ -77,10 +76,9 @@ RUN_REPLICATION_CLICKHOUSE_URL=http://default:password@clickhouse:8123
# - When testing locally, the default values should be fine
# - When deploying to production, you will have to change these, especially the password and URL
# - See the docs for more information: https://trigger.dev/docs/self-hosting/docker#registry-setup
DOCKER_REGISTRY_URL=localhost:5000
DOCKER_REGISTRY_NAMESPACE=
DOCKER_REGISTRY_USERNAME=registry-user
DOCKER_REGISTRY_PASSWORD=very-secure-indeed
DOCKER_REGISTRY_URL=registry-1.docker.io
DOCKER_REGISTRY_USERNAME=
DOCKER_REGISTRY_PASSWORD=
# Object store
# - You need to log into the Minio dashboard and create a bucket called "packets"
@ -136,4 +134,6 @@ OBJECT_STORE_SECRET_ACCESS_KEY=very-safe-password
# TRAEFIK_ENTRYPOINT=websecure
# TRAEFIK_HTTP_PUBLISH_IP=0.0.0.0
# TRAEFIK_HTTPS_PUBLISH_IP=0.0.0.0
# TRAEFIK_DASHBOARD_PUBLISH_IP=127.0.0.1
# TRAEFIK_DASHBOARD_PUBLISH_IP=127.0.0.1
CORE_VERSION=0.1.13

2
hosting/docker/trigger/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
# preserve the .env symlink
!.env

View File

@ -6,6 +6,22 @@ x-logging: &logging-config
compress: ${LOGGING_COMPRESS:-true}
services:
init:
container_name: trigger-init
image: redplanethq/init:${CORE_VERSION}
restart: "no" # prevent retries
environment:
- VERSION=${CORE_VERSION}
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- TRIGGER_DB=${TRIGGER_DB}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- TRIGGER_TASKS_IMAGE=${TRIGGER_TASKS_IMAGE}
- NODE_ENV=production
networks:
- webapp
webapp:
container_name: trigger-webapp
image: ghcr.io/triggerdotdev/trigger.dev:${TRIGGER_IMAGE_TAG:-v4-beta}
@ -14,7 +30,11 @@ services:
ports:
- ${WEBAPP_PUBLISH_IP:-0.0.0.0}:8030:3000
depends_on:
- clickhouse
clickhouse:
condition: service_started
init:
condition: service_started
networks:
- webapp
- supervisor
@ -135,53 +155,6 @@ services:
retries: 5
start_period: 10s
registry:
container_name: trigger-registry
image: registry:${REGISTRY_IMAGE_TAG:-2}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${REGISTRY_PUBLISH_IP:-127.0.0.1}:5000:5000
networks:
- webapp
volumes:
# registry-user:very-secure-indeed
- ./auth.htpasswd:/auth/htpasswd:ro
environment:
REGISTRY_AUTH: htpasswd
REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:5000/"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
minio:
container_name: trigger-minio
image: bitnami/minio:${MINIO_IMAGE_TAG:-latest}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${MINIO_PUBLISH_IP:-127.0.0.1}:9000:9000
- ${MINIO_PUBLISH_IP:-127.0.0.1}:9001:9001
networks:
- webapp
volumes:
- minio:/bitnami/minio/data
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-very-safe-password}
MINIO_DEFAULT_BUCKETS: packets
MINIO_BROWSER: "on"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 5s
timeout: 10s
retries: 5
start_period: 10s
# Worker related
supervisor:
container_name: trigger-supervisor
@ -202,9 +175,9 @@ services:
command: sh -c "chown -R node:node /home/node/shared && exec /usr/bin/dumb-init -- pnpm run --filter supervisor start"
environment:
# This needs to match the token of the worker group you want to connect to
# TRIGGER_WORKER_TOKEN: ${TRIGGER_WORKER_TOKEN}
TRIGGER_WORKER_TOKEN: ${TRIGGER_WORKER_TOKEN}
# Use the bootstrap token created by the webapp
TRIGGER_WORKER_TOKEN: file:///home/node/shared/worker_token
# TRIGGER_WORKER_TOKEN: file:///home/node/shared/worker_token
MANAGED_WORKER_SECRET: ${MANAGED_WORKER_SECRET}
TRIGGER_API_URL: ${TRIGGER_API_URL:-http://webapp:3000}
OTEL_EXPORTER_OTLP_ENDPOINT: ${OTEL_EXPORTER_OTLP_ENDPOINT:-http://webapp:3000/otel}

View File

@ -1,7 +1,7 @@
{
"name": "core",
"private": true,
"version": "0.1.12",
"version": "0.1.13",
"workspaces": [
"apps/*",
"packages/*"

View File

@ -1,2 +0,0 @@
.tshy/
.tshy-build/

View File

@ -1,70 +0,0 @@
import { Command } from "commander";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
import { logger } from "../utils/logger.js";
import { outro } from "@clack/prompts";
import { chalkError } from "../utils/cliOutput.js";
export const CommonCommandOptions = z.object({
logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).default("log"),
});
export type CommonCommandOptions = z.infer<typeof CommonCommandOptions>;
export function commonOptions(command: Command) {
return command.option(
"-l, --log-level <level>",
"The CLI log level to use (debug, info, log, warn, error, none).",
"log"
);
}
export class SkipLoggingError extends Error {}
export class SkipCommandError extends Error {}
export class OutroCommandError extends SkipCommandError {}
export async function wrapCommandAction<T extends z.AnyZodObject, TResult>(
name: string,
schema: T,
options: unknown,
action: (opts: z.output<T>) => Promise<TResult>
): Promise<TResult | undefined> {
try {
const parsedOptions = schema.safeParse(options);
if (!parsedOptions.success) {
throw new Error(fromZodError(parsedOptions.error).toString());
}
logger.loggerLevel = parsedOptions.data.logLevel;
logger.debug(`Running "${name}" with the following options`, {
options: options,
});
const result = await action(parsedOptions.data);
return result;
} catch (e) {
if (e instanceof SkipLoggingError) {
} else if (e instanceof OutroCommandError) {
outro("Operation cancelled");
} else if (e instanceof SkipCommandError) {
// do nothing
} else {
logger.log(`${chalkError("X Error:")} ${e instanceof Error ? e.message : String(e)}`);
}
throw e;
}
}
export function installExitHandler() {
process.on("SIGINT", () => {
process.exit(0);
});
process.on("SIGTERM", () => {
process.exit(0);
});
}

View File

@ -1 +0,0 @@
export const VERSION = "0.1.7";

View File

@ -1,230 +0,0 @@
import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts";
import { fileExists, updateEnvFile } from "../utils/file.js";
import { checkPostgresHealth } from "../utils/docker.js";
import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from "../utils/ascii.js";
import { setupEnvFile } from "../utils/env.js";
import { hasTriggerConfig } from "../utils/env-checker.js";
import { handleDockerLogin } from "../utils/docker-login.js";
import { deployTriggerTasks } from "../utils/trigger-deploy.js";
import path from "path";
import * as fs from "fs";
import { createTriggerConfigJson, initTriggerDatabase } from "../utils/database-init.js";
import { parse } from "dotenv";
import { expand } from "dotenv-expand";
export async function initCommand() {
// Display the CORE brain logo
printCoreBrainLogo();
intro("🚀 Core Development Environment Setup");
// Step 1: Confirm this is the Core repository
note(
"Please ensure you have:\n• Docker and Docker Compose installed\n• Git installed\n• pnpm package manager installed\n• You are in the Core repository directory",
"📋 Prerequisites"
);
// Check if package.json name has "core" in it, else exit
const pkgPath = path.join(process.cwd(), "package.json");
let isCoreRepo = false;
try {
if (fs.existsSync(pkgPath)) {
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
if (typeof pkg.name === "string" && pkg.name.includes("core")) {
isCoreRepo = true;
}
}
} catch (err) {
// ignore, will prompt below
}
if (!isCoreRepo) {
note(
"Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.",
"📥 Clone Repository"
);
outro("❌ Setup cancelled. Please navigate to the Core repository first.");
process.exit(1);
}
const rootDir = process.cwd();
const triggerDir = path.join(rootDir, "trigger");
try {
// Step 2: Setup .env file in root
const s1 = spinner();
s1.start("Setting up .env file in root folder...");
const envPath = path.join(rootDir, ".env");
const envExists = await fileExists(envPath);
try {
await setupEnvFile(rootDir, "root");
if (envExists) {
s1.stop("✅ .env file already exists in root");
} else {
s1.stop("✅ Copied .env.example to .env");
}
} catch (error: any) {
s1.stop(error.message);
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
// Step 3: Docker compose up -d in root
try {
await executeCommandInteractive("docker compose up -d", {
cwd: rootDir,
message: "Starting Docker containers in root...",
showOutput: true,
});
} catch (error: any) {
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
// Step 4: Check if postgres is running
const s3 = spinner();
s3.start("Checking PostgreSQL connection...");
let retries = 0;
const maxRetries = 30;
while (retries < maxRetries) {
if (await checkPostgresHealth()) {
s3.stop("PostgreSQL is running on localhost:5432");
break;
}
await new Promise((resolve) => setTimeout(resolve, 2000));
retries++;
}
if (retries >= maxRetries) {
s3.stop("L PostgreSQL not accessible on localhost:5432");
outro("❌ Please check your Docker setup and try again");
process.exit(1);
}
// Step 5: Setup .env file in trigger
const s4 = spinner();
s4.start("Setting up .env file in trigger folder...");
const triggerEnvPath = path.join(triggerDir, ".env");
const triggerEnvExists = await fileExists(triggerEnvPath);
try {
await setupEnvFile(triggerDir, "trigger");
if (triggerEnvExists) {
s4.stop("✅ .env file already exists in trigger");
} else {
s4.stop("✅ Copied trigger .env.example to trigger/.env");
}
} catch (error: any) {
s4.stop(error.message);
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
// Step 6: Docker compose up for trigger
try {
await executeCommandInteractive("docker compose up -d", {
cwd: triggerDir,
message: "Starting Trigger.dev containers...",
showOutput: true,
});
} catch (error: any) {
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
// Step 7: Check if Trigger.dev configuration already exists
const triggerConfigExists = await hasTriggerConfig(envPath);
if (triggerConfigExists) {
note(
"✅ Trigger.dev configuration already exists in .env file\n Skipping Trigger.dev setup steps...",
"Configuration Found"
);
} else {
// Step 8: Show login instructions
note("🎉 Docker containers are now running!");
const { prodSecretKey, projectRefId, personalToken } = await initTriggerDatabase(triggerDir);
await createTriggerConfigJson(personalToken as string);
const openaiApiKey = await text({
message: "Enter your OpenAI API Key:",
validate: (value) => {
if (!value || value.length === 0) {
return "OpenAI API Key is required";
}
return;
},
});
// Step 11: Update .env with project details
const s6 = spinner();
s6.start("Updating .env with Trigger.dev configuration...");
try {
await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectRefId as string);
await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", prodSecretKey as string);
await updateEnvFile(envPath, "OPENAI_API_KEY", openaiApiKey as string);
s6.stop("✅ Updated .env with Trigger.dev configuration");
} catch (error: any) {
s6.stop("❌ Failed to update .env file");
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
// Step 12: Restart root docker-compose with new configuration
try {
const file = fs.readFileSync(envPath);
const parsed = parse(file);
const envVarsExpand = expand({ parsed, processEnv: {} }).parsed || {};
await executeCommandInteractive("docker compose up -d", {
cwd: rootDir,
message: "Starting Core services with new Trigger.dev configuration...",
showOutput: true,
env: envVarsExpand,
});
} catch (error: any) {
outro("❌ Setup failed: " + error.message);
process.exit(1);
}
}
// Step 13: Handle Docker login
note("Run the following command to login to Docker registry:", "🐳 Docker Registry Login");
await handleDockerLogin(rootDir, triggerEnvPath);
// Step 14: Deploy Trigger.dev tasks
await deployTriggerTasks(rootDir);
// Step 15: Final instructions
note(
[
"Your services are now running:",
"",
"• Core Application: http://localhost:3033",
"• Trigger.dev: http://localhost:8030",
"• PostgreSQL: localhost:5432",
"",
"You can now start developing with Core!",
"",
" When logging in to the Core Application, you can find the login URL in the Docker container logs:",
" docker logs core-app --tail 50",
].join("\n"),
"🚀 Services Running"
);
outro("🎉 Setup Complete!");
process.exit(0);
} catch (error: any) {
outro(`❌ Setup failed: ${error.message}`);
process.exit(1);
}
}

View File

@ -1,66 +0,0 @@
import { intro, outro, note, log } from "@clack/prompts";
import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from "../utils/ascii.js";
import path from "path";
import * as fs from "fs";
export async function startCommand() {
// Display the CORE brain logo
printCoreBrainLogo();
intro("🚀 Starting Core Development Environment");
// Step 1: Confirm this is the Core repository
// Check if package.json name has "core" in it, else exit
const pkgPath = path.join(process.cwd(), "package.json");
let isCoreRepo = false;
try {
if (fs.existsSync(pkgPath)) {
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
if (typeof pkg.name === "string" && pkg.name.includes("core")) {
isCoreRepo = true;
}
}
} catch (err) {
// ignore, will prompt below
}
if (!isCoreRepo) {
note(
'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.',
"📥 Core Repository Required"
);
outro("❌ Please navigate to the Core repository first.");
process.exit(1);
}
const rootDir = process.cwd();
const triggerDir = path.join(rootDir, "trigger");
try {
// Start main services
await executeCommandInteractive("docker compose up -d", {
cwd: rootDir,
message: "Starting Core services...",
showOutput: true,
});
// Start trigger services
await executeCommandInteractive("docker compose up -d", {
cwd: triggerDir,
message: "Starting Trigger.dev services...",
showOutput: true,
});
// Final success message
outro("🎉 Core Development Environment Started!");
note(
"• Core Application: http://localhost:3033\n• Trigger.dev: http://localhost:8030\n• PostgreSQL: localhost:5432",
"🌐 Your services are now running"
);
log.success("Happy coding!");
} catch (error: any) {
outro(`❌ Failed to start services: ${error.message}`);
process.exit(1);
}
}

View File

@ -1,62 +0,0 @@
import { intro, outro, log, note } from "@clack/prompts";
import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from "../utils/ascii.js";
import path from "path";
import * as fs from "fs";
export async function stopCommand() {
// Display the CORE brain logo
printCoreBrainLogo();
intro("🛑 Stopping Core Development Environment");
// Step 1: Confirm this is the Core repository
// Check if package.json name has "core" in it, else exit
const pkgPath = path.join(process.cwd(), "package.json");
let isCoreRepo = false;
try {
if (fs.existsSync(pkgPath)) {
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
if (typeof pkg.name === "string" && pkg.name.includes("core")) {
isCoreRepo = true;
}
}
} catch (err) {
// ignore, will prompt below
}
if (!isCoreRepo) {
note(
'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.',
"📥 Core Repository Required"
);
outro("❌ Please navigate to the Core repository first.");
process.exit(1);
}
const rootDir = process.cwd();
const triggerDir = path.join(rootDir, "trigger");
try {
// Stop trigger services first
await executeCommandInteractive("docker compose down", {
cwd: triggerDir,
message: "Stopping Trigger.dev services...",
showOutput: true,
});
// Stop main services
await executeCommandInteractive("docker compose down", {
cwd: rootDir,
message: "Stopping Core services...",
showOutput: true,
});
// Final success message
outro("🎉 Core Development Environment Stopped!");
log.success("All services have been stopped.");
log.info('Run "core start" to start services again.');
} catch (error: any) {
outro(`❌ Failed to stop services: ${error.message}`);
process.exit(1);
}
}

View File

@ -1,3 +0,0 @@
#!/usr/bin/env node
import './cli/index.js';

View File

@ -1,145 +0,0 @@
import { log } from "@clack/prompts";
import chalk from "chalk";
import { terminalLink, TerminalLinkOptions } from "./terminalLink.js";
import { hasTTY } from "std-env";
export const isInteractive = hasTTY;
export const isLinksSupported = terminalLink.isSupported;
export const green = "#4FFF54";
export const purple = "#735BF3";
export function chalkGreen(text: string) {
return chalk.hex(green)(text);
}
export function chalkPurple(text: string) {
return chalk.hex(purple)(text);
}
export function chalkGrey(text: string) {
return chalk.hex("#878C99")(text);
}
export function chalkError(text: string) {
return chalk.hex("#E11D48")(text);
}
export function chalkWarning(text: string) {
return chalk.yellow(text);
}
export function chalkSuccess(text: string) {
return chalk.hex("#28BF5C")(text);
}
export function chalkLink(text: string) {
return chalk.underline.hex("#D7D9DD")(text);
}
export function chalkWorker(text: string) {
return chalk.hex("#FFFF89")(text);
}
export function chalkTask(text: string) {
return chalk.hex("#60A5FA")(text);
}
export function chalkRun(text: string) {
return chalk.hex("#A78BFA")(text);
}
export function logo() {
return `${chalk.hex(green).bold("Trigger")}${chalk.hex(purple).bold(".dev")}`;
}
// Mar 27 09:17:25.653
export function prettyPrintDate(date: Date = new Date()) {
let formattedDate = new Intl.DateTimeFormat("en-US", {
month: "short",
day: "2-digit",
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
hour12: false,
}).format(date);
// Append milliseconds
formattedDate += "." + ("00" + date.getMilliseconds()).slice(-3);
return formattedDate;
}
export function prettyError(header: string, body?: string, footer?: string) {
const prefix = "Error: ";
const indent = Array(prefix.length).fill(" ").join("");
const spacing = "\n\n";
const prettyPrefix = chalkError(prefix);
const withIndents = (text?: string) =>
text
?.split("\n")
.map((line) => `${indent}${line}`)
.join("\n");
const prettyBody = withIndents(body?.trim());
const prettyFooter = withIndents(footer);
log.error(
`${prettyPrefix}${header}${prettyBody ? `${spacing}${prettyBody}` : ""}${
prettyFooter ? `${spacing}${prettyFooter}` : ""
}`
);
}
export function prettyWarning(header: string, body?: string, footer?: string) {
const prefix = "Warning: ";
const indent = Array(prefix.length).fill(" ").join("");
const spacing = "\n\n";
const prettyPrefix = chalkWarning(prefix);
const withIndents = (text?: string) =>
text
?.split("\n")
.map((line) => `${indent}${line}`)
.join("\n");
const prettyBody = withIndents(body);
const prettyFooter = withIndents(footer);
log.warn(
`${prettyPrefix}${header}${prettyBody ? `${spacing}${prettyBody}` : ""}${
prettyFooter ? `${spacing}${prettyFooter}` : ""
}`
);
}
export function aiHelpLink({
dashboardUrl,
project,
query,
}: {
dashboardUrl: string;
project: string;
query: string;
}) {
const searchParams = new URLSearchParams();
//the max length for a URL is 1950 characters
const clippedQuery = query.slice(0, 1950);
searchParams.set("q", clippedQuery);
const url = new URL(`/projects/${project}/ai-help`, dashboardUrl);
url.search = searchParams.toString();
log.message(chalkLink(cliLink("💡 Get a fix for this error using AI", url.toString())));
}
export function cliLink(text: string, url: string, options?: TerminalLinkOptions) {
return terminalLink(text, url, {
fallback: (text, url) => `${text} ${url}`,
...options,
});
}

View File

@ -1,328 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import Knex, { Knex as KnexT } from "knex";
import { v4 as uuidv4 } from "uuid";
import nodeCrypto from "node:crypto";
import { parse } from "dotenv";
import { expand } from "dotenv-expand";
import path from "node:path";
import { log } from "@clack/prompts";
import { customAlphabet } from "nanoid";
import $xdgAppPaths from "xdg-app-paths";
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
export const xdgAppPaths = $xdgAppPaths as unknown as typeof $xdgAppPaths.default;
const tokenGenerator = customAlphabet("123456789abcdefghijkmnopqrstuvwxyz", 40);
// Generate tokens internally
let ENCRYPTION_KEY: string;
const COMMON_ID = "9ea0412ea8ef441ca03c7952d011ab56";
const key = tokenGenerator(20);
export async function createOrg(knex: KnexT) {
try {
log.step("Checking for existing CORE user and organization...");
const existingUser = await knex("User").where({ id: COMMON_ID }).first();
if (existingUser) {
log.info("CORE user and organization already exist. Skipping creation.");
return COMMON_ID; // User already exists, return the ID
}
log.step("Creating CORE user, organization, and org member...");
// Create new entries using a transaction
await knex.transaction(async (trx) => {
// Create User
await trx("User").insert({
id: COMMON_ID,
admin: true,
authenticationMethod: "MAGIC_LINK",
displayName: "CORE",
email: "core@heysol.ai",
name: "CORE",
confirmedBasicDetails: true,
updatedAt: new Date(),
});
// Create Organization
await trx("Organization").insert({
id: COMMON_ID,
slug: "CORE",
title: "CORE",
v3Enabled: true,
updatedAt: new Date(),
});
// Create OrgMember
await trx("OrgMember").insert({
id: COMMON_ID,
organizationId: COMMON_ID,
userId: COMMON_ID,
role: "ADMIN",
updatedAt: new Date(),
});
});
log.success("CORE user, organization, and org member created.");
return COMMON_ID;
} catch (error) {
log.error(`Error creating org: ${error}`);
throw new Error(`Error creating org: ${error}`);
}
}
export async function createPersonalToken(knex: KnexT) {
const id = uuidv4().replace(/-/g, "");
log.step("Checking for existing personal access token for CLI user...");
const existingToken = await knex("PersonalAccessToken")
.where({ userId: COMMON_ID, name: "cli" })
.first();
if (existingToken) {
log.info("Personal access token for CLI already exists. Skipping creation.");
return;
}
log.step("Creating CLI personal access token...");
// Generate a new token similar to the original: "tr_pat_" + 40 lowercase alphanumeric chars
const personalToken = `tr_pat_${tokenGenerator(40)}`;
await knex("PersonalAccessToken").insert({
id,
name: "cli",
userId: COMMON_ID,
updatedAt: new Date(),
obfuscatedToken: obfuscateToken(personalToken),
hashedToken: hashToken(personalToken),
encryptedToken: encryptToken(personalToken),
});
log.success("CLI personal access token created.");
return personalToken;
}
function obfuscateToken(token: string) {
const withoutPrefix = token.replace("tr_pat_", "");
const obfuscated = `${withoutPrefix.slice(0, 4)}${"•".repeat(18)}${withoutPrefix.slice(-4)}`;
return `tr_pat_${obfuscated}`;
}
export async function createProject(knex: KnexT) {
try {
log.step("Checking if CORE project already exists for the organization...");
const existingProject = await knex("Project")
.where({ name: "CORE", organizationId: COMMON_ID })
.first();
if (existingProject) {
log.info(
"CORE project already exists. Skipping creation of project and runtime environments."
);
// Fetch the prod runtime environment for this project
const prodRuntimeEnv = await knex("RuntimeEnvironment")
.where({
projectId: existingProject.id,
slug: "prod",
})
.first();
let prodSecret;
if (prodRuntimeEnv && prodRuntimeEnv.apiKey) {
prodSecret = prodRuntimeEnv.apiKey;
} else {
// fallback to old behavior if not found (should not happen)
prodSecret = `tr_prod_${key}`;
}
return {
projectId: existingProject.id,
prodSecret,
projectRef: existingProject.externalRef || "proj_core",
};
}
const id = uuidv4().replace(/-/g, "");
log.step("Creating CORE project and runtime environments...");
await knex.transaction(async (trx) => {
await knex("Project")
.insert({
id,
name: "CORE",
organizationId: COMMON_ID,
slug: "CORE",
externalRef: `proj_core`,
version: "V3",
updatedAt: new Date(),
})
.transacting(trx);
await knex("RuntimeEnvironment")
.insert(
["dev", "stg", "prod"].map((env: string) => ({
id: uuidv4(),
slug: env,
apiKey: `tr_${env}_${key}`,
organizationId: COMMON_ID,
orgMemberId: COMMON_ID,
projectId: id,
type: env === "prod" ? "PRODUCTION" : env === "stg" ? "STAGING" : "DEVELOPMENT",
pkApiKey: `tr_pk_${env}${key}`,
shortcode: env,
updatedAt: new Date(),
}))
)
.transacting(trx);
});
log.success("CORE project and runtime environments created.");
return { projectId: id, prodSecret: `tr_prod_${key}`, projectRef: `proj_core` };
} catch (error) {
log.error(`Error creating project: ${error}`);
throw new Error(`Error creating project: ${error}`);
}
}
function encryptToken(value: string) {
const nonce = nodeCrypto.randomBytes(12);
const cipher = nodeCrypto.createCipheriv("aes-256-gcm", ENCRYPTION_KEY, nonce);
let encrypted = cipher.update(value, "utf8", "hex");
encrypted += cipher.final("hex");
const tag = cipher.getAuthTag().toString("hex");
return {
nonce: nonce.toString("hex"),
ciphertext: encrypted,
tag,
};
}
export function hashToken(token: string): string {
const hash = nodeCrypto.createHash("sha256");
hash.update(token);
return hash.digest("hex");
}
// Main initialization function
export async function initTriggerDatabase(triggerDir: string) {
log.step("Waiting for Trigger.dev to be ready on http://localhost:8030/login...");
await new Promise((resolve) => setTimeout(resolve, 5000));
// Check if Trigger.dev is up and /login returns 200 before proceeding
const MAX_RETRIES = 30;
const RETRY_DELAY_MS = 2000;
let loginOk = false;
for (let i = 0; i < MAX_RETRIES; i++) {
try {
const res = await fetch("http://localhost:8030/login");
if (res.status === 200) {
loginOk = true;
log.step("Trigger.dev is up and /login returned 200.");
break;
}
} catch (e) {
// ignore, will retry
}
if (i < MAX_RETRIES - 1) {
await new Promise((resolve) => setTimeout(resolve, RETRY_DELAY_MS));
}
}
if (!loginOk) {
log.error("Trigger.dev did not respond with 200 on /login after waiting.");
throw new Error("Trigger.dev is not ready at http://localhost:8030/login");
}
const envPath = path.join(triggerDir, ".env");
log.step(`Loading environment variables from ${envPath}...`);
const file = readFileSync(envPath);
const parsed = parse(file);
const envVarsExpand = expand({ parsed, processEnv: {} }).parsed || {};
// Set the encryption key from the .env file
ENCRYPTION_KEY = envVarsExpand.ENCRYPTION_KEY as string;
if (!ENCRYPTION_KEY) {
throw new Error("ENCRYPTION_KEY not found in trigger/.env file");
}
const knex = Knex({
client: "pg", // Use PostgreSQL as the database client
connection: envVarsExpand.DIRECT_URL?.replace("host.docker.internal", "localhost"), // Database connection URL from environment variable
});
try {
log.step("Initializing Trigger.dev database...");
// Create organization and user
await createOrg(knex);
// Create personal access token
const personalToken = await createPersonalToken(knex);
// Create project and return details
const projectDetails = await createProject(knex);
log.success("Trigger.dev database initialized successfully.");
log.step("Setting things up...");
await new Promise((resolve) => setTimeout(resolve, 5000));
return {
prodSecretKey: projectDetails.prodSecret,
projectRefId: projectDetails.projectRef,
personalToken,
};
} catch (error) {
log.error(`Initialization failed: ${error}`);
throw new Error(`Initialization failed: ${error}`);
}
}
function getGlobalConfigFolderPath() {
const configDir = xdgAppPaths("trigger").config();
return configDir;
}
const CONFIG_FILE = "config.json";
function getAuthConfigFilePath() {
return path.join(getGlobalConfigFolderPath(), CONFIG_FILE);
}
/**
* Creates the Trigger.dev CLI config.json file in ~/Library/Preferences/trigger/config.json
* with the given personal access token. If the config already exists, it will be deleted first.
*
* @param {string} personalToken - The personal access token to store in the config.
*/
export async function createTriggerConfigJson(personalToken: string) {
const configPath = getAuthConfigFilePath();
// If config.json exists, delete it
mkdirSync(path.dirname(configPath), {
recursive: true,
});
const config = {
version: 2,
currentProfile: "default",
profiles: {
default: {
accessToken: personalToken,
apiUrl: "http://localhost:8030",
},
},
};
writeFileSync(path.join(configPath), JSON.stringify(config, undefined, 2), {
encoding: "utf-8",
});
}

View File

@ -1,108 +0,0 @@
import { spawn, ChildProcess } from "child_process";
import { spinner } from "@clack/prompts";
export interface CommandOptions {
cwd: string;
message: string;
showOutput?: boolean;
env?: Record<string, string>;
}
export function executeCommandInteractive(command: string, options: CommandOptions): Promise<void> {
return new Promise((resolve, reject) => {
const s = spinner();
s.start(options.message);
// Split command into parts
const parts = command.split(" ");
const cmd = parts[0];
const args = parts.slice(1);
if (!cmd) {
reject(new Error("Invalid command"));
return;
}
const child: ChildProcess = spawn(cmd, args, {
cwd: options.cwd,
stdio: options.showOutput ? ["ignore", "pipe", "pipe"] : "ignore",
detached: false,
env: options.env ? { ...process.env, ...options.env } : { ...process.env },
});
let output = "";
// Handle stdout
if (child.stdout && options.showOutput) {
child.stdout.on("data", (data: Buffer) => {
const text = data.toString();
output += text;
// Update spinner with latest output line
const lines = text.trim().split("\n");
const lastLine = lines[lines.length - 1];
if (lastLine && lastLine.trim()) {
s.message(`${options.message}\n${lastLine.trim()}`);
}
});
}
// Handle stderr
if (child.stderr && options.showOutput) {
child.stderr.on("data", (data: Buffer) => {
const text = data.toString();
output += text;
// console.log(text);
// Update spinner with error output
const lines = text.trim().split("\n");
const lastLine = lines[lines.length - 1];
if (lastLine && lastLine.trim()) {
s.message(`${options.message}\n❌ ${lastLine.trim()}`);
}
});
}
// Handle process exit
child.on("exit", (code: number | null) => {
if (code === 0) {
s.stop(`${options.message.replace(/\.\.\.$/, "")} completed`);
resolve();
} else {
s.stop(`${options.message.replace(/\.\.\.$/, "")} failed (exit code: ${code})`);
if (options.showOutput && output) {
console.log("\nOutput:");
console.log(output);
}
reject(new Error(`Command failed with exit code ${code}`));
}
});
// Handle errors
child.on("error", (error: Error) => {
s.stop(`${options.message.replace(/\.\.\.$/, "")} failed`);
reject(error);
});
// Handle Ctrl+C
const handleSigint = () => {
s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, "")} interrupted`);
child.kill("SIGTERM");
// Give the process time to clean up
setTimeout(() => {
if (child.killed === false) {
child.kill("SIGKILL");
}
process.exit(130); // Standard exit code for SIGINT
}, 5000);
};
process.on("SIGINT", handleSigint);
// Clean up event listener when done
child.on("exit", () => {
process.off("SIGINT", handleSigint);
});
});
}

View File

@ -1,59 +0,0 @@
import { log } from "@clack/prompts";
import path from "path";
import os from "os";
import fs from "fs";
import { executeCommandInteractive } from "./docker-interactive.js";
export async function handleDockerLogin(rootDir: string, triggerEnvPath: string): Promise<void> {
// Check if Docker is already logged in to localhost:5000
let dockerLoginNeeded = true;
try {
const dockerConfigPath = process.env.DOCKER_CONFIG
? path.join(process.env.DOCKER_CONFIG, "config.json")
: path.join(os.homedir(), ".docker", "config.json");
if (fs.existsSync(dockerConfigPath)) {
const configContent = await fs.promises.readFile(dockerConfigPath, "utf8");
const config = JSON.parse(configContent);
if (
config &&
config.auths &&
Object.prototype.hasOwnProperty.call(config.auths, "localhost:5000")
) {
dockerLoginNeeded = false;
}
}
} catch (error) {
// Ignore errors, will prompt for login below
}
if (dockerLoginNeeded) {
try {
// Read env file to get docker registry details
const envContent = await fs.promises.readFile(triggerEnvPath, "utf8");
const envLines = envContent.split("\n");
const getEnvValue = (key: string) => {
const line = envLines.find((l) => l.startsWith(`${key}=`));
return line ? line.split("=")[1] : "";
};
const dockerRegistryUrl = getEnvValue("DOCKER_REGISTRY_URL");
const dockerRegistryUsername = getEnvValue("DOCKER_REGISTRY_USERNAME");
const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD");
await executeCommandInteractive(
`docker login -u ${dockerRegistryUsername} -p ${dockerRegistryPassword} ${dockerRegistryUrl}`,
{
cwd: rootDir,
message: "Logging in to docker...",
showOutput: true,
}
);
} catch (error) {
log.info("docker login -u <USERNAME> -p <PASSWORD> <REGISTRY_URL>");
}
} else {
log.info("✅ Docker is already logged in to localhost:5000, skipping login prompt.");
}
}

View File

@ -1,30 +0,0 @@
import { execSync } from 'child_process';
export function checkPostgresHealth(): Promise<boolean> {
return new Promise((resolve) => {
try {
const result = execSync('curl -f http://localhost:5432 || nc -z localhost 5432', {
encoding: 'utf8',
timeout: 5000
});
resolve(true);
} catch {
resolve(false);
}
});
}
export function executeDockerCommand(command: string, cwd: string): Promise<string> {
return new Promise((resolve, reject) => {
try {
const result = execSync(command, {
cwd,
encoding: 'utf8',
stdio: 'pipe'
});
resolve(result);
} catch (error: any) {
reject(new Error(`Docker command failed: ${error.message}`));
}
});
}

View File

@ -1,23 +0,0 @@
import fs from 'fs/promises';
export async function checkEnvValue(filePath: string, key: string): Promise<string | null> {
try {
const content = await fs.readFile(filePath, 'utf8');
const lines = content.split('\n');
const line = lines.find(l => l.startsWith(`${key}=`));
if (line) {
const value = line.split('=')[1]?.trim();
return value && value.length > 0 ? value : null;
}
return null;
} catch {
return null;
}
}
export async function hasTriggerConfig(envPath: string): Promise<boolean> {
const projectId = await checkEnvValue(envPath, 'TRIGGER_PROJECT_ID');
const secretKey = await checkEnvValue(envPath, 'TRIGGER_SECRET_KEY');
return !!(projectId && secretKey);
}

View File

@ -1,50 +0,0 @@
import path from "path";
import { parse } from "dotenv";
import { expand } from "dotenv-expand";
import * as fs from "fs";
/**
* Reads environment variables from .env file and replaces localhost URLs with host.docker.internal
* for Docker container compatibility
*/
export async function getDockerCompatibleEnvVars(rootDir: string): Promise<Record<string, string>> {
const envPath = path.join(rootDir, ".env");
try {
// Use dotenv to parse and expand variables
const file = fs.readFileSync(envPath);
const parsed = parse(file);
const envVarsExpand = expand({ parsed, processEnv: {} }).parsed || {};
const getEnvValue = (key: string): string => {
return envVarsExpand[key] || "";
};
const replaceLocalhostWithDockerHost = (value: string): string => {
return value
.replace(/localhost/g, "host.docker.internal")
.replace(/127\.0\.0\.1/g, "host.docker.internal");
};
// Get all required environment variables
const envVars = {
ANTHROPIC_API_KEY: getEnvValue("ANTHROPIC_API_KEY"),
API_BASE_URL: replaceLocalhostWithDockerHost(getEnvValue("API_BASE_URL")),
DATABASE_URL: replaceLocalhostWithDockerHost(getEnvValue("DATABASE_URL")),
EMBEDDING_MODEL: getEnvValue("EMBEDDING_MODEL"),
ENCRYPTION_KEY: getEnvValue("ENCRYPTION_KEY"),
MODEL: getEnvValue("MODEL") || "gpt-4.1-2025-04-14",
NEO4J_PASSWORD: getEnvValue("NEO4J_PASSWORD"),
NEO4J_URI: replaceLocalhostWithDockerHost(getEnvValue("NEO4J_URI")),
NEO4J_USERNAME: getEnvValue("NEO4J_USERNAME"),
OPENAI_API_KEY: getEnvValue("OPENAI_API_KEY"),
TRIGGER_PROJECT_ID: getEnvValue("TRIGGER_PROJECT_ID"),
};
return envVars;
} catch (error) {
throw new Error(`Failed to read .env file: ${error}`);
}
}

View File

@ -1,17 +0,0 @@
import path from 'path';
import { fileExists, copyFile } from './file.js';
export async function setupEnvFile(directory: string, name: string = 'root'): Promise<void> {
const envExamplePath = path.join(directory, '.env.example');
const envPath = path.join(directory, '.env');
if (!(await fileExists(envExamplePath))) {
throw new Error(`❌ .env.example not found in ${name} directory`);
}
if (await fileExists(envPath)) {
return; // .env already exists, skip copying
}
await copyFile(envExamplePath, envPath);
}

View File

@ -1,41 +0,0 @@
import fs from 'fs/promises';
import path from 'path';
export async function copyFile(source: string, destination: string): Promise<void> {
try {
await fs.copyFile(source, destination);
} catch (error: any) {
throw new Error(`Failed to copy file: ${error.message}`);
}
}
export async function fileExists(filePath: string): Promise<boolean> {
try {
await fs.access(filePath);
return true;
} catch {
return false;
}
}
export async function updateEnvFile(filePath: string, key: string, value: string): Promise<void> {
try {
let content = '';
if (await fileExists(filePath)) {
content = await fs.readFile(filePath, 'utf8');
}
const lines = content.split('\n');
const keyIndex = lines.findIndex(line => line.startsWith(`${key}=`));
if (keyIndex !== -1) {
lines[keyIndex] = `${key}=${value}`;
} else {
lines.push(`${key}=${value}`);
}
await fs.writeFile(filePath, lines.join('\n'));
} catch (error: any) {
throw new Error(`Failed to update .env file: ${error.message}`);
}
}

View File

@ -1,22 +0,0 @@
import { execSync } from "child_process";
export function getGitRemoteUrl(): string | null {
try {
const url = execSync("git config --get remote.origin.url", { encoding: "utf8" }).trim();
return url;
} catch {
return null;
}
}
export function isValidCoreRepo(): boolean {
const remoteUrl = getGitRemoteUrl();
if (!remoteUrl) return false;
return (
remoteUrl.includes("github.com/redplanethq/core") ||
remoteUrl.includes("github.com:redplanethq/core") ||
remoteUrl.includes("github.com/tegonhq/echo") ||
remoteUrl.includes("github.com:tegonhq/echo")
);
}

View File

@ -1,128 +0,0 @@
// This is a copy of the logger utility from the wrangler repo: https://github.com/cloudflare/workers-sdk/blob/main/packages/wrangler/src/logger.ts
import { format } from "node:util";
import chalk from "chalk";
import CLITable from "cli-table3";
import { formatMessagesSync } from "esbuild";
import type { Message } from "esbuild";
import { env } from "std-env";
export const LOGGER_LEVELS = {
none: -1,
error: 0,
warn: 1,
info: 2,
log: 3,
debug: 4,
} as const;
export type LoggerLevel = keyof typeof LOGGER_LEVELS;
/** A map from LOGGER_LEVEL to the error `kind` needed by `formatMessagesSync()`. */
const LOGGER_LEVEL_FORMAT_TYPE_MAP = {
error: "error",
warn: "warning",
info: undefined,
log: undefined,
debug: undefined,
} as const;
function getLoggerLevel(): LoggerLevel {
const fromEnv = env.TRIGGER_LOG_LEVEL?.toLowerCase();
if (fromEnv !== undefined) {
if (fromEnv in LOGGER_LEVELS) return fromEnv as LoggerLevel;
const expected = Object.keys(LOGGER_LEVELS)
.map((level) => `"${level}"`)
.join(" | ");
console.warn(
`Unrecognised TRIGGER_LOG_LEVEL value ${JSON.stringify(
fromEnv
)}, expected ${expected}, defaulting to "log"...`
);
}
return "log";
}
export type TableRow<Keys extends string> = Record<Keys, string>;
export class Logger {
constructor() {}
loggerLevel = getLoggerLevel();
columns = process.stdout.columns;
debug = (...args: unknown[]) => this.doLog("debug", args);
ignore = (...args: unknown[]) => {};
debugWithSanitization = (label: string, ...args: unknown[]) => {
this.doLog("debug", [label, ...args]);
};
info = (...args: unknown[]) => this.doLog("info", args);
log = (...args: unknown[]) => this.doLog("log", args);
/** @deprecated **ONLY USE THIS IN THE CLI** - It will hang the process when used in deployed code (!) */
warn = (...args: unknown[]) => this.doLog("warn", args);
/** @deprecated **ONLY USE THIS IN THE CLI** - It will hang the process when used in deployed code (!) */
error = (...args: unknown[]) => this.doLog("error", args);
table<Keys extends string>(data: TableRow<Keys>[], level?: Exclude<LoggerLevel, "none">) {
const keys: Keys[] = data.length === 0 ? [] : (Object.keys(data[0]!) as Keys[]);
const t = new CLITable({
head: keys,
style: {
head: chalk.level ? ["blue"] : [],
border: chalk.level ? ["gray"] : [],
},
});
t.push(...data.map((row) => keys.map((k) => row[k])));
return this.doLog(level ?? "log", [t.toString()]);
}
private doLog(messageLevel: Exclude<LoggerLevel, "none">, args: unknown[]) {
const message = this.formatMessage(messageLevel, format(...args));
// only send logs to the terminal if their level is at least the configured log-level
if (LOGGER_LEVELS[this.loggerLevel] >= LOGGER_LEVELS[messageLevel]) {
console[messageLevel](message);
}
}
private formatMessage(level: Exclude<LoggerLevel, "none">, message: string): string {
const kind = LOGGER_LEVEL_FORMAT_TYPE_MAP[level];
if (kind) {
// Format the message using the esbuild formatter.
// The first line of the message is the main `text`,
// subsequent lines are put into the `notes`.
const [firstLine, ...otherLines] = message.split("\n");
const notes = otherLines.length > 0 ? otherLines.map((text) => ({ text })) : undefined;
return formatMessagesSync([{ text: firstLine, notes }], {
color: true,
kind,
terminalWidth: this.columns,
})[0]!;
} else {
return message;
}
}
}
/**
* A drop-in replacement for `console` for outputting logging messages.
*
* Errors and Warnings will get additional formatting to highlight them to the user.
* You can also set a `logger.loggerLevel` value to one of "debug", "log", "warn" or "error",
* to filter out logging messages.
*/
export const logger = new Logger();
export function logBuildWarnings(warnings: Message[]) {
const logs = formatMessagesSync(warnings, { kind: "warning", color: true });
for (const log of logs) console.warn(log);
}
/**
* Logs all errors/warnings associated with an esbuild BuildFailure in the same
* style esbuild would.
*/
export function logBuildFailure(errors: Message[], warnings: Message[]) {
const logs = formatMessagesSync(errors, { kind: "error", color: true });
for (const log of logs) console.error(log);
logBuildWarnings(warnings);
}

View File

@ -1,22 +0,0 @@
import { spinner } from '@clack/prompts';
export function createSpinner(message: string) {
return spinner();
}
export async function withSpinner<T>(
message: string,
task: () => Promise<T>
): Promise<T> {
const s = spinner();
s.start(message);
try {
const result = await task();
s.stop(message);
return result;
} catch (error) {
s.stop(`${message} - Failed`);
throw error;
}
}

View File

@ -1,160 +0,0 @@
/*
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Copyright (c) James Talmage <james@talmage.io> (https://github.com/jamestalmage)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import { createSupportsColor } from "supports-color";
import hasFlag from "has-flag";
function parseVersion(versionString = ""): { major: number; minor: number; patch: number } {
if (/^\d{3,4}$/.test(versionString)) {
// Env var doesn't always use dots. example: 4601 => 46.1.0
const match = /(\d{1,2})(\d{2})/.exec(versionString) ?? [];
return {
major: 0,
minor: Number.parseInt(match[1] ?? "0", 10),
patch: Number.parseInt(match[2] ?? "0", 10),
};
}
const versions = (versionString ?? "").split(".").map((n) => Number.parseInt(n, 10));
return {
major: versions[0] ?? 0,
minor: versions[1] ?? 0,
patch: versions[2] ?? 0,
};
}
/**
Creates a supports hyperlinks check for a given stream.
@param stream - Optional stream to check for hyperlink support.
@returns boolean indicating whether hyperlinks are supported.
*/
export function createSupportsHyperlinks(stream: NodeJS.WriteStream): boolean {
const {
CI,
CURSOR_TRACE_ID,
FORCE_HYPERLINK,
NETLIFY,
TEAMCITY_VERSION,
TERM_PROGRAM,
TERM_PROGRAM_VERSION,
VTE_VERSION,
TERM,
} = process.env;
if (FORCE_HYPERLINK) {
return !(FORCE_HYPERLINK.length > 0 && Number.parseInt(FORCE_HYPERLINK, 10) === 0);
}
if (
hasFlag("no-hyperlink") ||
hasFlag("no-hyperlinks") ||
hasFlag("hyperlink=false") ||
hasFlag("hyperlink=never")
) {
return false;
}
if (hasFlag("hyperlink=true") || hasFlag("hyperlink=always")) {
return true;
}
// Netlify does not run a TTY, it does not need `supportsColor` check
if (NETLIFY) {
return true;
}
// If they specify no colors, they probably don't want hyperlinks.
if (!createSupportsColor(stream)) {
return false;
}
if (stream && !stream.isTTY) {
return false;
}
// Windows Terminal
if ("WT_SESSION" in process.env) {
return true;
}
if (process.platform === "win32") {
return false;
}
if (CI) {
return false;
}
if (TEAMCITY_VERSION) {
return false;
}
if (CURSOR_TRACE_ID) {
return true;
}
if (TERM_PROGRAM) {
const version = parseVersion(TERM_PROGRAM_VERSION);
switch (TERM_PROGRAM) {
case "iTerm.app": {
if (version.major === 3) {
return version.minor >= 1;
}
return version.major > 3;
}
case "WezTerm": {
return version.major >= 20_200_620;
}
case "vscode": {
// eslint-disable-next-line no-mixed-operators
return version.major > 1 || (version.major === 1 && version.minor >= 72);
}
case "ghostty": {
return true;
}
// No default
}
}
if (VTE_VERSION) {
// 0.50.0 was supposed to support hyperlinks, but throws a segfault
if (VTE_VERSION === "0.50.0") {
return false;
}
const version = parseVersion(VTE_VERSION);
return version.major > 0 || version.minor >= 50;
}
switch (TERM) {
case "alacritty": {
// Support added in v0.11 (2022-10-13)
return true;
}
// No default
}
return false;
}
/** Object containing hyperlink support status for stdout and stderr. */
const supportsHyperlinks = {
/** Whether stdout supports hyperlinks. */
stdout: createSupportsHyperlinks(process.stdout),
/** Whether stderr supports hyperlinks. */
stderr: createSupportsHyperlinks(process.stderr),
};
export default supportsHyperlinks;

View File

@ -1,94 +0,0 @@
/*
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import ansiEscapes from "ansi-escapes";
import supportsHyperlinks from "./supportsHyperlinks.js";
export type TerminalLinkOptions = {
/**
Override the default fallback. If false, the fallback will be disabled.
@default `${text} (${url})`
*/
readonly fallback?: ((text: string, url: string) => string) | boolean;
};
/**
Create a clickable link in the terminal's stdout.
[Supported terminals.](https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda)
For unsupported terminals, the link will be printed in parens after the text: `My website (https://sindresorhus.com)`,
unless the fallback is disabled by setting the `fallback` option to `false`.
@param text - Text to linkify.
@param url - URL to link to.
@example
```
import terminalLink from 'terminal-link';
const link = terminalLink('My Website', 'https://sindresorhus.com');
console.log(link);
```
@deprecated The default fallback is broken in some terminals. Please use `cliLink` instead.
*/
function terminalLink(
text: string,
url: string,
{ target = "stdout", ...options }: { target?: "stdout" | "stderr" } & TerminalLinkOptions = {}
) {
if (!supportsHyperlinks[target]) {
// If the fallback has been explicitly disabled, don't modify the text itself.
if (options.fallback === false) {
return text;
}
return typeof options.fallback === "function"
? options.fallback(text, url)
: `${text} (\u200B${url}\u200B)`;
}
return ansiEscapes.link(text, url);
}
/**
Check whether the terminal supports links.
Prefer just using the default fallback or the `fallback` option whenever possible.
*/
terminalLink.isSupported = supportsHyperlinks.stdout;
terminalLink.stderr = terminalLinkStderr;
/**
Create a clickable link in the terminal's stderr.
[Supported terminals.](https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda)
For unsupported terminals, the link will be printed in parens after the text: `My website (https://sindresorhus.com)`.
@param text - Text to linkify.
@param url - URL to link to.
@example
```
import terminalLink from 'terminal-link';
const link = terminalLink.stderr('My Website', 'https://sindresorhus.com');
console.error(link);
```
*/
function terminalLinkStderr(text: string, url: string, options: TerminalLinkOptions = {}) {
return terminalLink(text, url, { target: "stderr", ...options });
}
/**
Check whether the terminal's stderr supports links.
Prefer just using the default fallback or the `fallback` option whenever possible.
*/
terminalLinkStderr.isSupported = supportsHyperlinks.stderr;
export { terminalLink };

View File

@ -1,66 +0,0 @@
import { note, log } from "@clack/prompts";
import { executeCommandInteractive } from "./docker-interactive.js";
import { getDockerCompatibleEnvVars } from "./env-docker.js";
import path from "path";
export async function deployTriggerTasks(rootDir: string): Promise<void> {
const webappDir = path.join(rootDir, "apps", "webapp");
const databaseDir = path.join(rootDir, "packages", "database");
const typesDir = path.join(rootDir, "packages", "types");
note(
"We'll now deploy the trigger tasks to your Trigger.dev instance.",
"🚀 Deploying Trigger.dev tasks"
);
try {
// Login to trigger.dev CLI
await executeCommandInteractive(
"npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030",
{
cwd: rootDir,
message: "Logging in to Trigger.dev CLI...",
showOutput: true,
}
);
await executeCommandInteractive("pnpm install", {
cwd: rootDir,
message: "Running package installation",
showOutput: true,
});
const envVars = await getDockerCompatibleEnvVars(rootDir);
await executeCommandInteractive("pnpm build", {
cwd: databaseDir,
message: "Building @core/database...",
showOutput: true,
env: {
DATABASE_URL: envVars.DATABASE_URL as string,
},
});
await executeCommandInteractive("pnpm build", {
cwd: typesDir,
message: "Building @core/types...",
showOutput: true,
});
// Deploy trigger tasks
await executeCommandInteractive("pnpm run trigger:deploy", {
cwd: webappDir,
message: "Deploying Trigger.dev tasks...",
showOutput: true,
env: envVars,
});
log.success("Trigger.dev tasks deployed successfully!");
} catch (error: any) {
log.warning("Failed to deploy Trigger.dev tasks:");
note(
`${error.message}\n\nYou can deploy them manually later with:\n1. npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030\n2. pnpm run trigger:deploy`,
"Manual Deployment"
);
}
}
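The removed script leans on an `executeCommandInteractive` helper from docker-interactive.ts. A minimal sketch of the shape that helper appears to have, assuming execa and the @clack/prompts spinner; the real implementation may differ:

// Assumed shape of executeCommandInteractive (sketch only, not the
// actual docker-interactive.ts implementation).
import { spinner } from "@clack/prompts";
import { execa } from "execa";

type ExecOptions = {
  cwd: string;
  message: string;
  showOutput?: boolean;
  env?: Record<string, string>;
};

export async function executeCommandInteractive(
  command: string,
  options: ExecOptions
): Promise<void> {
  const s = spinner();
  s.start(options.message);
  try {
    // Run the full command string through a shell, merging any extra env.
    await execa(command, {
      cwd: options.cwd,
      shell: true,
      env: { ...process.env, ...options.env },
      stdio: options.showOutput ? "inherit" : "pipe",
    });
    s.stop(`${options.message} done`);
  } catch (error) {
    s.stop(`${options.message} failed`);
    throw error;
  }
}

With such a helper, deployTriggerTasks(rootDir) simply chains the login, install, build, and deploy commands shown above.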

275
pnpm-lock.yaml generated
View File

@ -31,6 +31,253 @@ importers:
specifier: 5.5.4
version: 5.5.4
apps/init:
dependencies:
'@clack/prompts':
specifier: ^0.10.0
version: 0.10.1
'@depot/cli':
specifier: 0.0.1-cli.2.80.0
version: 0.0.1-cli.2.80.0
'@opentelemetry/api':
specifier: 1.9.0
version: 1.9.0
'@opentelemetry/api-logs':
specifier: 0.52.1
version: 0.52.1
'@opentelemetry/exporter-logs-otlp-http':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)
'@opentelemetry/exporter-trace-otlp-http':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)
'@opentelemetry/instrumentation':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)(supports-color@10.0.0)
'@opentelemetry/instrumentation-fetch':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)(supports-color@10.0.0)
'@opentelemetry/resources':
specifier: 1.25.1
version: 1.25.1(@opentelemetry/api@1.9.0)
'@opentelemetry/sdk-logs':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)
'@opentelemetry/sdk-node':
specifier: 0.52.1
version: 0.52.1(@opentelemetry/api@1.9.0)(supports-color@10.0.0)
'@opentelemetry/sdk-trace-base':
specifier: 1.25.1
version: 1.25.1(@opentelemetry/api@1.9.0)
'@opentelemetry/sdk-trace-node':
specifier: 1.25.1
version: 1.25.1(@opentelemetry/api@1.9.0)
'@opentelemetry/semantic-conventions':
specifier: 1.25.1
version: 1.25.1
ansi-escapes:
specifier: ^7.0.0
version: 7.0.0
braces:
specifier: ^3.0.3
version: 3.0.3
c12:
specifier: ^1.11.1
version: 1.11.2(magicast@0.3.5)
chalk:
specifier: ^5.2.0
version: 5.4.1
chokidar:
specifier: ^3.6.0
version: 3.6.0
cli-table3:
specifier: ^0.6.3
version: 0.6.5
commander:
specifier: ^9.4.1
version: 9.5.0
defu:
specifier: ^6.1.4
version: 6.1.4
dotenv:
specifier: ^16.4.5
version: 16.5.0
dotenv-expand:
specifier: ^12.0.2
version: 12.0.2
esbuild:
specifier: ^0.23.0
version: 0.23.1
eventsource:
specifier: ^3.0.2
version: 3.0.7
evt:
specifier: ^2.4.13
version: 2.5.9
fast-npm-meta:
specifier: ^0.2.2
version: 0.2.2
git-last-commit:
specifier: ^1.0.1
version: 1.0.1
gradient-string:
specifier: ^2.0.2
version: 2.0.2
has-flag:
specifier: ^5.0.1
version: 5.0.1
import-in-the-middle:
specifier: 1.11.0
version: 1.11.0
import-meta-resolve:
specifier: ^4.1.0
version: 4.1.0
ini:
specifier: ^5.0.0
version: 5.0.0
jsonc-parser:
specifier: 3.2.1
version: 3.2.1
knex:
specifier: 3.1.0
version: 3.1.0(pg@8.16.3)(supports-color@10.0.0)
magicast:
specifier: ^0.3.4
version: 0.3.5
minimatch:
specifier: ^10.0.1
version: 10.0.2
mlly:
specifier: ^1.7.1
version: 1.7.4
nanoid:
specifier: 3.3.8
version: 3.3.8
nypm:
specifier: ^0.5.4
version: 0.5.4
object-hash:
specifier: ^3.0.0
version: 3.0.0
open:
specifier: ^10.0.3
version: 10.2.0
p-limit:
specifier: ^6.2.0
version: 6.2.0
p-retry:
specifier: ^6.1.0
version: 6.2.1
partysocket:
specifier: ^1.0.2
version: 1.1.4
pg:
specifier: 8.16.3
version: 8.16.3
pkg-types:
specifier: ^1.1.3
version: 1.3.1
polka:
specifier: ^0.5.2
version: 0.5.2
resolve:
specifier: ^1.22.8
version: 1.22.10
semver:
specifier: ^7.5.0
version: 7.7.2
signal-exit:
specifier: ^4.1.0
version: 4.1.0
source-map-support:
specifier: 0.5.21
version: 0.5.21
std-env:
specifier: ^3.7.0
version: 3.9.0
supports-color:
specifier: ^10.0.0
version: 10.0.0
tiny-invariant:
specifier: ^1.2.0
version: 1.3.3
tinyexec:
specifier: ^0.3.1
version: 0.3.2
tinyglobby:
specifier: ^0.2.10
version: 0.2.14
uuid:
specifier: 11.1.0
version: 11.1.0
ws:
specifier: ^8.18.0
version: 8.18.3
xdg-app-paths:
specifier: ^8.3.0
version: 8.3.0
zod:
specifier: 3.23.8
version: 3.23.8
zod-validation-error:
specifier: ^1.5.0
version: 1.5.0(zod@3.23.8)
devDependencies:
'@epic-web/test-server':
specifier: ^0.1.0
version: 0.1.6
'@types/gradient-string':
specifier: ^1.1.2
version: 1.1.6
'@types/ini':
specifier: ^4.1.1
version: 4.1.1
'@types/object-hash':
specifier: 3.0.6
version: 3.0.6
'@types/polka':
specifier: ^0.5.7
version: 0.5.7
'@types/react':
specifier: ^18.2.48
version: 18.2.69
'@types/resolve':
specifier: ^1.20.6
version: 1.20.6
'@types/rimraf':
specifier: ^4.0.5
version: 4.0.5
'@types/semver':
specifier: ^7.5.0
version: 7.7.0
'@types/source-map-support':
specifier: 0.5.10
version: 0.5.10
'@types/ws':
specifier: ^8.5.3
version: 8.18.1
cpy-cli:
specifier: ^5.0.0
version: 5.0.0
execa:
specifier: ^8.0.1
version: 8.0.1
find-up:
specifier: ^7.0.0
version: 7.0.0
rimraf:
specifier: ^5.0.7
version: 5.0.10
ts-essentials:
specifier: 10.0.1
version: 10.0.1(typescript@5.8.3)
tshy:
specifier: ^3.0.2
version: 3.0.2
tsx:
specifier: 4.17.0
version: 4.17.0
apps/webapp:
dependencies:
'@ai-sdk/anthropic':
@ -82,19 +329,19 @@ importers:
specifier: ^1.0.5
version: 1.1.14(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-avatar':
specifier: ^1.0.4
specifier: ^1.1.10
version: 1.1.10(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-checkbox':
specifier: ^1.0.4
version: 1.3.2(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-collapsible':
specifier: ^1.0.3
specifier: ^1.1.11
version: 1.1.11(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-dialog':
specifier: ^1.1.14
version: 1.1.14(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-dropdown-menu':
specifier: ^2.0.6
specifier: ^2.1.15
version: 2.1.15(@types/react-dom@18.3.7(@types/react@18.2.69))(@types/react@18.2.69)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@radix-ui/react-icons':
specifier: ^1.3.0
@ -15609,7 +15856,7 @@ snapshots:
ulid: 2.4.0
uncrypto: 0.1.3
uuid: 9.0.1
ws: 8.17.1
ws: 8.18.3
zod: 3.23.8
optionalDependencies:
ai: 4.3.14(react@18.3.1)(zod@3.23.8)
@ -15643,7 +15890,7 @@ snapshots:
'@types/connect@3.4.38':
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
'@types/cookie@0.4.1': {}
@ -15651,7 +15898,7 @@ snapshots:
'@types/cors@2.8.19':
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
'@types/d3-array@3.2.1': {}
@ -15798,7 +16045,7 @@ snapshots:
'@types/express-serve-static-core@4.19.6':
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
'@types/send': 0.17.5
@ -15946,7 +16193,7 @@ snapshots:
'@types/send@0.17.5':
dependencies:
'@types/mime': 1.3.5
'@types/node': 20.11.5
'@types/node': 20.19.7
'@types/serve-static@1.15.8':
dependencies:
@ -15980,7 +16227,7 @@ snapshots:
'@types/webpack@5.28.5(@swc/core@1.3.101(@swc/helpers@0.5.17))(esbuild@0.19.11)':
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
tapable: 2.2.2
webpack: 5.99.9(@swc/core@1.3.101(@swc/helpers@0.5.17))(esbuild@0.19.11)
transitivePeerDependencies:
@ -15991,7 +16238,7 @@ snapshots:
'@types/ws@8.18.1':
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
'@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3)':
dependencies:
@ -17677,7 +17924,7 @@ snapshots:
dependencies:
'@types/cookie': 0.4.1
'@types/cors': 2.8.19
'@types/node': 20.11.5
'@types/node': 20.19.7
accepts: 1.3.8
base64id: 2.0.0
cookie: 0.4.2
@ -18331,7 +18578,7 @@ snapshots:
eval@0.1.8:
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
require-like: 0.1.2
event-target-polyfill@0.0.4: {}
@ -19322,7 +19569,7 @@ snapshots:
jest-worker@27.5.1:
dependencies:
'@types/node': 20.11.5
'@types/node': 20.19.7
merge-stream: 2.0.0
supports-color: 8.1.1
@ -21407,7 +21654,7 @@ snapshots:
'@protobufjs/path': 1.1.2
'@protobufjs/pool': 1.1.0
'@protobufjs/utf8': 1.1.0
'@types/node': 20.11.5
'@types/node': 20.19.7
long: 5.3.2
proxy-addr@2.0.7:

View File

@ -1 +0,0 @@
registry-user:$2y$05$6ingYqw0.3j13dxHY4w3neMSvKhF3pvRmc0AFifScWsVA9JpuLwNK
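The removed entry is a bcrypt htpasswd credential for the local Docker registry (paired with the `registry-user:very-secure-indeed` comment in the compose file below). A hedged sketch of generating a comparable entry, assuming the bcryptjs package; `htpasswd -Bbn` is the usual CLI route, and the registry accepts both `$2a$` and `$2y$` bcrypt hashes:

// Illustrative only: produce an htpasswd-style bcrypt entry.
// bcryptjs emits a "$2a$" prefix rather than "$2y$"; both are bcrypt
// variants the registry's htpasswd auth understands.
import bcrypt from "bcryptjs";

const entry = `registry-user:${bcrypt.hashSync("very-secure-indeed", 5)}`; // cost 5, like $2y$05$
console.log(entry);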

View File

@ -1,281 +0,0 @@
x-logging: &logging-config
driver: ${LOGGING_DRIVER:-local}
options:
max-size: ${LOGGING_MAX_SIZE:-20m}
max-file: ${LOGGING_MAX_FILES:-5}
compress: ${LOGGING_COMPRESS:-true}
services:
webapp:
container_name: trigger-webapp
image: ghcr.io/triggerdotdev/trigger.dev:${TRIGGER_IMAGE_TAG:-v4-beta}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${WEBAPP_PUBLISH_IP:-0.0.0.0}:8030:3000
depends_on:
- clickhouse
networks:
- webapp
- supervisor
volumes:
- type: bind
source: /efs/trigger/shared
target: /home/node/shared
# Only needed for bootstrap
user: root
# Only needed for bootstrap
command: sh -c "chown -R node:node /home/node/shared && exec ./scripts/entrypoint.sh"
healthcheck:
test:
[
"CMD",
"node",
"-e",
"http.get('http://localhost:3000/healthcheck', res => process.exit(res.statusCode === 200 ? 0 : 1)).on('error', () => process.exit(1))",
]
interval: 30s
timeout: 10s
retries: 5
start_period: 10s
environment:
APP_ORIGIN: ${APP_ORIGIN:-http://localhost:8030}
LOGIN_ORIGIN: ${LOGIN_ORIGIN:-http://localhost:8030}
API_ORIGIN: ${API_ORIGIN:-http://localhost:8030}
ELECTRIC_ORIGIN: http://electric:3000
DATABASE_URL: ${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/main?schema=public&sslmode=disable}
DIRECT_URL: ${DIRECT_URL:-postgresql://postgres:postgres@postgres:5432/main?schema=public&sslmode=disable}
SESSION_SECRET: ${SESSION_SECRET}
MAGIC_LINK_SECRET: ${MAGIC_LINK_SECRET}
ENCRYPTION_KEY: ${ENCRYPTION_KEY}
MANAGED_WORKER_SECRET: ${MANAGED_WORKER_SECRET}
REDIS_HOST: trigger-redis
REDIS_PORT: 6379
REDIS_TLS_DISABLED: true
APP_LOG_LEVEL: info
DEV_OTEL_EXPORTER_OTLP_ENDPOINT: ${DEV_OTEL_EXPORTER_OTLP_ENDPOINT:-http://localhost:8030/otel}
DEPLOY_REGISTRY_HOST: ${DOCKER_REGISTRY_URL:-localhost:5000}
OBJECT_STORE_BASE_URL: ${OBJECT_STORE_BASE_URL:-http://minio:9000}
OBJECT_STORE_ACCESS_KEY_ID: ${OBJECT_STORE_ACCESS_KEY_ID}
OBJECT_STORE_SECRET_ACCESS_KEY: ${OBJECT_STORE_SECRET_ACCESS_KEY}
GRACEFUL_SHUTDOWN_TIMEOUT: 1000
# Bootstrap - this will automatically set up a worker group for you
# This will NOT work for split deployments
TRIGGER_BOOTSTRAP_ENABLED: 1
TRIGGER_BOOTSTRAP_WORKER_GROUP_NAME: bootstrap
TRIGGER_BOOTSTRAP_WORKER_TOKEN_PATH: /home/node/shared/worker_token
# ClickHouse configuration
CLICKHOUSE_URL: ${CLICKHOUSE_URL:-http://default:password@clickhouse:8123?secure=false}
CLICKHOUSE_LOG_LEVEL: ${CLICKHOUSE_LOG_LEVEL:-info}
# Run replication
RUN_REPLICATION_ENABLED: ${RUN_REPLICATION_ENABLED:-1}
RUN_REPLICATION_CLICKHOUSE_URL: ${RUN_REPLICATION_CLICKHOUSE_URL:-http://default:password@clickhouse:8123}
RUN_REPLICATION_LOG_LEVEL: ${RUN_REPLICATION_LOG_LEVEL:-info}
# Limits
# TASK_PAYLOAD_OFFLOAD_THRESHOLD: 524288 # 512KB
# TASK_PAYLOAD_MAXIMUM_SIZE: 3145728 # 3MB
# BATCH_TASK_PAYLOAD_MAXIMUM_SIZE: 1000000 # 1MB
# TASK_RUN_METADATA_MAXIMUM_SIZE: 262144 # 256KB
# DEFAULT_ENV_EXECUTION_CONCURRENCY_LIMIT: 100
# DEFAULT_ORG_EXECUTION_CONCURRENCY_LIMIT: 100
# Internal OTEL configuration
INTERNAL_OTEL_TRACE_LOGGING_ENABLED: ${INTERNAL_OTEL_TRACE_LOGGING_ENABLED:-0}
electric:
container_name: trigger-electric
image: electricsql/electric:${ELECTRIC_IMAGE_TAG:-1.0.10}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
networks:
- webapp
environment:
DATABASE_URL: ${ELECTRIC_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/main?schema=public&sslmode=disable}
ELECTRIC_INSECURE: true
ELECTRIC_USAGE_REPORTING: false
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3000/v1/health"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
clickhouse:
container_name: trigger-clickhouse
image: bitnami/clickhouse:${CLICKHOUSE_IMAGE_TAG:-latest}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${CLICKHOUSE_PUBLISH_IP:-127.0.0.1}:9123:8123
- ${CLICKHOUSE_PUBLISH_IP:-127.0.0.1}:9090:9000
environment:
CLICKHOUSE_ADMIN_USER: ${CLICKHOUSE_USER:-default}
CLICKHOUSE_ADMIN_PASSWORD: ${CLICKHOUSE_PASSWORD:-password}
volumes:
- type: bind
source: /efs/trigger/clickhouse
target: /bitnami/clickhouse
- ../clickhouse/override.xml:/bitnami/clickhouse/etc/config.d/override.xml:ro
networks:
- webapp
healthcheck:
test:
[
"CMD",
"clickhouse-client",
"--host",
"localhost",
"--port",
"9000",
"--user",
"default",
"--password",
"password",
"--query",
"SELECT 1",
]
interval: 5s
timeout: 5s
retries: 5
start_period: 10s
registry:
container_name: trigger-registry
image: registry:${REGISTRY_IMAGE_TAG:-2}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${REGISTRY_PUBLISH_IP:-127.0.0.1}:5000:5000
networks:
- webapp
volumes:
# registry-user:very-secure-indeed
- ../registry/auth.htpasswd:/auth/htpasswd:ro
environment:
REGISTRY_AUTH: htpasswd
REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:5000/"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
minio:
container_name: trigger-minio
image: bitnami/minio:${MINIO_IMAGE_TAG:-latest}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
ports:
- ${MINIO_PUBLISH_IP:-127.0.0.1}:9000:9000
- ${MINIO_PUBLISH_IP:-127.0.0.1}:9001:9001
networks:
- webapp
volumes:
- type: bind
source: /efs/trigger/minio
target: /bitnami/minio/data
environment:
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-admin}
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-very-safe-password}
MINIO_DEFAULT_BUCKETS: packets
MINIO_BROWSER: "on"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 5s
timeout: 10s
retries: 5
start_period: 10s
# Worker related
supervisor:
container_name: trigger-supervisor
image: ghcr.io/triggerdotdev/supervisor:${TRIGGER_IMAGE_TAG:-v4-beta}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
depends_on:
- docker-proxy
networks:
- supervisor
- docker-proxy
- webapp
volumes:
- type: bind
source: /efs/trigger/shared
target: /home/node/shared
# Only needed for bootstrap
user: root
# Only needed for bootstrap
command: sh -c "chown -R node:node /home/node/shared && exec /usr/bin/dumb-init -- pnpm run --filter supervisor start"
environment:
# This needs to match the token of the worker group you want to connect to
# TRIGGER_WORKER_TOKEN: ${TRIGGER_WORKER_TOKEN}
# Use the bootstrap token created by the webapp
TRIGGER_WORKER_TOKEN: file:///home/node/shared/worker_token
MANAGED_WORKER_SECRET: ${MANAGED_WORKER_SECRET}
TRIGGER_API_URL: ${TRIGGER_API_URL:-http://webapp:3000}
OTEL_EXPORTER_OTLP_ENDPOINT: ${OTEL_EXPORTER_OTLP_ENDPOINT:-http://webapp:3000/otel}
TRIGGER_WORKLOAD_API_DOMAIN: supervisor
TRIGGER_WORKLOAD_API_PORT_EXTERNAL: 8020
# Optional settings
DEBUG: 1
ENFORCE_MACHINE_PRESETS: 1
TRIGGER_DEQUEUE_INTERVAL_MS: 1000
DOCKER_HOST: tcp://docker-proxy:2375
DOCKER_RUNNER_NETWORKS: webapp,supervisor
DOCKER_REGISTRY_URL: ${DOCKER_REGISTRY_URL:-localhost:5000}
DEPLOY_REGISTRY_NAMESPACE: ${DOCKER_REGISTRY_NAMESPACE:-redplanethq}
DOCKER_REGISTRY_USERNAME: ${DOCKER_REGISTRY_USERNAME:-}
DOCKER_REGISTRY_PASSWORD: ${DOCKER_REGISTRY_PASSWORD:-}
DOCKER_AUTOREMOVE_EXITED_CONTAINERS: 0
healthcheck:
test:
[
"CMD",
"node",
"-e",
"http.get('http://localhost:8020/health', res => process.exit(res.statusCode === 200 ? 0 : 1)).on('error', () => process.exit(1))",
]
interval: 30s
timeout: 10s
retries: 5
start_period: 10s
docker-proxy:
container_name: trigger-docker-proxy
image: tecnativa/docker-socket-proxy:${DOCKER_PROXY_IMAGE_TAG:-latest}
restart: ${RESTART_POLICY:-unless-stopped}
logging: *logging-config
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
networks:
- docker-proxy
environment:
- LOG_LEVEL=info
- POST=1
- CONTAINERS=1
- IMAGES=1
- INFO=1
- NETWORKS=1
healthcheck:
test: ["CMD", "nc", "-z", "127.0.0.1", "2375"]
interval: 30s
timeout: 5s
retries: 5
start_period: 5s
redis:
container_name: trigger-redis
image: redis:7
ports:
- "6379:6379"
networks:
- webapp
networks:
docker-proxy:
name: docker-proxy
supervisor:
name: supervisor
webapp:
name: webapp
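One detail worth noting in the removed compose file: the supervisor receives TRIGGER_WORKER_TOKEN as a file:// reference to the token the webapp writes during bootstrap, rather than as the token itself. A minimal sketch of resolving such a value (assumed behavior; the actual supervisor code may differ):

// Sketch of the file:// token indirection (assumption, not the actual
// supervisor implementation). The webapp bootstrap writes the worker
// token to /home/node/shared/worker_token, and the supervisor gets
// TRIGGER_WORKER_TOKEN=file:///home/node/shared/worker_token.
import { readFileSync } from "node:fs";

export function resolveWorkerToken(raw: string): string {
  // A plain token is used as-is; a file:// URL is dereferenced and trimmed.
  return raw.startsWith("file://")
    ? readFileSync(new URL(raw), "utf8").trim()
    : raw;
}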