Fix: core cli to work with core repo (#26)

* Fix: core cli to work with core repo

* Fix: cli working and mcp proxy
Harshith Mullapudi 2025-07-22 10:17:40 +05:30 committed by GitHub
parent c4467a2306
commit 8d9ddcf375
44 changed files with 983 additions and 252 deletions

View File

@@ -12,7 +12,7 @@ POSTGRES_PASSWORD=docker
 POSTGRES_DB=core
 LOGIN_ORIGIN=http://localhost:3033
-DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
 # This sets the URL used for direct connections to the database and should only be needed in limited circumstances
 # See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No
@@ -22,6 +22,8 @@ REMIX_APP_PORT=3033
 APP_ENV=production
 NODE_ENV=${APP_ENV}
 APP_ORIGIN=http://localhost:3033
+API_BASE_URL=${APP_ORIGIN}
 SESSION_SECRET=27192e6432564f4788d55c15131bd5ac
 ENCRYPTION_KEY=27192e6432564f4788d55c15131bd5ac
@@ -48,8 +50,8 @@ MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
 NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
 OLLAMA_URL=http://ollama:11434
-EMBEDDING_MODEL=GPT41
-MODEL=GPT41
+EMBEDDING_MODEL=text-embedding-3-small
+MODEL=gpt-4.1-2025-04-14
 ## Trigger ##
 TRIGGER_PROJECT_ID=

View File

@@ -1,14 +1,9 @@
-import React from "react";
-import { Theme, useTheme } from "remix-themes";
 export interface LogoProps {
   width: number;
   height: number;
 }
 export default function StaticLogo({ width, height }: LogoProps) {
-  const [theme] = useTheme();
   return (
     <svg
       width={width}

View File

@@ -51,7 +51,7 @@ export function LogsFilters({
   const handleBack = () => setStep("main");
   return (
-    <div className="mb-4 flex items-center gap-2">
+    <div className="mb-4 flex w-full items-center justify-start gap-2">
       <Popover
         open={popoverOpen}
         onOpenChange={(open) => {

View File

@@ -153,7 +153,7 @@ export function VirtualLogsList({
   const itemCount = hasMore ? logs.length + 1 : logs.length;
   return (
-    <div className="h-[calc(100vh_-_132px)] overflow-hidden rounded-lg">
+    <div className="h-full grow overflow-hidden rounded-lg">
       <AutoSizer className="h-full">
         {({ width, height: autoHeight }) => (
           <InfiniteLoader

View File

@@ -6,13 +6,21 @@
 import { PassThrough } from "node:stream";
-import { type AppLoadContext, type EntryContext , createReadableStreamFromReadable } from "@remix-run/node";
+import {
+  type AppLoadContext,
+  type EntryContext,
+  createReadableStreamFromReadable,
+} from "@remix-run/node";
 import { RemixServer } from "@remix-run/react";
 import { isbot } from "isbot";
 import { renderToPipeableStream } from "react-dom/server";
+import { initializeStartupServices } from "./utils/startup";
 const ABORT_DELAY = 5_000;
+// Initialize startup services once per server process
+await initializeStartupServices();
 export default function handleRequest(
   request: Request,
   responseStatusCode: number,
@@ -21,20 +29,20 @@ export default function handleRequest(
   // This is ignored so we can keep it in the template for visibility. Feel
   // free to delete this parameter in your app if you're not using it!
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  loadContext: AppLoadContext
+  loadContext: AppLoadContext,
 ) {
   return isbot(request.headers.get("user-agent") || "")
     ? handleBotRequest(
         request,
         responseStatusCode,
         responseHeaders,
-        remixContext
+        remixContext,
       )
     : handleBrowserRequest(
         request,
         responseStatusCode,
         responseHeaders,
-        remixContext
+        remixContext,
       );
 }
@@ -42,7 +50,7 @@ function handleBotRequest(
   request: Request,
   responseStatusCode: number,
   responseHeaders: Headers,
-  remixContext: EntryContext
+  remixContext: EntryContext,
 ) {
   return new Promise((resolve, reject) => {
     let shellRendered = false;
@@ -64,7 +72,7 @@ function handleBotRequest(
           new Response(stream, {
             headers: responseHeaders,
             status: responseStatusCode,
-          })
+          }),
         );
         pipe(body);
@@ -81,7 +89,7 @@ function handleBotRequest(
           console.error(error);
         }
       },
-    }
+    },
   );
   setTimeout(abort, ABORT_DELAY);
@@ -92,7 +100,7 @@ function handleBrowserRequest(
   request: Request,
   responseStatusCode: number,
   responseHeaders: Headers,
-  remixContext: EntryContext
+  remixContext: EntryContext,
 ) {
   return new Promise((resolve, reject) => {
     let shellRendered = false;
@@ -114,7 +122,7 @@ function handleBrowserRequest(
           new Response(stream, {
             headers: responseHeaders,
             status: responseStatusCode,
-          })
+          }),
        );
        pipe(body);
@@ -131,7 +139,7 @@ function handleBrowserRequest(
           console.error(error);
         }
       },
-    }
+    },
   );
   setTimeout(abort, ABORT_DELAY);

View File

@@ -4,7 +4,6 @@ import {
   Outlet,
   Scripts,
   ScrollRestoration,
-  useLoaderData,
 } from "@remix-run/react";
 import type {
   LinksFunction,
@@ -41,7 +40,6 @@ import {
   useTheme,
 } from "remix-themes";
 import clsx from "clsx";
-import { initNeo4jSchemaOnce } from "./lib/neo4j.server";
 export const links: LinksFunction = () => [{ rel: "stylesheet", href: styles }];
@@ -50,8 +48,6 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
   const toastMessage = session.get("toastMessage") as ToastMessage;
   const { getTheme } = await themeSessionResolver(request);
-  await initNeo4jSchemaOnce();
   const posthogProjectKey = env.POSTHOG_PROJECT_KEY;
   return typedjson(
@@ -138,7 +134,6 @@ function App() {
 // `specifiedTheme` is the stored theme in the session storage.
 // `themeAction` is the action name that's used to change the theme in the session storage.
 export default function AppWithProviders() {
-  const data = useLoaderData<typeof loader>();
   return (
     <ThemeProvider specifiedTheme={Theme.LIGHT} themeAction="/action/set-theme">
       <App />

View File

@@ -4,6 +4,9 @@ import { getIntegrationDefinitionWithSlug } from "~/services/integrationDefiniti
 import { proxyRequest } from "~/utils/proxy.server";
 import { z } from "zod";
 import { getIntegrationAccount } from "~/services/integrationAccount.server";
+import { createMCPStdioProxy } from "@core/mcp-proxy";
+import { randomUUID } from "node:crypto";
+import { configureStdioMCPEnvironment } from "~/trigger/utils/mcp";
 export const integrationSlugSchema = z.object({
   slug: z.string(),
@@ -48,7 +51,7 @@ const { action, loader } = createActionApiRoute(
     const spec = integrationDefinition.spec as any;
-    if (!spec.mcpAuth) {
+    if (!spec.mcp) {
       return new Response(
         JSON.stringify({
           error: "MCP auth configuration not found for this integration",
@@ -60,7 +63,7 @@
       );
     }
-    const { serverUrl } = spec.mcpAuth;
+    const { url, type } = spec.mcp;
     // Find the integration account for this user and integration
     const integrationAccount = await getIntegrationAccount(
@@ -68,29 +71,66 @@
       authentication.userId,
     );
-    const integrationConfig =
-      integrationAccount?.integrationConfiguration as any;
-    if (!integrationAccount || !integrationConfig || !integrationConfig.mcp) {
-      return new Response(
-        JSON.stringify({
-          error: "No integration account with mcp config",
-        }),
-        {
-          status: 400,
-          headers: { "Content-Type": "application/json" },
-        },
-      );
-    }
-    // Proxy the request to the serverUrl
-    return await proxyRequest(
-      request,
-      serverUrl,
-      integrationConfig.mcp.tokens.access_token,
-    );
+    if (type === "http") {
+      const integrationConfig =
+        integrationAccount?.integrationConfiguration as any;
+      if (
+        !integrationAccount ||
+        !integrationConfig ||
+        !integrationConfig.mcp
+      ) {
+        return new Response(
+          JSON.stringify({
+            error: "No integration account with mcp config",
+          }),
+          {
+            status: 400,
+            headers: { "Content-Type": "application/json" },
+          },
+        );
+      }
+      // Proxy the request to the serverUrl
+      return await proxyRequest(
+        request,
+        url,
+        integrationConfig.mcp.tokens.access_token,
+      );
+    } else {
+      if (!integrationAccount) {
+        return new Response(
+          JSON.stringify({
+            error: "No integration account found",
+          }),
+          {
+            status: 400,
+            headers: { "Content-Type": "application/json" },
+          },
+        );
+      }
+      // Configure environment variables using the utility function
+      const { env, args } = configureStdioMCPEnvironment(
+        spec,
+        integrationAccount,
+      );
+      // Get session_id from headers (case-insensitive), or generate a new uuid if not present
+      const sessionId =
+        request.headers.get("mcp-session-id") ||
+        request.headers.get("Mcp-Session-Id") ||
+        randomUUID();
+      // Use the saved local file instead of command
+      const executablePath = `./integrations/${slug}/main`;
+      return createMCPStdioProxy(request, executablePath, args, {
+        env,
+        sessionId,
+      });
+    }
   } catch (error: any) {
+    console.error("MCP Proxy Error:", error);
     return new Response(JSON.stringify({ error: error.message }), {
       status: 500,
       headers: { "Content-Type": "application/json" },

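For context, the route's new `type === "http"` / stdio branching keys off the `spec.mcp` block in each integration definition. A rough sketch of the two shapes, based on the Linear and Slack definitions updated later in this commit (no fields beyond those shown in the specs are assumed):

// Remote MCP server reached over HTTP (see the Linear spec below).
const httpSpec = {
  mcp: { type: "http", url: "https://mcp.linear.app/mcp", needsAuth: true },
};

// Local stdio MCP binary served from ./integrations/<slug>/main (see the Slack spec below).
const stdioSpec = {
  mcp: {
    type: "stdio",
    url: "", // download URL or local path for the executable, per fetchAndSaveStdioIntegrations
    args: [],
    env: { SLACK_MCP_XOXP_TOKEN: "${config:access_token}" },
  },
};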
View File

@@ -53,14 +53,14 @@ export async function loader({ request }: LoaderFunctionArgs) {
   const spec = integrationDefinition.spec as any;
-  if (!spec.mcpAuth) {
+  if (!spec.mcp) {
     throw new Error("MCP auth configuration not found for this integration");
   }
-  const { transportStrategy, serverUrl } = spec.mcpAuth;
+  const { transportStrategy, url } = spec.mcp;
   const authClient = createMCPAuthClient({
-    serverUrl,
+    serverUrl: url,
     transportStrategy: transportStrategy || "sse-first",
     redirectUrl: MCP_CALLBACK_URL,
   });

View File

@@ -137,7 +137,9 @@ export default function IntegrationDetail() {
   );
   const hasApiKey = !!specData?.auth?.api_key;
   const hasOAuth2 = !!specData?.auth?.OAuth2;
-  const hasMCPAuth = !!specData?.mcpAuth;
+  const hasMCPAuth = !!(
+    specData?.mcp.type === "url" && specData?.mcp.needsAuth
+  );
   const Component = getIcon(integration.icon as IconType);
   return (

View File

@@ -75,9 +75,9 @@ export default function LogsActivity() {
       )}
       {/* Logs List */}
-      <div className="space-y-4">
+      <div className="flex h-full w-full space-y-4">
         {logs.length === 0 ? (
-          <Card>
+          <Card className="bg-background-2 w-full">
            <CardContent className="bg-background-2 flex items-center justify-center py-16">
              <div className="text-center">
                <Activity className="text-muted-foreground mx-auto mb-4 h-12 w-12" />

View File

@@ -45,7 +45,7 @@ export default function LogsAll() {
         },
       ]}
     />
-    <div className="flex h-[calc(100vh_-_56px)] flex-col items-center space-y-6 p-4 px-5">
+    <div className="flex h-[calc(100vh_-_56px)] w-full flex-col items-center space-y-6 p-4 px-5">
       {isInitialLoad ? (
         <>
           <LoaderCircle className="text-primary h-4 w-4 animate-spin" />{" "}
@@ -64,10 +64,10 @@ export default function LogsAll() {
         />
       )}
       {/* Logs List */}
-      <div className="space-y-4">
+      <div className="flex h-full w-full space-y-4">
         {logs.length === 0 ? (
-          <Card>
-            <CardContent className="bg-background-2 flex items-center justify-center py-16">
+          <Card className="bg-background-2 w-full">
+            <CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
              <div className="text-center">
                <Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
                <h3 className="mb-2 text-lg font-semibold">

View File

@@ -158,7 +158,8 @@ export default function LoginMagicLinkPage() {
           <Fieldset className="flex w-full flex-col items-center gap-y-2 px-2">
             <FormButtons
-              cancelButton={
+              cancelButton={<></>}
+              confirmButton={
                 <Button
                   type="submit"
                   name="action"
@@ -169,7 +170,6 @@
                   Re-enter email
                 </Button>
               }
-              confirmButton={<></>}
             />
           </Fieldset>
         </Card>

View File

@@ -199,13 +199,13 @@ export default function OAuthAuthorize() {
             <p className="text-muted-foreground mb-2 text-sm">Permissions</p>
             <ul className="text-muted-foreground text-sm">
-              {params.scope?.split(",").map((scope, index, arr) => {
+              {params.scope?.split(" ").map((scope, index, arr) => {
                 const isFirst = index === 0;
                 const isLast = index === arr.length - 1;
                 return (
                   <li
                     key={index}
-                    className={`border-border flex items-center gap-2 border-x border-t p-2 ${isLast ? "border-b" : ""} ${isFirst ? "rounded-tl-md rounded-tr-md" : ""} ${isLast ? "rounded-br-md rounded-bl-md" : ""} `}
+                    className={`flex items-center gap-2 border-x border-t border-gray-300 p-2 ${isLast ? "border-b" : ""} ${isFirst ? "rounded-tl-md rounded-tr-md" : ""} ${isLast ? "rounded-br-md rounded-bl-md" : ""} `}
                   >
                     <div>{getIcon(scope)}</div>
                     <div>

View File

@@ -10,6 +10,9 @@ export const getIntegrationAccount = async (
       integratedById: userId,
       isActive: true,
     },
+    include: {
+      integrationDefinition: true,
+    },
   });
 };

View File

@@ -278,14 +278,14 @@ export async function getRedirectURLForMCP(
   const spec = integrationDefinition.spec as any;
-  if (!spec.mcpAuth) {
+  if (!spec.mcp) {
     throw new Error("MCP auth configuration not found for this integration");
   }
-  const { serverUrl, transportStrategy } = spec.mcpAuth;
+  const { url, transportStrategy } = spec.mcp;
   const authClient = createMCPAuthClient({
-    serverUrl,
+    serverUrl: url,
     transportStrategy: transportStrategy || "sse-first",
     redirectUrl: MCP_CALLBACK_URL,
   });

View File

@@ -1,9 +1,69 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 import { logger } from "@trigger.dev/sdk/v3";
 import { jsonSchema, tool, type ToolSet } from "ai";
+import * as fs from "fs";
+import * as path from "path";
 import { type MCPTool } from "./types";
 import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { prisma } from "~/db.server";
export const configureStdioMCPEnvironment = (
spec: any,
account: any,
): { env: Record<string, string>; args: any[] } => {
if (!spec.mcp) {
return { env: {}, args: [] };
}
const mcpSpec = spec.mcp;
const configuredMCP = { ...mcpSpec };
// Replace config placeholders in environment variables
if (configuredMCP.env) {
for (const [key, value] of Object.entries(configuredMCP.env)) {
if (typeof value === "string" && value.includes("${config:")) {
// Extract the config key from the placeholder
const configKey = value.match(/\$\{config:(.*?)\}/)?.[1];
if (
configKey &&
account.integrationConfiguration &&
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(account.integrationConfiguration as any)[configKey]
) {
configuredMCP.env[key] = value.replace(
`\${config:${configKey}}`,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(account.integrationConfiguration as any)[configKey],
);
}
}
if (typeof value === "string" && value.includes("${integrationConfig:")) {
// Extract the config key from the placeholder
const configKey = value.match(/\$\{integrationConfig:(.*?)\}/)?.[1];
if (
configKey &&
account.integrationDefinition.config &&
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(account.integrationDefinition.config as any)[configKey]
) {
configuredMCP.env[key] = value.replace(
`\${integrationConfig:${configKey}}`,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(account.integrationDefinition.config as any)[configKey],
);
}
}
}
}
return {
env: configuredMCP.env || {},
args: Array.isArray(configuredMCP.args) ? configuredMCP.args : [],
};
};
 export class MCP {
   private Client: any;
   private clients: Record<string, any> = {};
@@ -133,3 +193,135 @@
     }
   }
 }
export const getIntegrationStdioFile = async (
integrationDefinitionSlug: string,
) => {
// If the file is in public/integrations/[slug]/main, it is served at /integrations/[slug]/main
return `/integrations/${integrationDefinitionSlug}/main`;
};
export const fetchAndSaveStdioIntegrations = async () => {
try {
logger.info("Starting stdio integrations fetch and save process");
// Get all integration definitions
const integrationDefinitions =
await prisma.integrationDefinitionV2.findMany({
where: {
deleted: null, // Only active integrations
},
});
logger.info(
`Found ${integrationDefinitions.length} integration definitions`,
);
for (const integration of integrationDefinitions) {
try {
const spec = integration.spec as any;
// Check if this integration has MCP config and is stdio type
if (spec?.mcp?.type === "stdio" && spec?.mcp?.url) {
logger.info(`Processing stdio integration: ${integration.slug}`);
const integrationDir = path.join(
process.cwd(),
"integrations",
integration.slug,
);
const targetFile = path.join(integrationDir, "main");
// Create directory if it doesn't exist
if (!fs.existsSync(integrationDir)) {
fs.mkdirSync(integrationDir, { recursive: true });
logger.info(`Created directory: ${integrationDir}`);
}
// Skip if file already exists
if (fs.existsSync(targetFile)) {
logger.info(
`Integration ${integration.slug} already exists, skipping`,
);
continue;
}
const urlOrPath = spec.mcp.url;
// If urlOrPath looks like a URL, use fetch, otherwise treat as local path
let isUrl = false;
try {
// Try to parse as URL
const parsed = new URL(urlOrPath);
isUrl = ["http:", "https:"].includes(parsed.protocol);
} catch {
isUrl = false;
}
if (isUrl) {
// Fetch the URL content
logger.info(`Fetching content from URL: ${urlOrPath}`);
const response = await fetch(urlOrPath);
if (!response.ok) {
logger.error(
`Failed to fetch ${urlOrPath}: ${response.status} ${response.statusText}`,
);
continue;
}
const content = await response.text();
// Save the content to the target file
fs.writeFileSync(targetFile, content);
// Make the file executable if it's a script
if (process.platform !== "win32") {
fs.chmodSync(targetFile, "755");
}
logger.info(
`Successfully saved stdio integration: ${integration.slug} to ${targetFile}`,
);
} else {
// Treat as local file path
const sourcePath = path.isAbsolute(urlOrPath)
? urlOrPath
: path.join(process.cwd(), urlOrPath);
logger.info(`Copying content from local path: ${sourcePath}`);
if (!fs.existsSync(sourcePath)) {
logger.error(`Source file does not exist: ${sourcePath}`);
continue;
}
fs.copyFileSync(sourcePath, targetFile);
// Make the file executable if it's a script
if (process.platform !== "win32") {
fs.chmodSync(targetFile, "755");
}
logger.info(
`Successfully copied stdio integration: ${integration.slug} to ${targetFile}`,
);
}
} else {
logger.debug(
`Skipping integration ${integration.slug}: not a stdio type or missing URL`,
);
}
} catch (error) {
logger.error(`Error processing integration ${integration.slug}:`, {
error,
});
}
}
logger.info("Completed stdio integrations fetch and save process");
} catch (error) {
logger.error("Failed to fetch and save stdio integrations:", { error });
throw error;
}
};

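As a rough illustration of the `${config:...}` and `${integrationConfig:...}` substitution performed by `configureStdioMCPEnvironment` above — the spec keys and account values here are made up for the example, only the placeholder syntax and lookup paths come from the code:

// Hypothetical inputs; only the placeholder handling is taken from the function above.
const exampleSpec = {
  mcp: {
    type: "stdio",
    args: [],
    env: {
      SLACK_MCP_XOXP_TOKEN: "${config:access_token}",
      CLIENT_ID: "${integrationConfig:client_id}",
    },
  },
};
const exampleAccount = {
  integrationConfiguration: { access_token: "xoxp-123" },
  integrationDefinition: { config: { client_id: "abc" } },
};

const { env, args } = configureStdioMCPEnvironment(exampleSpec, exampleAccount);
// env  => { SLACK_MCP_XOXP_TOKEN: "xoxp-123", CLIENT_ID: "abc" }
// args => []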
View File

@@ -24,11 +24,14 @@ export interface OAuth2Context {
 export async function requireOAuth2(request: Request): Promise<OAuth2Context> {
   const authHeader = request.headers.get("authorization");
   if (!authHeader || !authHeader.startsWith("Bearer ")) {
     throw json(
-      { error: "invalid_token", error_description: "Missing or invalid authorization header" },
-      { status: 401 }
+      {
+        error: "invalid_token",
+        error_description: "Missing or invalid authorization header",
+      },
+      { status: 401 },
     );
   }
@@ -36,7 +39,7 @@ export async function requireOAuth2(request: Request): Promise<OAuth2Context> {
   try {
     const accessToken = await oauth2Service.validateAccessToken(token);
     return {
       user: {
         id: accessToken.user.id,
@@ -59,13 +62,18 @@
     };
   } catch (error) {
     throw json(
-      { error: "invalid_token", error_description: "Invalid or expired access token" },
-      { status: 401 }
+      {
+        error: "invalid_token",
+        error_description: "Invalid or expired access token",
+      },
+      { status: 401 },
     );
   }
 }
-export async function getOAuth2Context(request: Request): Promise<OAuth2Context | null> {
+export async function getOAuth2Context(
+  request: Request,
+): Promise<OAuth2Context | null> {
   try {
     return await requireOAuth2(request);
   } catch (error) {
@@ -73,20 +81,31 @@ export async function getOAuth2Context(request: Request): Promise<OAuth2Context
   }
 }
-export function hasScope(context: OAuth2Context, requiredScope: string): boolean {
+export function hasScope(
+  context: OAuth2Context,
+  requiredScope: string,
+): boolean {
   if (!context.token.scope) {
     return false;
   }
-  const scopes = context.token.scope.split(' ');
+  const scopes = context.token.scope.split(" ");
   return scopes.includes(requiredScope);
 }
-export function requireScope(context: OAuth2Context, requiredScope: string): void {
+export function requireScope(
+  context: OAuth2Context,
+  requiredScope: string,
+): void {
   if (!hasScope(context, requiredScope)) {
     throw json(
-      { error: "insufficient_scope", error_description: `Required scope: ${requiredScope}` },
-      { status: 403 }
+      {
+        error: "insufficient_scope",
+        error_description: `Required scope: ${requiredScope}`,
+      },
+      { status: 403 },
     );
   }
 }
+export function getEnvForCommand() {}

View File

@@ -0,0 +1,34 @@
import { logger } from "~/services/logger.service";
import { fetchAndSaveStdioIntegrations } from "~/trigger/utils/mcp";
import { initNeo4jSchemaOnce } from "~/lib/neo4j.server";
// Global flag to ensure startup only runs once per server process
let startupInitialized = false;
/**
* Initialize all startup services once per server process
* Safe to call multiple times - will only run initialization once
*/
export async function initializeStartupServices() {
if (startupInitialized) {
return;
}
try {
logger.info("Starting application initialization...");
// Initialize Neo4j schema
await initNeo4jSchemaOnce();
logger.info("Neo4j schema initialization completed");
await fetchAndSaveStdioIntegrations();
logger.info("Stdio integrations initialization completed");
startupInitialized = true;
logger.info("Application initialization completed successfully");
} catch (error) {
logger.error("Failed to initialize startup services:", { error });
// Don't mark as initialized if there was an error, allow retry
throw error;
}
}

View File

@@ -10,8 +10,8 @@
     "lint:fix": "eslint --fix --ignore-path .gitignore --cache --cache-location ./node_modules/.cache/eslint .",
     "start": "remix-serve ./build/server/index.js",
     "typecheck": "tsc",
-    "trigger:dev": "pnpm dlx trigger.dev@v4-beta dev",
-    "trigger:deploy": "pnpm dlx trigger.dev@v4-beta deploy"
+    "trigger:dev": "pnpm dlx trigger.dev@4.0.0-v4-beta.22 dev",
+    "trigger:deploy": "pnpm dlx trigger.dev@4.0.0-v4-beta.22 deploy"
   },
   "dependencies": {
     "@ai-sdk/anthropic": "^1.2.12",

View File

@@ -47,6 +47,7 @@ async function init() {
   // handle SSR requests
   app.all("*", remixHandler);
   const port = process.env.REMIX_APP_PORT || 3000;
   app.listen(port, () =>
     console.log(`Express server listening at http://localhost:${port}`),

View File

@@ -27,8 +27,16 @@ export default defineConfig({
   build: {
     extensions: [
       syncEnvVars(() => ({
-        DATABASE_URL: process.env.DATABASE_URL as string,
+        ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string,
         API_BASE_URL: process.env.API_BASE_URL as string,
+        DATABASE_URL: process.env.DATABASE_URL as string,
+        EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string,
+        ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string,
+        MODEL: process.env.MODEL ?? "gpt-4.1-2025-04-14",
+        NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string,
+        NEO4J_URI: process.env.NEO4J_URI as string,
+        NEO4J_USERNAME: process.env.NEO4J_USERNAME as string,
+        OPENAI_API_KEY: process.env.OPENAI_API_KEY as string,
       })),
       prismaExtension({
         schema: "prisma/schema.prisma",

View File

@@ -7,7 +7,8 @@
     "**/*.tsx",
     "tailwind.config.js",
     "tailwind.config.js",
-    "trigger.config.ts"
+    "trigger.config.ts",
+    "server.mjs"
   ],
   "compilerOptions": {
     "types": ["@remix-run/node", "vite/client"],

View File

@@ -40,7 +40,7 @@ services:
   postgres:
     container_name: core-postgres
-    image: redplanethq/postgres:0.1.0
+    image: redplanethq/postgres:0.1.2
     environment:
       - POSTGRES_USER=${POSTGRES_USER}
       - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}

View File

@@ -12,9 +12,9 @@
       "label": "Linear API Key"
     }
   },
-  "mcpAuth": {
-    "serverUrl": "https://mcp.linear.app/sse",
-    "transportStrategy": "sse-first",
-    "needsSeparateAuth": true
+  "mcp": {
+    "type": "http",
+    "url": "https://mcp.linear.app/mcp",
+    "needsAuth": true
   }
 }

View File

@@ -4,12 +4,11 @@
   "description": "Connect your workspace to Slack. Run your workflows from slack bookmarks",
   "icon": "slack",
   "mcp": {
-    "command": "npx",
-    "args": ["-y", "@modelcontextprotocol/server-slack"],
+    "type": "stdio",
+    "url": "",
+    "args": [ ],
     "env": {
-      "SLACK_BOT_TOKEN": "${config:access_token}",
-      "SLACK_TEAM_ID": "${config:team_id}",
-      "SLACK_CHANNEL_IDS": "${config:channel_ids}"
+      "SLACK_MCP_XOXP_TOKEN": "${config:access_token}"
     }
   },
   "auth": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "@redplanethq/core",
-  "version": "0.1.1",
+  "version": "0.1.3",
   "description": "A Command-Line Interface for Core",
   "type": "module",
   "license": "MIT",
@@ -89,6 +89,7 @@
     "commander": "^9.4.1",
     "defu": "^6.1.4",
     "dotenv": "^16.4.5",
+    "dotenv-expand": "^12.0.2",
     "esbuild": "^0.23.0",
     "eventsource": "^3.0.2",
     "evt": "^2.4.13",

View File

@@ -1,10 +1,13 @@
 import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts";
 import { fileExists, updateEnvFile } from "../utils/file.js";
 import { checkPostgresHealth } from "../utils/docker.js";
-import { executeDockerCommandInteractive } from "../utils/docker-interactive.js";
+import { executeCommandInteractive } from "../utils/docker-interactive.js";
 import { printCoreBrainLogo } from "../utils/ascii.js";
 import { setupEnvFile } from "../utils/env.js";
 import { hasTriggerConfig } from "../utils/env-checker.js";
+import { getDockerCompatibleEnvVars } from "../utils/env-docker.js";
+import { handleDockerLogin } from "../utils/docker-login.js";
+import { deployTriggerTasks } from "../utils/trigger-deploy.js";
 import path from "path";
 export async function initCommand() {
@@ -14,20 +17,29 @@ export async function initCommand() {
   intro("🚀 Core Development Environment Setup");
   // Step 1: Confirm this is the Core repository
-  note("Please ensure you have:\n• Docker and Docker Compose installed\n• Git installed\n• pnpm package manager installed\n• You are in the Core repository directory", "📋 Prerequisites");
+  note(
+    "Please ensure you have:\n• Docker and Docker Compose installed\n• Git installed\n• pnpm package manager installed\n• You are in the Core repository directory",
+    "📋 Prerequisites"
+  );
   const isCoreRepo = await confirm({
     message: "Are you currently in the Core repository directory?",
   });
   if (!isCoreRepo) {
-    note("Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", "📥 Clone Repository");
+    note(
+      "Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.",
+      "📥 Clone Repository"
+    );
     outro("❌ Setup cancelled. Please navigate to the Core repository first.");
     process.exit(1);
   }
   const rootDir = process.cwd();
   const triggerDir = path.join(rootDir, "trigger");
+  const webappDir = path.join(rootDir, "apps", "webapp");
+  const databaseDir = path.join(rootDir, "packages", "database");
+  const typesDir = path.join(rootDir, "packages", "types");
   try {
     // Step 2: Setup .env file in root
@@ -51,7 +63,7 @@ export async function initCommand() {
     // Step 3: Docker compose up -d in root
     try {
-      await executeDockerCommandInteractive("docker compose up -d", {
+      await executeCommandInteractive("docker compose up -d", {
         cwd: rootDir,
         message: "Starting Docker containers in root...",
         showOutput: true,
@@ -103,7 +115,7 @@ export async function initCommand() {
     // Step 6: Docker compose up for trigger
     try {
-      await executeDockerCommandInteractive("docker compose up -d", {
+      await executeCommandInteractive("docker compose up -d", {
         cwd: triggerDir,
         message: "Starting Trigger.dev containers...",
         showOutput: true,
@@ -175,6 +187,16 @@ export async function initCommand() {
       },
     });
+    const openaiApiKey = await text({
+      message: "Enter your OpenAI API Key:",
+      validate: (value) => {
+        if (!value || value.length === 0) {
+          return "OpenAI API Key is required";
+        }
+        return;
+      },
+    });
     // Step 11: Update .env with project details
     const s6 = spinner();
     s6.start("Updating .env with Trigger.dev configuration...");
@@ -182,6 +204,7 @@
     try {
       await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectId as string);
       await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", secretKey as string);
+      await updateEnvFile(envPath, "OPENAI_API_KEY", openaiApiKey as string);
       s6.stop("✅ Updated .env with Trigger.dev configuration");
     } catch (error: any) {
       s6.stop("❌ Failed to update .env file");
@@ -190,13 +213,13 @@
     // Step 12: Restart root docker-compose with new configuration
     try {
-      await executeDockerCommandInteractive("docker compose down", {
+      await executeCommandInteractive("docker compose down", {
         cwd: rootDir,
         message: "Stopping Core services...",
         showOutput: true,
       });
-      await executeDockerCommandInteractive("docker compose up -d", {
+      await executeCommandInteractive("docker compose up -d", {
         cwd: rootDir,
         message: "Starting Core services with new Trigger.dev configuration...",
         showOutput: true,
@@ -206,73 +229,12 @@
       }
     }
-    // Step 13: Show docker login instructions
+    // Step 13: Handle Docker login
     note("Run the following command to login to Docker registry:", "🐳 Docker Registry Login");
-    try {
-      // Read env file to get docker registry details
-      const envContent = await import("fs").then((fs) =>
-        fs.promises.readFile(triggerEnvPath, "utf8")
-      );
-      const envLines = envContent.split("\n");
-      const getEnvValue = (key: string) => {
-        const line = envLines.find((l) => l.startsWith(`${key}=`));
-        return line ? line.split("=")[1] : "";
-      };
-      const dockerRegistryUrl = getEnvValue("DOCKER_REGISTRY_URL");
-      const dockerRegistryUsername = getEnvValue("DOCKER_REGISTRY_USERNAME");
-      const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD");
-      log.info(
-        `docker login ${dockerRegistryUrl} -u ${dockerRegistryUsername} -p ${dockerRegistryPassword}`
-      );
-    } catch (error) {
-      log.info("docker login <REGISTRY_URL> -u <USERNAME> -p <PASSWORD>");
-    }
-    const dockerLoginConfirmed = await confirm({
-      message: "Have you completed the Docker login successfully?",
-    });
-    if (!dockerLoginConfirmed) {
-      outro("❌ Setup cancelled. Please complete Docker login first and run the command again.");
-      process.exit(1);
-    }
+    await handleDockerLogin(triggerEnvPath);
     // Step 14: Deploy Trigger.dev tasks
-    note(
-      "We'll now deploy the trigger tasks to your Trigger.dev instance.",
-      "🚀 Deploying Trigger.dev tasks"
-    );
-    try {
-      // Login to trigger.dev CLI
-      await executeDockerCommandInteractive(
-        "npx -y trigger.dev@v4-beta login -a http://localhost:8030",
-        {
-          cwd: rootDir,
-          message: "Logging in to Trigger.dev CLI...",
-          showOutput: true,
-        }
-      );
-      // Deploy trigger tasks
-      await executeDockerCommandInteractive("pnpm trigger:deploy", {
-        cwd: rootDir,
-        message: "Deploying Trigger.dev tasks...",
-        showOutput: true,
-      });
-      log.success("Trigger.dev tasks deployed successfully!");
-    } catch (error: any) {
-      log.warning("Failed to deploy Trigger.dev tasks:");
-      note(
-        `${error.message}\n\nYou can deploy them manually later with:\n1. npx trigger.dev@v4-beta login -a http://localhost:8030\n2. pnpm trigger:deploy`,
-        "Manual Deployment"
-      );
-    }
+    await deployTriggerTasks(rootDir);
     // Step 15: Final instructions
     outro("🎉 Setup Complete!");

View File

@@ -1,50 +1,55 @@
-import { intro, outro, note, log, confirm } from '@clack/prompts';
-import { executeDockerCommandInteractive } from '../utils/docker-interactive.js';
-import { printCoreBrainLogo } from '../utils/ascii.js';
-import path from 'path';
+import { intro, outro, note, log, confirm } from "@clack/prompts";
+import { executeCommandInteractive } from "../utils/docker-interactive.js";
+import { printCoreBrainLogo } from "../utils/ascii.js";
+import path from "path";
 export async function startCommand() {
   // Display the CORE brain logo
   printCoreBrainLogo();
-  intro('🚀 Starting Core Development Environment');
+  intro("🚀 Starting Core Development Environment");
   // Step 1: Confirm this is the Core repository
   const isCoreRepo = await confirm({
-    message: 'Are you currently in the Core repository directory?',
+    message: "Are you currently in the Core repository directory?",
   });
   if (!isCoreRepo) {
-    note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', '📥 Core Repository Required');
-    outro('❌ Please navigate to the Core repository first.');
+    note(
+      'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.',
+      "📥 Core Repository Required"
+    );
+    outro("❌ Please navigate to the Core repository first.");
     process.exit(1);
   }
   const rootDir = process.cwd();
-  const triggerDir = path.join(rootDir, 'trigger');
+  const triggerDir = path.join(rootDir, "trigger");
   try {
     // Start main services
-    await executeDockerCommandInteractive('docker compose up -d', {
+    await executeCommandInteractive("docker compose up -d", {
       cwd: rootDir,
-      message: 'Starting Core services...',
-      showOutput: true
+      message: "Starting Core services...",
+      showOutput: true,
     });
     // Start trigger services
-    await executeDockerCommandInteractive('docker compose up -d', {
+    await executeCommandInteractive("docker compose up -d", {
       cwd: triggerDir,
-      message: 'Starting Trigger.dev services...',
-      showOutput: true
+      message: "Starting Trigger.dev services...",
+      showOutput: true,
     });
     // Final success message
-    outro('🎉 Core Development Environment Started!');
-    note('• Core Application: http://localhost:3033\n• Trigger.dev: http://localhost:8030\n• PostgreSQL: localhost:5432', '🌐 Your services are now running');
-    log.success('Happy coding!');
+    outro("🎉 Core Development Environment Started!");
+    note(
+      "• Core Application: http://localhost:3033\n• Trigger.dev: http://localhost:8030\n• PostgreSQL: localhost:5432",
+      "🌐 Your services are now running"
+    );
+    log.success("Happy coding!");
   } catch (error: any) {
     outro(`❌ Failed to start services: ${error.message}`);
     process.exit(1);
   }
 }

View File

@@ -1,50 +1,52 @@
-import { intro, outro, log, confirm, note } from '@clack/prompts';
-import { executeDockerCommandInteractive } from '../utils/docker-interactive.js';
-import { printCoreBrainLogo } from '../utils/ascii.js';
-import path from 'path';
+import { intro, outro, log, confirm, note } from "@clack/prompts";
+import { executeCommandInteractive } from "../utils/docker-interactive.js";
+import { printCoreBrainLogo } from "../utils/ascii.js";
+import path from "path";
 export async function stopCommand() {
   // Display the CORE brain logo
   printCoreBrainLogo();
-  intro('🛑 Stopping Core Development Environment');
+  intro("🛑 Stopping Core Development Environment");
   // Step 1: Confirm this is the Core repository
   const isCoreRepo = await confirm({
-    message: 'Are you currently in the Core repository directory?',
+    message: "Are you currently in the Core repository directory?",
   });
   if (!isCoreRepo) {
-    note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', '📥 Core Repository Required');
-    outro('❌ Please navigate to the Core repository first.');
+    note(
+      'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.',
+      "📥 Core Repository Required"
+    );
+    outro("❌ Please navigate to the Core repository first.");
     process.exit(1);
   }
   const rootDir = process.cwd();
-  const triggerDir = path.join(rootDir, 'trigger');
+  const triggerDir = path.join(rootDir, "trigger");
   try {
     // Stop trigger services first
-    await executeDockerCommandInteractive('docker compose down', {
+    await executeCommandInteractive("docker compose down", {
       cwd: triggerDir,
-      message: 'Stopping Trigger.dev services...',
-      showOutput: true
+      message: "Stopping Trigger.dev services...",
+      showOutput: true,
    });
    // Stop main services
-    await executeDockerCommandInteractive('docker compose down', {
+    await executeCommandInteractive("docker compose down", {
      cwd: rootDir,
-      message: 'Stopping Core services...',
-      showOutput: true
+      message: "Stopping Core services...",
+      showOutput: true,
    });
    // Final success message
-    outro('🎉 Core Development Environment Stopped!');
-    log.success('All services have been stopped.');
+    outro("🎉 Core Development Environment Stopped!");
+    log.success("All services have been stopped.");
    log.info('Run "core start" to start services again.');
  } catch (error: any) {
    outro(`❌ Failed to stop services: ${error.message}`);
    process.exit(1);
  }
 }

View File

@@ -1,46 +1,45 @@
-import { spawn, ChildProcess } from 'child_process';
-import { spinner } from '@clack/prompts';
+import { spawn, ChildProcess } from "child_process";
+import { spinner } from "@clack/prompts";
-export interface DockerCommandOptions {
+export interface CommandOptions {
   cwd: string;
   message: string;
   showOutput?: boolean;
+  env?: Record<string, string>;
 }
-export function executeDockerCommandInteractive(
-  command: string,
-  options: DockerCommandOptions
-): Promise<void> {
+export function executeCommandInteractive(command: string, options: CommandOptions): Promise<void> {
   return new Promise((resolve, reject) => {
     const s = spinner();
     s.start(options.message);
     // Split command into parts
-    const parts = command.split(' ');
+    const parts = command.split(" ");
     const cmd = parts[0];
     const args = parts.slice(1);
     if (!cmd) {
-      reject(new Error('Invalid command'));
+      reject(new Error("Invalid command"));
       return;
     }
     const child: ChildProcess = spawn(cmd, args, {
       cwd: options.cwd,
-      stdio: options.showOutput ? ['ignore', 'pipe', 'pipe'] : 'ignore',
-      detached: false
+      stdio: options.showOutput ? ["ignore", "pipe", "pipe"] : "ignore",
+      detached: false,
+      env: options.env ? { ...process.env, ...options.env } : { ...process.env },
     });
-    let output = '';
+    let output = "";
     // Handle stdout
     if (child.stdout && options.showOutput) {
-      child.stdout.on('data', (data: Buffer) => {
+      child.stdout.on("data", (data: Buffer) => {
         const text = data.toString();
         output += text;
         // Update spinner with latest output line
-        const lines = text.trim().split('\n');
+        const lines = text.trim().split("\n");
         const lastLine = lines[lines.length - 1];
         if (lastLine && lastLine.trim()) {
           s.message(`${options.message}\n${lastLine.trim()}`);
@@ -50,12 +49,13 @@ export function executeDockerCommandInteractive(
     // Handle stderr
     if (child.stderr && options.showOutput) {
-      child.stderr.on('data', (data: Buffer) => {
+      child.stderr.on("data", (data: Buffer) => {
         const text = data.toString();
         output += text;
+        // console.log(text);
         // Update spinner with error output
-        const lines = text.trim().split('\n');
+        const lines = text.trim().split("\n");
         const lastLine = lines[lines.length - 1];
         if (lastLine && lastLine.trim()) {
           s.message(`${options.message}\n❌ ${lastLine.trim()}`);
@@ -64,14 +64,14 @@
     }
     // Handle process exit
-    child.on('exit', (code: number | null) => {
+    child.on("exit", (code: number | null) => {
       if (code === 0) {
-        s.stop(`${options.message.replace(/\.\.\.$/, '')} completed`);
+        s.stop(`${options.message.replace(/\.\.\.$/, "")} completed`);
         resolve();
       } else {
-        s.stop(`${options.message.replace(/\.\.\.$/, '')} failed (exit code: ${code})`);
+        s.stop(`${options.message.replace(/\.\.\.$/, "")} failed (exit code: ${code})`);
         if (options.showOutput && output) {
-          console.log('\nOutput:');
+          console.log("\nOutput:");
           console.log(output);
         }
         reject(new Error(`Command failed with exit code ${code}`));
@@ -79,30 +79,30 @@
     });
     // Handle errors
-    child.on('error', (error: Error) => {
-      s.stop(`${options.message.replace(/\.\.\.$/, '')} failed`);
+    child.on("error", (error: Error) => {
+      s.stop(`${options.message.replace(/\.\.\.$/, "")} failed`);
       reject(error);
     });
     // Handle Ctrl+C
     const handleSigint = () => {
-      s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, '')} interrupted`);
-      child.kill('SIGTERM');
+      s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, "")} interrupted`);
+      child.kill("SIGTERM");
       // Give the process time to clean up
       setTimeout(() => {
         if (child.killed === false) {
-          child.kill('SIGKILL');
+          child.kill("SIGKILL");
         }
         process.exit(130); // Standard exit code for SIGINT
       }, 5000);
     };
-    process.on('SIGINT', handleSigint);
+    process.on("SIGINT", handleSigint);
     // Clean up event listener when done
-    child.on('exit', () => {
-      process.off('SIGINT', handleSigint);
+    child.on("exit", () => {
+      process.off("SIGINT", handleSigint);
     });
   });
 }

View File

@@ -0,0 +1,63 @@
import { confirm, log } from "@clack/prompts";
import path from "path";
import os from "os";
import fs from "fs";
export async function handleDockerLogin(triggerEnvPath: string): Promise<void> {
// Check if Docker is already logged in to localhost:5000
let dockerLoginNeeded = true;
try {
const dockerConfigPath = process.env.DOCKER_CONFIG
? path.join(process.env.DOCKER_CONFIG, "config.json")
: path.join(os.homedir(), ".docker", "config.json");
if (fs.existsSync(dockerConfigPath)) {
const configContent = await fs.promises.readFile(dockerConfigPath, "utf8");
const config = JSON.parse(configContent);
if (
config &&
config.auths &&
Object.prototype.hasOwnProperty.call(config.auths, "localhost:5000")
) {
dockerLoginNeeded = false;
}
}
} catch (error) {
// Ignore errors, will prompt for login below
}
if (dockerLoginNeeded) {
try {
// Read env file to get docker registry details
const envContent = await fs.promises.readFile(triggerEnvPath, "utf8");
const envLines = envContent.split("\n");
const getEnvValue = (key: string) => {
const line = envLines.find((l) => l.startsWith(`${key}=`));
return line ? line.split("=")[1] : "";
};
const dockerRegistryUrl = getEnvValue("DOCKER_REGISTRY_URL");
const dockerRegistryUsername = getEnvValue("DOCKER_REGISTRY_USERNAME");
const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD");
log.info(
`docker login -u ${dockerRegistryUsername} -p ${dockerRegistryPassword} ${dockerRegistryUrl} `
);
} catch (error) {
log.info("docker login -u <USERNAME> -p <PASSWORD> <REGISTRY_URL>");
}
} else {
log.info("✅ Docker is already logged in to localhost:5000, skipping login prompt.");
}
const dockerLoginConfirmed = await confirm({
message: "Have you completed the Docker login successfully?",
});
if (!dockerLoginConfirmed) {
throw new Error(
"Docker login required. Please complete Docker login first and run the command again."
);
}
}

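For reference, the `auths` check above is looking for an entry keyed by the registry host in `~/.docker/config.json`. A rough sketch of what that file typically contains after `docker login localhost:5000` (the exact shape can vary, e.g. when a credential helper is configured the entry may be present but empty):

// Approximate shape of ~/.docker/config.json; values are illustrative only.
const exampleDockerConfig = {
  auths: {
    "localhost:5000": { auth: "dXNlcjpwYXNzd29yZA==" }, // base64("user:password")
  },
};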
View File

@@ -0,0 +1,48 @@
import path from "path";
import fs from "fs";
import dotenv from "dotenv";
import dotenvExpand from "dotenv-expand";
/**
* Reads environment variables from .env file and replaces localhost URLs with host.docker.internal
* for Docker container compatibility
*/
export async function getDockerCompatibleEnvVars(rootDir: string): Promise<Record<string, string>> {
const envPath = path.join(rootDir, ".env");
try {
// Use dotenv to parse and expand variables
const envVarsExpand =
dotenvExpand.expand(dotenv.config({ path: envPath, processEnv: {} })).parsed || {};
const getEnvValue = (key: string): string => {
return envVarsExpand[key] || "";
};
const replaceLocalhostWithDockerHost = (value: string): string => {
return value
.replace(/localhost/g, "host.docker.internal")
.replace(/127\.0\.0\.1/g, "host.docker.internal");
};
// Get all required environment variables
const envVars = {
ANTHROPIC_API_KEY: getEnvValue("ANTHROPIC_API_KEY"),
API_BASE_URL: replaceLocalhostWithDockerHost(getEnvValue("API_BASE_URL")),
DATABASE_URL: replaceLocalhostWithDockerHost(getEnvValue("DATABASE_URL")),
EMBEDDING_MODEL: getEnvValue("EMBEDDING_MODEL"),
ENCRYPTION_KEY: getEnvValue("ENCRYPTION_KEY"),
MODEL: getEnvValue("MODEL") || "gpt-4.1-2025-04-14",
NEO4J_PASSWORD: getEnvValue("NEO4J_PASSWORD"),
NEO4J_URI: replaceLocalhostWithDockerHost(getEnvValue("NEO4J_URI")),
NEO4J_USERNAME: getEnvValue("NEO4J_USERNAME"),
OPENAI_API_KEY: getEnvValue("OPENAI_API_KEY"),
TRIGGER_PROJECT_ID: getEnvValue("TRIGGER_PROJECT_ID"),
};
return envVars;
} catch (error) {
throw new Error(`Failed to read .env file: ${error}`);
}
}

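A small usage illustration of `getDockerCompatibleEnvVars` above; the .env values are made up, only the dotenv-expand resolution and the localhost rewriting come from the code:

// Hypothetical .env contents:
//   APP_ORIGIN=http://localhost:3033
//   API_BASE_URL=${APP_ORIGIN}
//   NEO4J_URI=bolt://localhost:7687
//
// dotenv-expand resolves API_BASE_URL to http://localhost:3033, and
// replaceLocalhostWithDockerHost then produces values the deploy containers can reach:
//   API_BASE_URL = "http://host.docker.internal:3033"
//   NEO4J_URI    = "bolt://host.docker.internal:7687"
const envVars = await getDockerCompatibleEnvVars(process.cwd());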
View File

@@ -0,0 +1,66 @@
import { note, log } from "@clack/prompts";
import { executeCommandInteractive } from "./docker-interactive.js";
import { getDockerCompatibleEnvVars } from "./env-docker.js";
import path from "path";
export async function deployTriggerTasks(rootDir: string): Promise<void> {
const webappDir = path.join(rootDir, "apps", "webapp");
const databaseDir = path.join(rootDir, "packages", "database");
const typesDir = path.join(rootDir, "packages", "types");
note(
"We'll now deploy the trigger tasks to your Trigger.dev instance.",
"🚀 Deploying Trigger.dev tasks"
);
try {
// Login to trigger.dev CLI
await executeCommandInteractive(
"npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030",
{
cwd: rootDir,
message: "Logging in to Trigger.dev CLI...",
showOutput: true,
}
);
await executeCommandInteractive("pnpm install", {
cwd: rootDir,
message: "Running package installation",
showOutput: true,
});
const envVars = await getDockerCompatibleEnvVars(rootDir);
await executeCommandInteractive("pnpm build", {
cwd: databaseDir,
message: "Building @core/database...",
showOutput: true,
env: {
DATABASE_URL: envVars.DATABASE_URL as string,
},
});
await executeCommandInteractive("pnpm build", {
cwd: typesDir,
message: "Building @core/types...",
showOutput: true,
});
// Deploy trigger tasks
await executeCommandInteractive("pnpm run trigger:deploy", {
cwd: webappDir,
message: "Deploying Trigger.dev tasks...",
showOutput: true,
env: envVars,
});
log.success("Trigger.dev tasks deployed successfully!");
} catch (error: any) {
log.warning("Failed to deploy Trigger.dev tasks:");
note(
`${error.message}\n\nYou can deploy them manually later with:\n1. npx trigger.dev@v4-beta login -a http://localhost:8030\n2. pnpm trigger:deploy`,
"Manual Deployment"
);
}
}

View File

@@ -21,7 +21,7 @@
     "db:studio": "prisma studio",
     "db:reset": "prisma migrate reset",
     "typecheck": "tsc --noEmit",
-    "build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration",
+    "build": "pnpm run generate && pnpm run clean && tsc --noEmit false --outDir dist --declaration",
     "dev": "tsc --noEmit false --outDir dist --declaration --watch"
   }
 }

View File

@ -1,6 +1,7 @@
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
import {
MCPRemoteClientConfig,
AuthenticationResult,
@ -153,7 +154,7 @@ export function createMCPProxy(
redirectUrl: string,
transportStrategy: TransportStrategy = "sse-first",
clientHeaders?: { sessionId?: string | null; lastEventId?: string | null }
-): Promise<SSEClientTransport | StreamableHTTPClientTransport> {
+): Promise<SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport> {
// Create auth provider with stored credentials using common factory
const authProvider = await createAuthProviderForProxy(serverUrl, credentials, redirectUrl);
@ -173,9 +174,16 @@ export function createMCPProxy(
}
// Create transport based on strategy (don't start yet)
-let transport: SSEClientTransport | StreamableHTTPClientTransport;
+let transport: SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport;
switch (transportStrategy) {
+case "stdio":
+// For stdio transport, serverUrl should contain the command to execute
+// This is mainly for completeness - prefer using createMCPStdioProxy directly
+throw new Error(
+"Stdio transport not supported in createRemoteTransport. Use createMCPStdioProxy instead."
+);
case "sse-only":
transport = new SSEClientTransport(url, {
authProvider,
@ -227,6 +235,257 @@ export function createMCPProxy(
}
}
/**
* Creates an MCP proxy that forwards requests to a stdio process.
* Maintains a mapping of sessionId -> StdioClientTransport for reuse.
* If sessionId is provided, it is returned in the response header as mcp-session-id.
* @param request The incoming HTTP request
* @param command The command to execute for the stdio process
* @param args Arguments for the command
* @param options Optional configuration for the proxy
* @param sessionId Optional session id for transport reuse
* @returns Promise that resolves to the HTTP response
*/
// Track both the transport and its last used timestamp
type StdioTransportEntry = {
transport: StdioClientTransport;
lastUsed: number; // ms since epoch
};
const stdioTransports: Map<string, StdioTransportEntry> = new Map();
/**
* Cleans up any stdio transports that have not been used in the last 5 minutes.
* Closes and removes them from the map.
*/
function cleanupOldStdioTransports() {
const now = Date.now();
const FIVE_MINUTES = 5 * 60 * 1000;
for (const [sessionId, entry] of stdioTransports.entries()) {
if (now - entry.lastUsed > FIVE_MINUTES) {
try {
entry.transport.close?.();
} catch (err) {
// ignore
}
stdioTransports.delete(sessionId);
}
}
}
export function createMCPStdioProxy(
request: Request,
command: string,
args?: string[],
options?: {
/** Enable debug logging */
debug?: boolean;
/** Environment variables to pass to the process */
env?: Record<string, string>;
/** Custom header-to-environment variable mapping */
headerMapping?: Record<string, string>;
/** Optional session id for transport reuse */
sessionId?: string;
}
): Promise<Response> {
return new Promise<Response>(async (resolve) => {
let bridge: any = null;
let serverTransport: StdioClientTransport | undefined;
let sessionId: string | undefined =
options?.sessionId || request.headers.get("Mcp-Session-Id") || undefined;
// Clean up old transports before handling new connection
cleanupOldStdioTransports();
try {
// Extract headers from the incoming request and convert to environment variables
const env = createEnvironmentFromRequest(
request,
options?.env || {},
options?.headerMapping || {}
);
// If sessionId is provided, try to reuse the transport
let entry: StdioTransportEntry | undefined;
if (sessionId) {
entry = stdioTransports.get(sessionId);
if (entry) {
serverTransport = entry.transport;
entry.lastUsed = Date.now();
}
}
// If no transport exists for this sessionId, create a new one and store it
if (!serverTransport) {
serverTransport = new StdioClientTransport({
command,
args: args || [],
env,
});
await serverTransport.start();
if (sessionId) {
stdioTransports.set(sessionId, {
transport: serverTransport,
lastUsed: Date.now(),
});
}
}
// Create Remix transport (converts HTTP to MCP messages)
// We need to wrap resolve to inject the sessionId header if present
const resolveWithSessionId = (response: Response) => {
if (sessionId) {
// Clone the response and add the mcp_session_id header
const headers = new Headers(response.headers);
headers.set("mcp-session-id", sessionId);
resolve(
new Response(response.body, {
status: response.status,
statusText: response.statusText,
headers,
})
);
} else {
resolve(response);
}
};
const clientTransport = new RemixMCPTransport(request, resolveWithSessionId);
// Bridge the transports
const bridgeOptions: any = {
debug: options?.debug || false,
onError: (error: Error, source: string) => {
console.error(`[MCP Stdio Bridge] ${source} error:`, error);
},
};
if (options?.debug) {
bridgeOptions.onMessage = (direction: string, message: any) => {
console.log(`[MCP Stdio Bridge] ${direction}:`, message.method || message.id);
};
}
bridge = createMCPTransportBridge(
clientTransport as any,
serverTransport as any,
bridgeOptions
);
// Start only the client transport (server is already started)
await clientTransport.start();
} catch (error) {
console.error("MCP Stdio Proxy Error:", error);
if (bridge) {
bridge.close().catch(console.error);
}
const errorMessage = error instanceof Error ? error.message : String(error);
// Always include the mcp-session-id header if sessionId is present
const headers: Record<string, string> = { "Content-Type": "application/json" };
if (sessionId) {
headers["mcp-session-id"] = sessionId;
}
resolve(
new Response(
JSON.stringify({
error: `Stdio proxy error: ${errorMessage}`,
}),
{
status: 500,
headers,
}
)
);
}
});
}
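// Usage sketch (assumption, not part of the commit): a Remix resource route that
// forwards MCP traffic to a local stdio server via createMCPStdioProxy above.
// The command, header mapping and export name are illustrative only.
export async function exampleStdioProxyAction({ request }: { request: Request }): Promise<Response> {
  return createMCPStdioProxy(request, "npx", ["-y", "some-mcp-server"], {
    debug: false,
    // Forward an auth header to the child process as an environment variable.
    headerMapping: { Authorization: "MCP_AUTH_TOKEN" },
    // Reusing the Mcp-Session-Id header keeps the same stdio process across requests.
    sessionId: request.headers.get("Mcp-Session-Id") ?? undefined,
  });
}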
/**
* Creates environment variables from request headers
*/
function createEnvironmentFromRequest(
request: Request,
baseEnv: Record<string, string>,
headerMapping: Record<string, string>
): Record<string, string> {
// Start with base environment (inherit safe environment variables)
const env: Record<string, string> = {
...getDefaultEnvironment(),
...baseEnv,
};
// Add standard MCP headers as environment variables
const sessionId = request.headers.get("Mcp-Session-Id");
const lastEventId = request.headers.get("Last-Event-Id");
const contentType = request.headers.get("Content-Type");
const userAgent = request.headers.get("User-Agent");
if (sessionId) {
env["MCP_SESSION_ID"] = sessionId;
}
if (lastEventId) {
env["MCP_LAST_EVENT_ID"] = lastEventId;
}
if (contentType) {
env["MCP_CONTENT_TYPE"] = contentType;
}
if (userAgent) {
env["MCP_USER_AGENT"] = userAgent;
}
// Apply custom header-to-environment variable mapping
for (const [headerName, envVarName] of Object.entries(headerMapping)) {
const headerValue = request.headers.get(headerName);
if (headerValue) {
env[envVarName] = headerValue;
}
}
return env;
}
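// Illustration (assumption, not part of the commit): how the standard MCP headers
// and a custom headerMapping translate into the child process environment.
function exampleHeaderToEnvMapping(): Record<string, string> {
  const req = new Request("http://localhost/api/mcp", {
    method: "POST",
    headers: { "Mcp-Session-Id": "abc123", "X-Api-Key": "secret" },
  });
  const env = createEnvironmentFromRequest(req, {}, { "X-Api-Key": "MY_SERVER_API_KEY" });
  // env.MCP_SESSION_ID === "abc123"
  // env.MY_SERVER_API_KEY === "secret"
  // plus inherited defaults such as PATH (see getDefaultEnvironment below)
  return env;
}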
/**
* Returns a default environment object including only environment variables deemed safe to inherit.
*/
function getDefaultEnvironment(): Record<string, string> {
const DEFAULT_INHERITED_ENV_VARS =
process.platform === "win32"
? [
"APPDATA",
"HOMEDRIVE",
"HOMEPATH",
"LOCALAPPDATA",
"PATH",
"PROCESSOR_ARCHITECTURE",
"SYSTEMDRIVE",
"SYSTEMROOT",
"TEMP",
"USERNAME",
"USERPROFILE",
]
: ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"];
const env: Record<string, string> = {};
for (const key of DEFAULT_INHERITED_ENV_VARS) {
const value = process.env[key];
if (value === undefined) {
continue;
}
if (value.startsWith("()")) {
// Skip functions, which are a security risk.
continue;
}
env[key] = value;
}
return env;
}
export class MCPAuthenticationClient {
private serverUrlHash: string;
private authProvider: NodeOAuthClientProvider | null = null;

View File

@ -5,6 +5,7 @@ export * from "./types/index.js";
export {
createMCPAuthClient,
createMCPProxy,
+createMCPStdioProxy,
MCPAuthenticationClient,
} from "./core/mcp-remote-client.js";

View File

@ -51,7 +51,7 @@ export interface ProxyConnectionConfig {
/**
* Transport strategy options
*/
-export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first";
+export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first" | "stdio";
/**
* Static OAuth client metadata * Static OAuth client metadata

View File

@ -23,7 +23,7 @@ export function createMCPTransportBridge(
// Forward messages from client to server
clientTransport.onmessage = (message: any, extra: any) => {
-console.log(JSON.stringify(message));
+console.log(message);
log("[Client→Server]", message.method || message.id);
onMessage?.("client-to-server", message);
@ -41,6 +41,7 @@ export function createMCPTransportBridge(
// Forward messages from server to client
serverTransport.onmessage = (message: any, extra: any) => {
+console.log(message);
console.log(JSON.stringify(message), JSON.stringify(extra));
log("[Server→Client]", message.method || message.id);
onMessage?.("server-to-client", message);

View File

@ -34,13 +34,24 @@ export class RemixMCPTransport implements Transport {
throw new Error("Invalid JSON-RPC message"); throw new Error("Invalid JSON-RPC message");
} }
// Emit the message to handler if (message.method.includes("notifications")) {
if (this.onmessage) { this.send({});
try { return;
this.onmessage(message); }
} catch (error) {
if (this.onerror) { console.log(message, "message");
this.onerror(error as Error);
if (Object.keys(message).length === 0) {
this.send({});
} else {
// Emit the message to handler
if (this.onmessage) {
try {
this.onmessage(message);
} catch (error) {
if (this.onerror) {
this.onerror(error as Error);
}
} }
} }
} }

pnpm-lock.yaml generated
View File

@ -568,6 +568,9 @@ importers:
dotenv:
specifier: ^16.4.5
version: 16.5.0
+dotenv-expand:
+specifier: ^12.0.2
+version: 12.0.2
esbuild:
specifier: ^0.23.0
version: 0.23.1
@ -6315,6 +6318,10 @@ packages:
resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==}
engines: {node: '>=12'}
+dotenv-expand@12.0.2:
+resolution: {integrity: sha512-lXpXz2ZE1cea1gL4sz2Ipj8y4PiVjytYr3Ij0SWoms1PGxIv7m2CRKuRuCRtHdVuvM/hNJPMxt5PbhboNC4dPQ==}
+engines: {node: '>=12'}
dotenv@16.0.3:
resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==}
engines: {node: '>=12'}
@ -17106,6 +17113,10 @@ snapshots:
dotenv-expand@10.0.0: {}
+dotenv-expand@12.0.2:
+dependencies:
+dotenv: 16.5.0
dotenv@16.0.3: {}
dotenv@16.4.7: {}

View File

@ -31,7 +31,7 @@ POSTGRES_USER=docker
POSTGRES_PASSWORD=docker
TRIGGER_DB=trigger
-DB_HOST=localhost
+DB_HOST=host.docker.internal
DB_PORT=5432
DB_SCHEMA=sigma

trigger/auth.htpasswd Normal file
View File

@ -0,0 +1 @@
registry-user:$2y$05$6ingYqw0.3j13dxHY4w3neMSvKhF3pvRmc0AFifScWsVA9JpuLwNK

View File

@ -146,7 +146,7 @@ services:
- webapp
volumes:
# registry-user:very-secure-indeed
-  - ../registry/auth.htpasswd:/auth/htpasswd:ro
+  - ./auth.htpasswd:/auth/htpasswd:ro
environment:
REGISTRY_AUTH: htpasswd
REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm

View File

@ -52,6 +52,7 @@
"SESSION_SECRET", "SESSION_SECRET",
"APP_ORIGIN", "APP_ORIGIN",
"LOGIN_ORIGIN", "LOGIN_ORIGIN",
"API_BASE_URL",
"POSTHOG_PROJECT_KEY", "POSTHOG_PROJECT_KEY",
"AUTH_GOOGLE_CLIENT_ID", "AUTH_GOOGLE_CLIENT_ID",
"AUTH_GOOGLE_CLIENT_SECRET", "AUTH_GOOGLE_CLIENT_SECRET",