From a548bae670eb39b4a7a7bb28bf4e32803481cb4e Mon Sep 17 00:00:00 2001
From: Manoj
Date: Sat, 25 Oct 2025 12:53:23 +0530
Subject: [PATCH] feat: add Ollama container and update ingestion status for
 unchanged documents

---
 .../app/jobs/ingest/ingest-document.logic.ts |  6 ++++++
 hosting/docker/.env                          |  2 +-
 hosting/docker/docker-compose.yaml           | 20 +++++++++++++++++++
 hosting/docker/scripts/ollama-init.sh        | 18 +++++++++++++++++
 4 files changed, 45 insertions(+), 1 deletion(-)
 create mode 100644 hosting/docker/scripts/ollama-init.sh

diff --git a/apps/webapp/app/jobs/ingest/ingest-document.logic.ts b/apps/webapp/app/jobs/ingest/ingest-document.logic.ts
index 20c1866..7798df2 100644
--- a/apps/webapp/app/jobs/ingest/ingest-document.logic.ts
+++ b/apps/webapp/app/jobs/ingest/ingest-document.logic.ts
@@ -101,6 +101,12 @@ export async function processDocumentIngestion(
   // Early return for unchanged documents
   if (differentialDecision.strategy === "skip_processing") {
     logger.log("Document content unchanged, skipping processing");
+    await prisma.ingestionQueue.update({
+      where: { id: payload.queueId },
+      data: {
+        status: IngestionStatus.COMPLETED,
+      },
+    });
     return {
       success: true,
     };
diff --git a/hosting/docker/.env b/hosting/docker/.env
index c2ac97f..9f0420a 100644
--- a/hosting/docker/.env
+++ b/hosting/docker/.env
@@ -43,7 +43,7 @@ NEO4J_PASSWORD=27192e6432564f4788d55c15131bd5ac
 NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
 
 OPENAI_API_KEY=
-OLLAMA_URL=
+OLLAMA_URL=http://ollama:11434
 EMBEDDING_MODEL=text-embedding-3-small
 MODEL=gpt-4.1-2025-04-14
 
diff --git a/hosting/docker/docker-compose.yaml b/hosting/docker/docker-compose.yaml
index be35c67..5ed47be 100644
--- a/hosting/docker/docker-compose.yaml
+++ b/hosting/docker/docker-compose.yaml
@@ -108,6 +108,25 @@ services:
       retries: 10
       start_period: 20s
 
+  ollama:
+    container_name: core-ollama
+    image: ollama/ollama:0.12.6
+    ports:
+      - "11434:11434"
+    volumes:
+      - ollama_data:/root/.ollama
+      - ./scripts/ollama-init.sh:/usr/local/bin/ollama-init.sh:ro
+    networks:
+      - core
+    entrypoint: ["/bin/bash", "/usr/local/bin/ollama-init.sh"]
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 90s
+    restart: unless-stopped
+
 networks:
   core:
     name: core
@@ -117,3 +136,4 @@ volumes:
   postgres_data:
   neo4j_data:
   shared:
+  ollama_data:
diff --git a/hosting/docker/scripts/ollama-init.sh b/hosting/docker/scripts/ollama-init.sh
new file mode 100644
index 0000000..c5e0758
--- /dev/null
+++ b/hosting/docker/scripts/ollama-init.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -e
+
+echo "Starting Ollama server..."
+ollama serve &
+OLLAMA_PID=$!
+
+echo "Waiting for Ollama server to be ready..."
+sleep 5
+
+echo "Pulling mxbai-embed-large model..."
+ollama pull mxbai-embed-large
+
+echo "Model pulled successfully!"
+echo "Ollama is ready to accept requests."
+
+# Keep the Ollama server running
+wait $OLLAMA_PID