From f1e5ad99f21c4318fb464cf75d226679b033dfdd Mon Sep 17 00:00:00 2001 From: Ruben van der Linde Date: Tue, 24 Mar 2026 15:42:14 +0100 Subject: [PATCH] feat: Add shared docker-compose and development environment docs Moves the docker-compose from openregister to the shared .github repo so all ConductionNL apps can use the same development environment. Includes GreenMail and Open-Xchange for mail testing, seed scripts for mail/PIM/OX data, and full documentation of all profiles and services. --- docker-compose.yml | 1407 +++++++++++++++++++++++ docker/README-DATABASE-TESTING.md | 279 +++++ docker/dolphin/Dockerfile | 49 + docker/dolphin/api_server.py | 272 +++++ docker/keycloak/commonground-realm.json | 236 ++++ docker/mail/seed-mail.sh | 310 +++++ docker/mail/seed-ox.sh | 225 ++++ docker/mail/seed-pim.sh | 241 ++++ docker/postgres/init-exapps.sh | 31 + docker/postgres/init-exapps.sql | 16 + docker/postgres/init-extensions.sql | 102 ++ docker/test-database-compatibility.sh | 281 +++++ docs/development-environment.md | 381 ++++++ 13 files changed, 3830 insertions(+) create mode 100644 docker-compose.yml create mode 100644 docker/README-DATABASE-TESTING.md create mode 100644 docker/dolphin/Dockerfile create mode 100644 docker/dolphin/api_server.py create mode 100644 docker/keycloak/commonground-realm.json create mode 100755 docker/mail/seed-mail.sh create mode 100755 docker/mail/seed-ox.sh create mode 100755 docker/mail/seed-pim.sh create mode 100644 docker/postgres/init-exapps.sh create mode 100644 docker/postgres/init-exapps.sql create mode 100644 docker/postgres/init-extensions.sql create mode 100644 docker/test-database-compatibility.sh create mode 100644 docs/development-environment.md diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..37a1a0f --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,1407 @@ +############################################################################## +# Conduction Nextcloud Development Environment 
+# +# Shared docker-compose for all ConductionNL Nextcloud app development. +# Lives in the ConductionNL/.github repository so every project can use it. +# +# Quick start: +# docker compose -f .github/docker-compose.yml up -d +# +# With profiles: +# docker compose -f .github/docker-compose.yml --profile mail up -d +# docker compose -f .github/docker-compose.yml --profile ai --profile mail up -d +# +# See docs/development-environment.md for full documentation. +############################################################################## + +volumes: + nextcloud: + apps: + db: + config: + ollama: + presidio-models: + n8n: + tgi-models: + openllm-models: + openllm-cache: + solr: + zookeeper: + elasticsearch: + open-webui: + harp-data: + openproject-pgdata: + openproject-assets: + xwiki-data: + exapp-n8n: + exapp-openwebui: + exapp-keycloak: + exapp-redis: + exapp-openklant: + exapp-openzaak: + exapp-opentalk: + exapp-valtimo: + exapp-livekit: + exapp-minio: + ox-db: + ox-etc: + ox-data: + valtimo-data: + openzaak-data: + openklant-data: + +networks: + default: + name: conduction-network + +services: + # =========================================================================== + # Core Services (start by default) + # =========================================================================== + + # PostgreSQL Database (default) + # Recommended for production use with vector search capabilities + db: + image: pgvector/pgvector:pg16 + restart: always + container_name: conduction-postgres + volumes: + - db:/var/lib/postgresql/data + - ./docker/postgres/init-extensions.sql:/docker-entrypoint-initdb.d/01-init-extensions.sql:ro + - ./docker/postgres/init-exapps.sh:/docker-entrypoint-initdb.d/02-init-exapps.sh:ro + environment: + - POSTGRES_DB=nextcloud + - POSTGRES_USER=nextcloud + - POSTGRES_PASSWORD=!ChangeMe! 
+ ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U nextcloud -d nextcloud"] + interval: 10s + timeout: 5s + retries: 5 + command: > + postgres + -c shared_preload_libraries=pg_trgm,vector + -c max_connections=200 + -c shared_buffers=256MB + -c effective_cache_size=1GB + -c maintenance_work_mem=64MB + -c checkpoint_completion_target=0.9 + -c wal_buffers=16MB + -c default_statistics_target=100 + -c random_page_cost=1.1 + -c effective_io_concurrency=200 + -c work_mem=4MB + -c min_wal_size=1GB + -c max_wal_size=4GB + + # Nextcloud Application Server (PostgreSQL - Default) + # Access at: http://localhost:8080 + # Default credentials: admin / admin + nextcloud: + user: root + container_name: nextcloud + image: nextcloud + restart: always + ports: + - 8080:80 + links: + - db + volumes: + - nextcloud:/var/www/html:rw + # App mounts — each app directory is a sibling of .github in the workspace + - ../openregister:/var/www/html/custom_apps/openregister + - ../opencatalogi:/var/www/html/custom_apps/opencatalogi + - ../softwarecatalog:/var/www/html/custom_apps/softwarecatalog + - ../nldesign:/var/www/html/custom_apps/nldesign + - ../mydash:/var/www/html/custom_apps/mydash + - ../opentalk:/var/www/html/custom_apps/opentalk + - ../valtimo:/var/www/html/custom_apps/valtimo + - ../openzaak:/var/www/html/custom_apps/openzaak + - ../openklant:/var/www/html/custom_apps/openklant + - ../docudesk:/var/www/html/custom_apps/docudesk + - ../procest:/var/www/html/custom_apps/procest + - ../pipelinq:/var/www/html/custom_apps/pipelinq + - ../zaakafhandelapp:/var/www/html/custom_apps/zaakafhandelapp + - ../larpingapp:/var/www/html/custom_apps/larpingapp + # Custom apps directory from openregister (contains pre-installed apps) + - ../openregister/custom_apps:/var/www/html/custom_apps + environment: + # Database configuration (PostgreSQL) + - POSTGRES_DB=nextcloud + - POSTGRES_USER=nextcloud + - POSTGRES_PASSWORD=!ChangeMe! 
+ - POSTGRES_HOST=db + - TZ=Europe/Amsterdam + - NEXTCLOUD_ADMIN_USER=admin + - NEXTCLOUD_ADMIN_PASSWORD=admin + # PHP Configuration + - PHP_MEMORY_LIMIT=4G + - PHP_UPLOAD_LIMIT=2G + - PHP_POST_MAX_SIZE=2G + # AI Service endpoints (when using --profile ai) + - TGI_LLM_URL=http://conduction-tgi-llm:80 + - DOLPHIN_VLM_URL=http://conduction-dolphin-vlm:5000 + - PRESIDIO_URL=http://conduction-presidio-analyzer:5001 + - N8N_URL=http://conduction-n8n:5678 + # AppAPI ExApp endpoints (when using --profile exapps) + - EXAPP_N8N_URL=http://conduction-exapp-n8n:23000 + - EXAPP_OPENWEBUI_URL=http://conduction-exapp-openwebui:23000 + - EXAPP_KEYCLOAK_URL=http://conduction-exapp-keycloak:23000 + - EXAPP_OPENKLANT_URL=http://conduction-exapp-openklant:23000 + - EXAPP_OPENZAAK_URL=http://conduction-exapp-openzaak:23000 + - EXAPP_OPENTALK_URL=http://conduction-exapp-opentalk:23000 + - EXAPP_VALTIMO_URL=http://conduction-exapp-valtimo:23000 + + # n8n ExApp - Workflow Automation (starts by default) + # Access via Nextcloud: /index.php/apps/app_api/proxy/n8n + exapp-n8n: + image: ghcr.io/conductionnl/n8n-nextcloud:latest + container_name: conduction-exapp-n8n + restart: always + volumes: + - exapp-n8n:/data + environment: + - APP_ID=n8n + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23000 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=00iz5gkT+brtNwjpZR/28ttzZFdEFo9elARhgMQd0cMu7Kq6OF63KMFwFbxZTx9VbgsNBTVKhHnLUBo4RYg2pjINRluTUj0Vwb2aQDG0ffT4/8TX1UZC8tU42oi5MuV6 + - NEXTCLOUD_URL=http://nextcloud + - N8N_TIMEZONE=Europe/Amsterdam + - N8N_PATH_PREFIX=/index.php/apps/app_api/proxy/n8n + depends_on: + - db + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + + # =========================================================================== + # Demo 
Mode (use --profile demo)
+  # Self-contained demo that installs apps from the Nextcloud app store.
+  # No source code mounts needed — just run and demo.
+  # ===========================================================================
+
+  # Nextcloud Demo (installs apps from app store, no volume mounts)
+  # Access at: http://localhost:8084
+  # NOTE: uses its own container name and host port — the default 'nextcloud'
+  # service has no profile, so it ALSO starts under --profile demo; reusing
+  # its name/port would make `docker compose --profile demo up` fail.
+  nextcloud-demo:
+    profiles:
+      - demo
+    user: root
+    container_name: nextcloud-demo
+    image: nextcloud
+    restart: always
+    ports:
+      - 8084:80
+    links:
+      - db
+    volumes:
+      # NOTE(review): shared with the default 'nextcloud' service — two running
+      # instances on one data directory; consider a dedicated volume.
+      - nextcloud:/var/www/html:rw
+    environment:
+      - POSTGRES_DB=nextcloud
+      - POSTGRES_USER=nextcloud
+      - POSTGRES_PASSWORD=!ChangeMe!
+      - POSTGRES_HOST=db
+      - TZ=Europe/Amsterdam
+      - NEXTCLOUD_ADMIN_USER=admin
+      - NEXTCLOUD_ADMIN_PASSWORD=admin
+      - PHP_MEMORY_LIMIT=4G
+    entrypoint: /bin/sh
+    command:
+      - -c
+      - |
+        # Start Apache in background
+        apache2-foreground &
+        # Wait for Nextcloud to initialize.
+        # occ must run as www-data (the owner of config.php) — running it as
+        # root makes the Nextcloud console refuse to execute.
+        until su -p www-data -s /bin/sh -c 'php occ status' 2>/dev/null | grep -q "installed: true"; do sleep 5; done
+        # Set trusted domain (matches the published host port above)
+        su -p www-data -s /bin/sh -c 'php occ config:system:set trusted_domains 1 --value=localhost:8084'
+        # Install apps from store (best effort — keep going if one fails)
+        su -p www-data -s /bin/sh -c 'php occ app:install openregister --allow-unstable' || true
+        su -p www-data -s /bin/sh -c 'php occ app:install opencatalogi --allow-unstable' || true
+        su -p www-data -s /bin/sh -c 'php occ app:install softwarecatalog --allow-unstable' || true
+        # Keep container running
+        wait
+
+  # ===========================================================================
+  # Alternative Database (use --profile mariadb)
+  # ===========================================================================
+
+  # MariaDB Database (for compatibility testing)
+  # Note: Vector search features will not be available with MariaDB
+  db-mariadb:
+    profiles:
+      - mariadb
+    image: mariadb:11.2
+    restart: always
+    container_name: conduction-mariadb
+    volumes:
+      # NOTE(review): this is the SAME named volume Postgres uses for
+      # /var/lib/postgresql/data — whichever engine initialises second will
+      # find a non-empty data dir and fail. Consider a dedicated volume.
+      - db:/var/lib/mysql
+    environment:
+      - MYSQL_ROOT_PASSWORD=!ChangeMe!
+      - MYSQL_DATABASE=nextcloud
+      - MYSQL_USER=nextcloud
+      - MYSQL_PASSWORD=!ChangeMe!
+    ports:
+      - "3306:3306"
+    command: >
+      --transaction-isolation=READ-COMMITTED
+      --log-bin=binlog
+      --binlog-format=ROW
+      --innodb-file-per-table=1
+      --max-connections=200
+      --innodb-buffer-pool-size=256M
+      --innodb-log-file-size=64M
+      --character-set-server=utf8mb4
+      --collation-server=utf8mb4_unicode_ci
+    healthcheck:
+      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+
+  # Nextcloud with MariaDB (use --profile mariadb)
+  # Access at: http://localhost:8092
+  # NOTE: published on its own host port — the default 'nextcloud' service has
+  # no profile, so it ALSO starts under --profile mariadb and already owns 8080.
+  nextcloud-mariadb:
+    profiles:
+      - mariadb
+    user: root
+    container_name: nextcloud-mariadb
+    image: nextcloud
+    restart: always
+    ports:
+      - 8092:80
+    links:
+      - db-mariadb
+    volumes:
+      # NOTE(review): shares the 'nextcloud' data volume with the default
+      # Postgres-backed instance — two instances with different DB backends on
+      # one config.php/data dir; consider a dedicated volume.
+      - nextcloud:/var/www/html:rw
+      - ../openregister/custom_apps:/var/www/html/custom_apps
+      - ../openregister:/var/www/html/custom_apps/openregister
+    environment:
+      - MYSQL_DATABASE=nextcloud
+      - MYSQL_USER=nextcloud
+      - MYSQL_PASSWORD=!ChangeMe!
+      - MYSQL_HOST=db-mariadb
+      - TZ=Europe/Amsterdam
+      - NEXTCLOUD_ADMIN_USER=admin
+      - NEXTCLOUD_ADMIN_PASSWORD=admin
+      - PHP_MEMORY_LIMIT=4G
+      - PHP_UPLOAD_LIMIT=2G
+      - PHP_POST_MAX_SIZE=2G
+    depends_on:
+      db-mariadb:
+        condition: service_healthy
+
+  # ===========================================================================
+  # Mail (use --profile mail)
+  # ===========================================================================
+
+  # GreenMail - Lightweight SMTP/IMAP/POP3 test mail server
+  # Web UI at: http://localhost:8085
+  # SMTP: localhost:3025, IMAP: localhost:3143
+  # Accounts auto-created on first login (user=password=email address)
+  # Seed data: bash .github/docker/mail/seed-mail.sh
+  # Documentation: https://greenmail-mail-test.github.io/greenmail/
+  greenmail:
+    profiles:
+      - mail
+      - ox
+    image: greenmail/standalone:2.1.2
+    container_name: conduction-greenmail
+    restart: always
+    ports:
+      - "3025:3025"  # SMTP
+      - "3143:3143"  # IMAP
+      - "3110:3110"  # POP3
+      - "8085:8080"  # Web UI / REST API
+    environment:
+      GREENMAIL_OPTS:
"-Dgreenmail.setup.test.all -Dgreenmail.hostname=0.0.0.0 -Dgreenmail.auth.disabled" + JAVA_OPTS: "-Xmx256m" + healthcheck: + test: ["CMD-SHELL", "nc -z localhost 3025 && nc -z localhost 3143"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 15s + deploy: + resources: + limits: + memory: 512M + reservations: + memory: 128M + + # Open-Xchange App Suite - Full Email and Groupware (use --profile ox) + # Access at: http://localhost:8087/appsuite + # Login: oxadmin / oxadmin (context admin) or created users + # Uses GreenMail as IMAP/SMTP backend (starts automatically with ox profile) + # Image: jamesregis/open-xchange-appsuite (community, includes OX Text + Spreadsheet) + # Seed data: see docs/development-environment.md + # Documentation: https://documentation.open-xchange.com/ + + # OX requires its own MariaDB (uses MySQL-specific init tools) + ox-mariadb: + profiles: + - ox + image: mariadb:11.2 + container_name: conduction-ox-mariadb + restart: always + volumes: + - ox-db:/var/lib/mysql + environment: + - MYSQL_ROOT_PASSWORD=root_password + healthcheck: + test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] + interval: 10s + timeout: 5s + retries: 10 + + open-xchange: + profiles: + - ox + image: jamesregis/open-xchange-appsuite:latest + container_name: conduction-open-xchange + restart: always + ports: + - "8087:80" + volumes: + - ox-etc:/ox/etc + - ox-data:/var/opt/filestore + environment: + # Database (points to dedicated OX MariaDB) + - OX_CONFIG_DATABASE_HOST=conduction-ox-mariadb + - OX_CONFIG_DATABASE_ROOT_PASSWORD=root_password + - OX_CONFIG_DATABASE_USER=openxchange + - OX_CONFIG_DATABASE_PASSWORD=ox_db_password + # Admin credentials + - OX_ADMIN_MASTER_LOGIN=oxadminmaster + - OX_ADMIN_MASTER_PASSWORD=admin_master_password + # Context admin (login to web UI with these) + - OX_CONTEXT_ADMIN_LOGIN=oxadmin + - OX_CONTEXT_ADMIN_PASSWORD=oxadmin + - OX_CONTEXT_ADMIN_EMAIL=admin@test.local + - OX_CONTEXT_ID=1 + # Server config + - 
OX_SERVER_NAME=oxserver + - OX_SERVER_MEMORY=2048 + # IMAP authentication (points to shared GreenMail service) + - IMAP_SERVER=conduction-greenmail + - IMAP_SERVER_PORT=3143 + # Document collaboration DB (same MariaDB) + - OX_DCSDB_DB_HOST=conduction-ox-mariadb + - OX_DCSDB_DB_USER=dcsdb + - OX_DCSDB_DB_PASSWORD=dcsdb_password + depends_on: + ox-mariadb: + condition: service_healthy + greenmail: + condition: service_healthy + deploy: + resources: + limits: + memory: 4G + reservations: + memory: 2G + + # =========================================================================== + # Search Engines (use --profile solr or --profile elasticsearch) + # =========================================================================== + + # ZooKeeper for SolrCloud coordination + zookeeper: + profiles: + - solr + - search + image: zookeeper:3.8 + container_name: conduction-zookeeper + restart: always + environment: + - ZOO_MY_ID=1 + - ZOO_SERVERS=server.1=0.0.0.0:2888:3888;2181 + volumes: + - zookeeper:/data + ports: + - "2181:2181" + healthcheck: + test: ["CMD-SHELL", "echo stat | nc localhost 2181"] + interval: 30s + timeout: 10s + retries: 3 + + # Solr in SolrCloud mode + # Access at: http://localhost:8983 + solr: + profiles: + - solr + - search + image: solr:9-slim + container_name: conduction-solr + restart: always + ports: + - "8983:8983" + volumes: + - solr:/var/solr + environment: + - SOLR_HEAP=512m + - ZK_HOST=zookeeper:2181 + depends_on: + - zookeeper + command: + - bash + - -c + - | + echo "Waiting for ZooKeeper..." + while ! nc -z zookeeper 2181; do sleep 1; done + echo "ZooKeeper is ready!" 
+ solr-foreground -c -z zookeeper:2181 + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8983/solr/admin/info/system || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + + # Elasticsearch + # Access at: http://localhost:9200 + elasticsearch: + profiles: + - elasticsearch + - search + image: elasticsearch:8.11.3 + container_name: conduction-elasticsearch + restart: always + ports: + - "9200:9200" + - "9300:9300" + volumes: + - elasticsearch:/usr/share/elasticsearch/data + environment: + - discovery.type=single-node + - xpack.security.enabled=false + - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - cluster.name=conduction-cluster + - node.name=conduction-node-1 + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 512M + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:9200/_cluster/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + + # =========================================================================== + # AI Services (use --profile ai) + # =========================================================================== + + # Ollama for local LLM inference (use --profile ollama) + # Access at: http://localhost:11434 + # Pull models: docker exec conduction-ollama ollama pull llama3.2 + ollama: + profiles: + - ollama + image: ollama/ollama:latest + container_name: conduction-ollama + restart: always + ports: + - "11434:11434" + volumes: + - ollama:/root/.ollama + environment: + - OLLAMA_HOST=0.0.0.0 + - OLLAMA_NUM_PARALLEL=2 + - OLLAMA_KEEP_ALIVE=15m + deploy: + resources: + limits: + memory: 16G + reservations: + memory: 8G + devices: + - driver: nvidia + count: all + capabilities: [gpu] + shm_size: '2gb' + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:11434/api/tags || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + + # OpenAnonymiser - PII detection and anonymisation + # Access at: http://localhost:5002 + openanonymiser: + profiles: + - ai + image: 
mwest2020/openanonymiser:dev + container_name: conduction-openanonymiser + restart: always + ports: + - "5002:8080" + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8080/api/v1/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + + # Presidio Analyzer - Microsoft PII detection (recommended for production) + # Access at: http://localhost:5001 + presidio-analyzer: + profiles: + - ai + image: mcr.microsoft.com/presidio-analyzer:latest + container_name: conduction-presidio-analyzer + restart: always + ports: + - "5001:3000" + environment: + - PORT=3000 + - LOG_LEVEL=INFO + - PRESIDIO_ANALYZER_LANGUAGES=en,nl,de,fr,es + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + healthcheck: + test: ["CMD-SHELL", "python3 -c \"import requests; requests.get('http://localhost:3000/health')\" || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + + # Hugging Face Text Generation Inference (TGI) with OpenAI-compatible API + # Access at: http://localhost:8081 + tgi-llm: + profiles: + - ai + image: ghcr.io/huggingface/text-generation-inference:latest + container_name: conduction-tgi-llm + restart: always + ports: + - "8081:80" + volumes: + - tgi-models:/data + environment: + - MODEL_ID=mistralai/Mistral-7B-Instruct-v0.2 + - MAX_INPUT_LENGTH=4096 + - MAX_TOTAL_TOKENS=8192 + - MAX_BATCH_PREFILL_TOKENS=4096 + - MAX_CONCURRENT_REQUESTS=128 + - MAX_WAITING_TOKENS=20 + deploy: + resources: + limits: + memory: 16G + reservations: + memory: 8G + devices: + - driver: nvidia + count: all + capabilities: [gpu] + shm_size: '2gb' + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 120s + + # Dolphin VLM - Document parsing via Vision-Language Model + # Access at: http://localhost:8083 + dolphin-vlm: + profiles: + - ai + build: + context: 
./docker/dolphin + dockerfile: Dockerfile + container_name: conduction-dolphin-vlm + restart: always + ports: + - "8083:5000" + volumes: + - ./docker/dolphin/models:/app/models + environment: + - MODEL_PATH=/app/models + - LOG_LEVEL=INFO + deploy: + resources: + limits: + memory: 8G + reservations: + memory: 4G + devices: + - driver: nvidia + count: all + capabilities: [gpu] + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:5000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 120s + + # Open WebUI + Ollama ExApp - AI Chat with built-in LLM inference + # Access via Nextcloud: /index.php/apps/app_api/proxy/open_webui + exapp-openwebui: + profiles: + - ai + image: ghcr.io/conductionnl/open-webui-nextcloud:latest + container_name: conduction-exapp-openwebui + restart: always + volumes: + - exapp-openwebui:/data + environment: + - APP_ID=open_webui + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23000 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=${OPENWEBUI_APP_SECRET:-dev-secret} + - NEXTCLOUD_URL=http://nextcloud + - OLLAMA_DEFAULT_MODEL=llama3.2:1b + - OLLAMA_NUM_PARALLEL=2 + - OLLAMA_KEEP_ALIVE=15m + deploy: + resources: + limits: + memory: 16G + reservations: + memory: 4G + devices: + - driver: nvidia + count: all + capabilities: [gpu] + shm_size: '2gb' + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 120s + + # =========================================================================== + # Standalone Services (use --profile standalone) + # Use these instead of ExApps for standalone testing + # =========================================================================== + + # n8n Standalone - Workflow Automation + # Access at: http://localhost:5678 + # Default credentials: admin / 
admin + n8n: + profiles: + - standalone + image: n8nio/n8n:latest + container_name: conduction-n8n + restart: always + user: root + ports: + - "5678:5678" + volumes: + - n8n:/root/.n8n + - /var/run/docker.sock:/var/run/docker.sock + environment: + - DB_TYPE=postgresdb + - DB_POSTGRESDB_HOST=conduction-postgres + - DB_POSTGRESDB_PORT=5432 + - DB_POSTGRESDB_DATABASE=n8n + - DB_POSTGRESDB_USER=nextcloud + - DB_POSTGRESDB_PASSWORD=!ChangeMe! + - N8N_BASIC_AUTH_ACTIVE=true + - N8N_BASIC_AUTH_USER=admin + - N8N_BASIC_AUTH_PASSWORD=admin + - N8N_HOST=localhost + - N8N_PORT=5678 + - N8N_PROTOCOL=http + - WEBHOOK_URL=http://localhost:5678/ + - GENERIC_TIMEZONE=Europe/Amsterdam + - TZ=Europe/Amsterdam + - EXECUTIONS_PROCESS=main + - EXECUTIONS_DATA_SAVE_ON_ERROR=all + - EXECUTIONS_DATA_SAVE_ON_SUCCESS=all + - EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS=true + - WORKFLOWS_DEFAULT_NAME=My Workflow + - N8N_DIAGNOSTICS_ENABLED=false + - N8N_API_KEYS_ENABLED=true + depends_on: + - db + healthcheck: + test: ["CMD-SHELL", "wget --spider -q http://localhost:5678/healthz || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + + # Open WebUI Standalone - Web interface for LLMs + # Access at: http://localhost:3000 + open-webui: + profiles: + - standalone + image: ghcr.io/open-webui/open-webui:main + container_name: conduction-open-webui + restart: always + ports: + - "3000:8080" + volumes: + - open-webui:/app/backend/data + environment: + - OPENAI_API_BASE_URL=http://conduction-tgi-llm:80/v1 + - OPENAI_API_KEY=not-needed + - OLLAMA_BASE_URL=http://conduction-ollama:11434 + - WEBUI_AUTH=true + - ENABLE_SIGNUP=true + - WEBUI_SECRET_KEY=conduction-secret-key-change-me + depends_on: + - tgi-llm + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8080/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + + # OpenLLM Management Interface (use --profile llm-management) + # Access at: http://localhost:3002 + openllm: + profiles: + - 
llm-management + image: ghcr.io/bentoml/openllm:latest + container_name: conduction-openllm + restart: always + ports: + - "3002:3000" + - "8082:8082" + volumes: + - openllm-models:/models + - openllm-cache:/root/.cache + environment: + - OPENLLM_MODEL=mistralai/Mistral-7B-Instruct-v0.2 + - OPENLLM_BACKEND=vllm + - OPENLLM_PORT=3000 + - OPENLLM_API_PORT=8082 + - CUDA_VISIBLE_DEVICES=0 + - OPENLLM_MAX_MODEL_LEN=4096 + - OPENLLM_GPU_MEMORY_UTILIZATION=0.9 + deploy: + resources: + limits: + memory: 16G + reservations: + memory: 8G + devices: + - driver: nvidia + count: all + capabilities: [gpu] + shm_size: '2gb' + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:3000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 180s + command: start mistralai/Mistral-7B-Instruct-v0.2 --backend vllm + + # =========================================================================== + # UI Frontends (use --profile ui) + # =========================================================================== + + # Tilburg WOO UI - Public interface for WOO documents + # Access at: http://localhost:3000 + tilburg-woo-ui: + profiles: + - ui + build: + context: ../tilburg-woo-ui + dockerfile: Dockerfile.dev + target: development + container_name: conduction-tilburg-woo-ui + restart: always + ports: + - "3000:81" + volumes: + - ../tilburg-woo-ui/src:/app/src + environment: + - NGINX_ROOT_DIR=/app/public_html + - NGINX_NEXTCLOUD_UPSTREAM=http://nextcloud:80 + - NGINX_TARGET_HOST=nextcloud + - SITE_TITLE=Softwarecatalogus + depends_on: + - nextcloud + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:81/ || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 120s + command: + - /bin/sh + - -c + - | + envsubst '$$NGINX_OPENCONNECTOR_UPSTREAM $$NGINX_NEXTCLOUD_UPSTREAM $$NGINX_NEXTCLOUD_DOMAIN $$NGINX_TARGET_HOST $$NGINX_ROOT_DIR' < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf + nginx + node scripts/generate-runtime-config.js 
/app/public_html/runtime-config.js + NODE_ENV=development yarn build:web + echo "Initial build complete. Watching for changes..." + while true; do + inotifywait -r -e modify,create,delete --exclude '\.tmp' src/ public/ package.json + sleep 2 + echo "Change detected, rebuilding..." + NODE_ENV=development yarn build:web + echo "Rebuild complete." + done + + # =========================================================================== + # External Service Integrations (use --profile or --profile integrations) + # =========================================================================== + + # OpenProject - Project Management + # Access at: http://localhost:8086 + openproject: + profiles: + - openproject + - integrations + image: openproject/openproject:15 + container_name: conduction-openproject + restart: always + ports: + - "8086:80" + volumes: + - openproject-pgdata:/var/openproject/pgdata + - openproject-assets:/var/openproject/assets + environment: + - OPENPROJECT_SECRET_KEY_BASE=secret-change-me + - OPENPROJECT_HOST__NAME=localhost:8086 + - OPENPROJECT_HTTPS=false + - OPENPROJECT_DEFAULT__LANGUAGE=nl + deploy: + resources: + limits: + memory: 4G + reservations: + memory: 1G + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:80/health_checks/default || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 120s + + # XWiki - Wiki Platform + # Access at: http://localhost:8088 + xwiki: + profiles: + - xwiki + - integrations + image: xwiki:lts-postgres-tomcat + container_name: conduction-xwiki + restart: always + ports: + - "8088:8080" + volumes: + - xwiki-data:/usr/local/xwiki + environment: + - DB_USER=nextcloud + - DB_PASSWORD=!ChangeMe! 
+ - DB_HOST=db + - DB_DATABASE=xwiki + depends_on: + - db + deploy: + resources: + limits: + memory: 4G + reservations: + memory: 1G + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8080/rest || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 120s + + # =========================================================================== + # Common Ground Services (use --profile commonground or individual profiles) + # =========================================================================== + + # Valtimo - BPM and Case Management + # Access at: http://localhost:8089 + valtimo: + profiles: + - valtimo + - commonground + - integrations + image: ritense/gzac-backend:latest + container_name: conduction-valtimo + restart: always + ports: + - "8089:8080" + volumes: + - valtimo-data:/data + environment: + - SPRING_PROFILES_ACTIVE=docker + - SPRING_DATASOURCE_URL=jdbc:postgresql://db:5432/valtimo + - SPRING_DATASOURCE_USERNAME=nextcloud + - SPRING_DATASOURCE_PASSWORD=!ChangeMe! + - VALTIMO_APP_HOSTNAME=http://localhost:8089 + depends_on: + - db + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8080/actuator/health || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 120s + + # OpenZaak - ZGW API Case Management Backend + # Access at: http://localhost:8090 + openzaak: + profiles: + - openzaak + - commonground + - integrations + image: openzaak/open-zaak:latest + container_name: conduction-openzaak + restart: always + ports: + - "8090:8000" + volumes: + - openzaak-data:/app/private-media + environment: + - DB_HOST=db + - DB_NAME=openzaak + - DB_USER=nextcloud + - DB_PASSWORD=!ChangeMe! 
+ - SECRET_KEY=openzaak-secret-change-me + - ALLOWED_HOSTS=* + - DJANGO_SETTINGS_MODULE=openzaak.conf.docker + - IS_HTTPS=no + - CACHE_DEFAULT=db + - CACHE_AXES=db + depends_on: + - db + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 256M + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8000/ || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 60s + + # OpenKlant - Customer Interaction Registry + # Access at: http://localhost:8091 + openklant: + profiles: + - openklant + - commonground + - integrations + image: maykinmedia/open-klant:latest + container_name: conduction-openklant + restart: always + ports: + - "8091:8000" + volumes: + - openklant-data:/app/private-media + environment: + - DB_HOST=db + - DB_NAME=openklant + - DB_USER=nextcloud + - DB_PASSWORD=!ChangeMe! + - SECRET_KEY=openklant-secret-change-me + - ALLOWED_HOSTS=* + - DJANGO_SETTINGS_MODULE=openklant.conf.docker + - IS_HTTPS=no + - CACHE_DEFAULT=db + - CACHE_AXES=db + depends_on: + - db + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 256M + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8000/ || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 60s + + # =========================================================================== + # AppAPI Infrastructure (use --profile exapps) + # =========================================================================== + + # HaRP - High-performance AppAPI Reverse Proxy + harp: + profiles: + - exapps + image: ghcr.io/nextcloud/nextcloud-appapi-harp:release + container_name: conduction-harp + restart: always + ports: + - "8780:8780" + volumes: + - /var/run/docker.sock:/var/run/docker.sock:ro + - harp-data:/data + environment: + - HP_SHARED_KEY=harp-secret-change-me + - NC_INSTANCE_URL=http://nextcloud + - HP_EXAPPS_ADDRESS=0.0.0.0:8780 + - HP_FRP_ADDRESS=0.0.0.0:8782 + - HP_LOG_LEVEL=info + healthcheck: + test: ["CMD-SHELL", "curl -f 
http://localhost:8780/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + + # Docker Socket Proxy - Alternative Deploy Daemon (Legacy) + docker-socket-proxy: + profiles: + - exapps-legacy + image: ghcr.io/nextcloud/docker-socket-proxy:latest + container_name: conduction-docker-socket-proxy + restart: always + ports: + - "2375:2375" + volumes: + - /var/run/docker.sock:/var/run/docker.sock:ro + environment: + - HAPROXY_PASSWORD=docker-proxy-secret-change-me + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:2375/_ping || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + + # =========================================================================== + # ExApp Shared Infrastructure (use --profile commonground or --profile exapps) + # =========================================================================== + + # Redis - Shared cache for ExApp services + exapp-redis: + profiles: + - commonground + - exapps + image: redis:7-alpine + container_name: conduction-exapp-redis + restart: always + volumes: + - exapp-redis:/data + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + + # LiveKit - WebRTC media server for OpenTalk + exapp-livekit: + profiles: + - commonground + - exapps + image: livekit/livekit-server:latest + container_name: conduction-exapp-livekit + restart: always + command: ["--keys", "devkey: secret", "--dev"] + ports: + - "7880:7880" + - "7881:7881" + healthcheck: + test: ["CMD-SHELL", "wget -qO- http://localhost:7880/ || true"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 15s + + # MinIO - Object storage for OpenTalk + exapp-minio: + profiles: + - commonground + - exapps + image: minio/minio:latest + container_name: conduction-exapp-minio + restart: always + command: server /data --console-address ":9001" + volumes: + - exapp-minio:/data + environment: + - MINIO_ROOT_USER=minioadmin + - MINIO_ROOT_PASSWORD=minioadmin + healthcheck: + test: ["CMD", "mc", "ready", 
"local"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + + # =========================================================================== + # ExApp Services - Common Ground / Identity (use --profile commonground) + # =========================================================================== + + # Keycloak ExApp - Identity and Access Management + # Access at: http://localhost:8180 + exapp-keycloak: + profiles: + - commonground + - exapps + image: ghcr.io/conductionnl/keycloak-nextcloud:latest + container_name: conduction-exapp-keycloak + restart: always + volumes: + - exapp-keycloak:/data + environment: + - APP_ID=keycloak + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23002 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=kgPs6ikvLhHMAuC3havDYDvvWw/CYjmlfBMDpYfBY73WgRjdPL6naVC7IGV30D3/5DvNyLGPHrfrln/GYQO80CJzgexWN+1M11FsOt6cljd7EI9T0AZeShEQVt2BAzCM + - NEXTCLOUD_URL=http://nextcloud + - KC_DB=postgres + - KC_DB_URL=jdbc:postgresql://conduction-postgres:5432/keycloak + - KC_DB_USERNAME=nextcloud + - KC_DB_PASSWORD=!ChangeMe! 
+ - KC_BOOTSTRAP_ADMIN_USERNAME=admin + - KC_BOOTSTRAP_ADMIN_PASSWORD=admin + - KC_HEALTH_ENABLED=true + - KC_HTTP_ENABLED=true + - KC_HOSTNAME_STRICT=false + - KC_HOSTNAME=http://localhost:8180 + - KC_PROXY_HEADERS=xforwarded + - KEYCLOAK_REALM=commonground + - KEYCLOAK_API_SECRET=keycloak-exapp-internal-secret + ports: + - "8180:8080" + depends_on: + db: + condition: service_healthy + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 120s + + # =========================================================================== + # ExApp Services - Common Ground Applications (use --profile commonground) + # =========================================================================== + + # OpenKlant ExApp - Customer Interaction Registry + exapp-openklant: + profiles: + - commonground + - exapps + image: ghcr.io/conductionnl/openklant-exapp:latest + container_name: conduction-exapp-openklant + restart: always + volumes: + - exapp-openklant:/data + environment: + - APP_ID=openklant + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23005 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=qM1kgYpOVLEDCILz/2v4t9eSRdt1JKZTuxFNQbyveVC3QurU6zpdOWyIgRzd0nNxba7q7al6cX9bkWNbkkEdd3uQ9uyt4pgaxjfkozaub/fphD0h2dB5RIBD6wC4C1qW + - NEXTCLOUD_URL=http://nextcloud + - DB_HOST=conduction-postgres + - DB_NAME=openklant + - DB_USER=nextcloud + - DB_PASSWORD=!ChangeMe! 
+ - SECRET_KEY=openklant-exapp-secret-change-me + - ALLOWED_HOSTS=* + - DJANGO_SETTINGS_MODULE=openklant.conf.docker + - IS_HTTPS=no + - CACHE_DEFAULT=conduction-exapp-redis:6379/0 + - CACHE_AXES=conduction-exapp-redis:6379/0 + - KEYCLOAK_URL=http://conduction-exapp-keycloak:8080 + - KEYCLOAK_REALM=commonground + - KEYCLOAK_CLIENT_ID=openklant + depends_on: + db: + condition: service_healthy + exapp-redis: + condition: service_healthy + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 256M + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 90s + + # OpenZaak ExApp - ZGW API Case Management + exapp-openzaak: + profiles: + - commonground + - exapps + image: ghcr.io/conductionnl/openzaak-exapp:latest + container_name: conduction-exapp-openzaak + restart: always + volumes: + - exapp-openzaak:/data + environment: + - APP_ID=openzaak + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23003 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=GV7aEmqVY/NXnA0adbIhQsl0QL9XoUEtr611xlwVU3G4D7t5Xv4pscZ+8sRb4k2LKCDDTrbDWowIwOkhSs5s/czEN+M2TPSPq2Y6Xgd4VSFZfi1Rdofvc1WtBlmPrQYQ + - NEXTCLOUD_URL=http://nextcloud + - DB_HOST=conduction-postgres + - DB_NAME=openzaak + - DB_USER=nextcloud + - DB_PASSWORD=!ChangeMe! 
+ - SECRET_KEY=openzaak-exapp-secret-change-me + - ALLOWED_HOSTS=* + - DJANGO_SETTINGS_MODULE=openzaak.conf.docker + - IS_HTTPS=no + - CACHE_DEFAULT=conduction-exapp-redis:6379/1 + - CACHE_AXES=conduction-exapp-redis:6379/1 + - KEYCLOAK_URL=http://conduction-exapp-keycloak:8080 + - KEYCLOAK_REALM=commonground + - KEYCLOAK_CLIENT_ID=openzaak + depends_on: + db: + condition: service_healthy + exapp-redis: + condition: service_healthy + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 256M + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 90s + + # Valtimo ExApp - BPM and Case Management + exapp-valtimo: + profiles: + - commonground + - exapps + image: ghcr.io/conductionnl/valtimo-exapp:latest + container_name: conduction-exapp-valtimo + restart: always + volumes: + - exapp-valtimo:/data + environment: + - APP_ID=valtimo + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23004 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=piXIP+iaGAvzbULzEXtwAUZAAuybaS0tUwAe1JFxQqmhemQHbUqF7teFxsWq/ydjUXbC84ZXRfQyS3c88ObBwhALLdPoAKDBzUn1a+d2ghRoYkInoMUde1QzncFFfOWt + - NEXTCLOUD_URL=http://nextcloud + - SPRING_PROFILES_ACTIVE=docker + - SPRING_DATASOURCE_URL=jdbc:postgresql://conduction-postgres:5432/valtimo + - SPRING_DATASOURCE_USERNAME=nextcloud + - SPRING_DATASOURCE_PASSWORD=!ChangeMe! 
+ - SERVER_PORT=8080 + - JAVA_OPTS=-Xmx512m -Xms256m + - KEYCLOAK_AUTH_SERVER_URL=http://conduction-exapp-keycloak:8080 + - KEYCLOAK_REALM=commonground + - KEYCLOAK_RESOURCE=valtimo + depends_on: + db: + condition: service_healthy + exapp-keycloak: + condition: service_healthy + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 512M + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 180s + + # OpenTalk ExApp - Secure Video Conferencing + exapp-opentalk: + profiles: + - commonground + - exapps + image: ghcr.io/conductionnl/opentalk-exapp:latest + container_name: conduction-exapp-opentalk + restart: always + volumes: + - exapp-opentalk:/data + environment: + - APP_ID=opentalk + - APP_VERSION=1.0.0 + - APP_HOST=0.0.0.0 + - APP_PORT=23005 + - APP_PERSISTENT_STORAGE=/data + - APP_SECRET=HGF41cZ42HnNoA4eWUqhMquI9RFnk206ZVtJyT/1xK+X3u1bvSiyRdlfJlxL7YLn8uTMpKh/Bqb9VGKkRYAaoQUj6xtIcCdx4hTBJDrspEPTBgVbAF6/sg/VL6oNdhmv + - NEXTCLOUD_URL=http://nextcloud + - KEYCLOAK_URL=http://conduction-exapp-keycloak:8080 + - KEYCLOAK_REALM=commonground + - KEYCLOAK_CLIENT_ID=opentalk-controller + - KEYCLOAK_CLIENT_SECRET=opentalk-secret + - KEYCLOAK_BROWSER_URL=http://localhost:8180 + - KEYCLOAK_EXAPP_URL=http://conduction-exapp-keycloak:23002 + - KEYCLOAK_API_SECRET=keycloak-exapp-internal-secret + extra_hosts: + - "host.docker.internal:host-gateway" + depends_on: + db: + condition: service_healthy + exapp-redis: + condition: service_healthy + exapp-keycloak: + condition: service_healthy + exapp-livekit: + condition: service_healthy + exapp-minio: + condition: service_healthy + healthcheck: + test: ["CMD", "python3", "-c", "import os,urllib.request,urllib.error\ntry: 
urllib.request.urlopen('http://127.0.0.1:'+os.environ.get('APP_PORT','23000')+'/heartbeat')\nexcept urllib.error.HTTPError: pass"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 120s diff --git a/docker/README-DATABASE-TESTING.md b/docker/README-DATABASE-TESTING.md new file mode 100644 index 0000000..f31cff2 --- /dev/null +++ b/docker/README-DATABASE-TESTING.md @@ -0,0 +1,279 @@ +# Database Compatibility Testing + +OpenRegister supports both **PostgreSQL** (recommended) and **MariaDB/MySQL** for maximum flexibility. This document explains how to test both database backends. + +## Quick Start + +### PostgreSQL (Default - Recommended) + +PostgreSQL is the recommended database for production use, offering advanced features like vector search (pgvector) and full-text search (pg_trgm). + +```bash +# Start with PostgreSQL (default) +docker-compose up -d + +# Check status +docker-compose ps + +# View logs +docker-compose logs -f nextcloud +``` + +### MariaDB (For Compatibility Testing) + +MariaDB/MySQL support is maintained for backward compatibility and environments where PostgreSQL is not available. + +```bash +# Start with MariaDB +docker-compose --profile mariadb up -d + +# Check status +docker-compose --profile mariadb ps + +# View logs +docker-compose --profile mariadb logs -f nextcloud-mariadb +``` + +## Switching Between Databases + +### From PostgreSQL to MariaDB + +```bash +# Stop and remove all containers +docker-compose down + +# Remove volumes (WARNING: This deletes all data!) +docker volume rm openregister_db openregister_nextcloud openregister_config + +# Start with MariaDB +docker-compose --profile mariadb up -d +``` + +### From MariaDB to PostgreSQL + +```bash +# Stop and remove all containers +docker-compose --profile mariadb down + +# Remove volumes (WARNING: This deletes all data!) 
+docker volume rm openregister_db openregister_nextcloud openregister_config + +# Start with PostgreSQL +docker-compose up -d +``` + +## Running Integration Tests + +### With PostgreSQL + +```bash +# Start PostgreSQL stack +docker-compose up -d + +# Wait for Nextcloud to be ready +docker-compose logs -f nextcloud + +# Run Newman integration tests +docker exec -u 33 nextcloud newman run \ + /var/www/html/custom_apps/openregister/tests/integration/openregister-crud.postman_collection.json \ + --env-var "base_url=http://localhost" \ + --env-var "admin_user=admin" \ + --env-var "admin_password=admin" \ + --reporters cli +``` + +### With MariaDB + +```bash +# Start MariaDB stack +docker-compose --profile mariadb up -d + +# Wait for Nextcloud to be ready +docker-compose --profile mariadb logs -f nextcloud-mariadb + +# Run Newman integration tests +docker exec -u 33 nextcloud newman run \ + /var/www/html/custom_apps/openregister/tests/integration/openregister-crud.postman_collection.json \ + --env-var "base_url=http://localhost" \ + --env-var "admin_user=admin" \ + --env-var "admin_password=admin" \ + --reporters cli +``` + +## Database Access + +### PostgreSQL + +```bash +# Access PostgreSQL CLI +docker exec -it openregister-postgres psql -U nextcloud -d nextcloud + +# Example queries +\dt # List tables +\d oc_openregister_objects # Describe table +SELECT version(); # PostgreSQL version +``` + +### MariaDB + +```bash +# Access MariaDB CLI +docker exec -it openregister-mariadb mysql -u nextcloud -p'!ChangeMe!' 
nextcloud + +# Example queries +SHOW TABLES; # List tables +DESCRIBE oc_openregister_objects; # Describe table +SELECT VERSION(); # MariaDB version +``` + +## Database Configuration Details + +### PostgreSQL (Port 5432) + +- **Image:** pgvector/pgvector:pg16 +- **Extensions:** pg_trgm, vector, btree_gin, btree_gist, uuid-ossp +- **Auto-Install:** Extensions are automatically installed via `init-extensions.sql` +- **Preload Libraries:** `shared_preload_libraries='pg_trgm,vector'` +- **Features:** Vector search, full-text search, JSON operations +- **Optimizations:** Configured for high concurrency and performance + +**Connection String:** +``` +postgresql://nextcloud:!ChangeMe!@localhost:5432/nextcloud +``` + +**Automatic Extension Setup:** +The PostgreSQL container automatically installs and enables all required extensions on first startup: +1. Extensions are created via `/docker-entrypoint-initdb.d/01-init-extensions.sql` +2. Helper functions are created (`vector_cosine_distance`, `text_similarity_score`) +3. Database parameters are optimized (similarity threshold, work_mem) +4. 
Preload libraries are configured in docker-compose command section + +### MariaDB (Port 3306) + +- **Image:** mariadb:11.2 +- **Character Set:** utf8mb4_unicode_ci +- **Features:** Standard SQL, JSON support (basic) +- **Optimizations:** InnoDB tuning for performance + +**Connection String:** +``` +mysql://nextcloud:!ChangeMe!@localhost:3306/nextcloud +``` + +## Feature Comparison + +| Feature | PostgreSQL | MariaDB | +|---------|-----------|---------| +| Vector Search (pgvector) | ✅ Yes | ❌ No | +| Full-Text Search (native) | ✅ pg_trgm | ⚠️ Basic FULLTEXT | +| JSON Operations | ✅ Advanced | ⚠️ Basic | +| Performance | ✅ Excellent | ✅ Good | +| Type Strictness | ✅ Strict (safer) | ⚠️ Permissive | +| Production Ready | ✅ Recommended | ✅ Supported | + +## Continuous Integration + +For CI/CD pipelines, you can test both databases sequentially: + +```bash +#!/bin/bash +# test-both-databases.sh + +echo "=== Testing PostgreSQL ===" +docker-compose up -d +sleep 30 # Wait for initialization +# Run tests... +docker-compose down -v + +echo "=== Testing MariaDB ===" +docker-compose --profile mariadb up -d +sleep 30 # Wait for initialization +# Run tests... +docker-compose --profile mariadb down -v +``` + +## Troubleshooting + +### PostgreSQL Issues + +```bash +# Check PostgreSQL logs +docker logs openregister-postgres + +# Check if extensions are loaded +docker exec openregister-postgres psql -U nextcloud -d nextcloud -c "SELECT * FROM pg_extension;" + +# Reset PostgreSQL data +docker-compose down +docker volume rm openregister_db +docker-compose up -d +``` + +### MariaDB Issues + +```bash +# Check MariaDB logs +docker logs openregister-mariadb + +# Check character set +docker exec openregister-mariadb mysql -u nextcloud -p'!ChangeMe!' 
-e "SHOW VARIABLES LIKE 'character_set%';" + +# Reset MariaDB data +docker-compose --profile mariadb down +docker volume rm openregister_db +docker-compose --profile mariadb up -d +``` + +## Known Differences + +### Type Handling + +**PostgreSQL:** +- Strict type checking (strings ≠ integers) +- Explicit casting required for type mismatches +- JSON columns have specific operators + +**MariaDB:** +- Permissive type coercion (strings can be compared to integers) +- Implicit type conversion in most cases +- JSON stored as TEXT internally + +### Date/Time Functions + +**PostgreSQL:** +- `TO_CHAR(date, format)` for formatting +- `DATE_TRUNC()` for truncation +- Timezone-aware types + +**MariaDB:** +- `DATE_FORMAT(date, format)` for formatting +- `DATE()`, `MONTH()`, etc. for extraction +- Timezone support limited + +### Boolean Values + +**PostgreSQL:** +- Native boolean type +- TRUE/FALSE literals + +**MariaDB:** +- Stored as TINYINT(1) +- 1/0 values + +## Best Practices + +1. **Always test with PostgreSQL in development** - It catches more type errors early +2. **Run integration tests on both databases before releases** +3. **Use database-agnostic code** - Check platform and use appropriate SQL +4. **Document database-specific features** - If using pgvector, document MariaDB limitations +5. 
**Monitor query performance on both platforms** - Optimize for the most restrictive + +## Additional Resources + +- [PostgreSQL Documentation](https://www.postgresql.org/docs/) +- [MariaDB Documentation](https://mariadb.com/kb/en/) +- [Nextcloud Database Configuration](https://docs.nextcloud.com/server/latest/admin_manual/configuration_database/) +- [Docker Compose Profiles](https://docs.docker.com/compose/profiles/) + + diff --git a/docker/dolphin/Dockerfile b/docker/dolphin/Dockerfile new file mode 100644 index 0000000..c91fa69 --- /dev/null +++ b/docker/dolphin/Dockerfile @@ -0,0 +1,49 @@ +# Dockerfile for ByteDance Dolphin Document Parser +# This creates a REST API for Dolphin document parsing + +FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04 + +ENV DEBIAN_FRONTEND=noninteractive +ENV PYTHONUNBUFFERED=1 + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + python3.10 \ + python3-pip \ + git \ + curl \ + wget \ + libgl1-mesa-glx \ + libglib2.0-0 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Clone Dolphin repository +RUN git clone https://github.com/bytedance/Dolphin.git /app/dolphin + +# Install requirements +WORKDIR /app/dolphin +RUN pip3 install --no-cache-dir -r requirements.txt + +# Install additional dependencies for API server +RUN pip3 install --no-cache-dir flask flask-cors pdf2image +RUN apt-get update && apt-get install -y poppler-utils && rm -rf /var/lib/apt/lists/* + +# Download model +RUN pip3 install huggingface_hub && \ + python3 -c "from huggingface_hub import snapshot_download; \ + snapshot_download(repo_id='ByteDance/Dolphin-1.5', local_dir='/app/models')" + +# Copy API server +COPY api_server.py /app/api_server.py + +WORKDIR /app + +EXPOSE 5000 + +HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \ + CMD curl -f http://localhost:5000/health || exit 1 + +CMD ["python3", "api_server.py"] + diff --git a/docker/dolphin/api_server.py b/docker/dolphin/api_server.py new file mode 100644 index 
0000000..ea5fcc7 --- /dev/null +++ b/docker/dolphin/api_server.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 +""" +Dolphin Document Parser API Server +Provides REST API for ByteDance Dolphin document parsing +""" + +from flask import Flask, request, jsonify +from flask_cors import CORS +from PIL import Image +import io +import base64 +import sys +import os +import torch +import json +from pathlib import Path + +# Add Dolphin to Python path +sys.path.insert(0, '/app/dolphin') + +app = Flask(__name__) +CORS(app) + +# Initialize Dolphin model (lazy loading) +dolphin_model = None +dolphin_processor = None + +def load_dolphin_model(): + """Load Dolphin model on first request""" + global dolphin_model, dolphin_processor + + if dolphin_model is None: + try: + print("Loading Dolphin model...") + from transformers import VisionEncoderDecoderModel, AutoProcessor + + model_path = os.environ.get('MODEL_PATH', '/app/models') + + # Load processor and model + print(f"Loading from {model_path}") + dolphin_processor = AutoProcessor.from_pretrained( + model_path, + trust_remote_code=True + ) + + dolphin_model = VisionEncoderDecoderModel.from_pretrained( + model_path, + trust_remote_code=True + ) + + # Move to GPU if available + if torch.cuda.is_available(): + dolphin_model = dolphin_model.cuda() + print("Model loaded on GPU") + else: + print("Model loaded on CPU (slower)") + + dolphin_model.eval() + print("Dolphin model loaded successfully") + + except Exception as e: + print(f"Error loading Dolphin model: {e}") + raise + + return dolphin_model, dolphin_processor + +@app.route('/health', methods=['GET']) +def health(): + """Health check endpoint""" + return jsonify({'status': 'ok', 'service': 'dolphin-api'}) + +@app.route('/parse', methods=['POST']) +def parse_document(): + """ + Parse document image or PDF + + Request: + - file: multipart file upload + - OR image_base64: base64 encoded image + - parse_layout: bool (optional, default=True) + - extract_tables: bool (optional, 
default=True) + + Response: + { + "text": "extracted text", + "layout": {...}, + "tables": [...], + "metadata": {...} + } + """ + try: + # Get image from request + if 'file' in request.files: + file = request.files['file'] + # Read file content into memory to avoid tempfile issues + file_bytes = file.read() + image = Image.open(io.BytesIO(file_bytes)) + elif request.json and 'image_base64' in request.json: + image_data = base64.b64decode(request.json['image_base64']) + image = Image.open(io.BytesIO(image_data)) + else: + return jsonify({'error': 'No image provided. Send file or image_base64'}), 400 + + # Get options + parse_layout = request.form.get('parse_layout', 'true').lower() == 'true' + extract_tables = request.form.get('extract_tables', 'true').lower() == 'true' + + # Load model + model, processor = load_dolphin_model() + + # Prepare image for Dolphin + if image.mode != 'RGB': + image = image.convert('RGB') + + # Run Dolphin parsing + print(f"Processing image size: {image.size}") + + # Process with Dolphin + inputs = processor(images=image, return_tensors="pt") + + if torch.cuda.is_available(): + inputs = {k: v.cuda() for k, v in inputs.items()} + + # Generate output + with torch.no_grad(): + outputs = model.generate( + **inputs, + max_new_tokens=2048, + do_sample=False + ) + + # Decode output + generated_text = processor.batch_decode(outputs, skip_special_tokens=True)[0] + + # Parse Dolphin's JSON output + try: + parsed_result = json.loads(generated_text) + except json.JSONDecodeError: + # If not JSON, return as plain text + parsed_result = { + 'text': generated_text, + 'layout': {'elements': [], 'reading_order': []}, + 'tables': [] + } + + # Format result + result = { + 'text': parsed_result.get('text', generated_text), + 'layout': parsed_result.get('layout', { + 'elements': parsed_result.get('elements', []), + 'reading_order': parsed_result.get('reading_order', []) + }), + 'tables': parsed_result.get('tables', []), + 'metadata': { + 'model': 
'Dolphin-1.5', + 'image_size': list(image.size), + 'device': 'cuda' if torch.cuda.is_available() else 'cpu' + } + } + + print(f"Parsing complete. Text length: {len(result['text'])}") + return jsonify(result) + + except Exception as e: + app.logger.error(f"Parse error: {str(e)}") + return jsonify({'error': str(e)}), 500 + +@app.route('/parse_pdf', methods=['POST']) +def parse_pdf(): + """ + Parse multi-page PDF document + + Request: + - file: PDF file upload + - pages: list of page numbers (optional, default=all) + + Response: + { + "pages": [ + {"page": 1, "text": "...", "layout": {...}}, + {"page": 2, "text": "...", "layout": {...}} + ], + "metadata": {...} + } + """ + try: + if 'file' not in request.files: + return jsonify({'error': 'No PDF file provided'}), 400 + + file = request.files['file'] + + # Save PDF temporarily + import tempfile + import pdf2image + + with tempfile.NamedTemporaryFile(suffix='.pdf', delete=False) as tmp: + file.save(tmp.name) + pdf_path = tmp.name + + try: + # Convert PDF to images + images = pdf2image.convert_from_path(pdf_path) + + model, processor = load_dolphin_model() + + pages_result = [] + + for page_num, img in enumerate(images, 1): + print(f"Processing page {page_num}/{len(images)}") + + # Process image with Dolphin + inputs = processor(images=img, return_tensors="pt") + + if torch.cuda.is_available(): + inputs = {k: v.cuda() for k, v in inputs.items()} + + with torch.no_grad(): + outputs = model.generate(**inputs, max_new_tokens=2048, do_sample=False) + + generated_text = processor.batch_decode(outputs, skip_special_tokens=True)[0] + + try: + parsed = json.loads(generated_text) + except json.JSONDecodeError: + parsed = {'text': generated_text, 'layout': {}} + + pages_result.append({ + 'page': page_num, + 'text': parsed.get('text', generated_text), + 'layout': parsed.get('layout', {}), + 'tables': parsed.get('tables', []) + }) + + result = { + 'pages': pages_result, + 'metadata': { + 'model': 'Dolphin-1.5', + 'total_pages': 
len(images), + 'device': 'cuda' if torch.cuda.is_available() else 'cpu' + } + } + + return jsonify(result) + + finally: + # Clean up temp file + os.unlink(pdf_path) + + except Exception as e: + app.logger.error(f"PDF parse error: {str(e)}") + return jsonify({'error': str(e)}), 500 + +@app.route('/info', methods=['GET']) +def info(): + """Get model information""" + return jsonify({ + 'model': 'ByteDance Dolphin-1.5', + 'version': '1.5', + 'capabilities': [ + 'document_parsing', + 'layout_analysis', + 'table_extraction', + 'formula_extraction', + 'ocr' + ], + 'model_path': '/app/models' + }) + +if __name__ == '__main__': + port = int(os.environ.get('PORT', 5000)) + app.run(host='0.0.0.0', port=port, debug=False) + diff --git a/docker/keycloak/commonground-realm.json b/docker/keycloak/commonground-realm.json new file mode 100644 index 0000000..ea4b237 --- /dev/null +++ b/docker/keycloak/commonground-realm.json @@ -0,0 +1,236 @@ +{ + "realm": "commonground", + "enabled": true, + "displayName": "Common Ground", + "displayNameHtml": "Common Ground", + "sslRequired": "none", + "registrationAllowed": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": true, + "editUsernameAllowed": false, + "bruteForceProtected": true, + "permanentLockout": false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 5, + "roles": { + "realm": [ + { + "name": "admin", + "description": "Administrator role" + }, + { + "name": "user", + "description": "Regular user role" + }, + { + "name": "case-manager", + "description": "Case manager role for Valtimo/OpenZaak" + } + ] + }, + "defaultRoles": ["user"], + "clients": [ + { + "clientId": "nextcloud", + "name": "Nextcloud", + "description": "Nextcloud file sharing and collaboration", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": 
"nextcloud-secret-change-me", + "redirectUris": [ + "http://localhost:8080/*", + "http://nextcloud/*", + "https://nextcloud/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true, + "protocolMappers": [ + { + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-group-membership-mapper", + "consentRequired": false, + "config": { + "full.path": "false", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "userinfo.token.claim": "true" + } + } + ] + }, + { + "clientId": "openzaak", + "name": "OpenZaak", + "description": "OpenZaak ZGW API backend", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "openzaak-secret-change-me", + "redirectUris": [ + "http://localhost:8089/*", + "http://nc_app_openzaak:*/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true + }, + { + "clientId": "openklant", + "name": "OpenKlant", + "description": "OpenKlant customer interaction registry", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "openklant-secret-change-me", + "redirectUris": [ + "http://localhost:8090/*", + "http://nc_app_openklant:*/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true + }, + { + "clientId": "opentalk", + "name": "OpenTalk", + "description": "OpenTalk video conferencing", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "opentalk-secret-change-me", + "redirectUris": [ + "http://localhost:11311/*", + 
"http://nc_app_opentalk:*/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true + }, + { + "clientId": "valtimo", + "name": "Valtimo", + "description": "Valtimo BPM and case management", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "valtimo-secret-change-me", + "redirectUris": [ + "http://localhost:8088/*", + "http://nc_app_valtimo:*/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true + }, + { + "clientId": "kiss", + "name": "KISS Frontend", + "description": "Klantinteractie-Servicesysteem", + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "kiss-secret-change-me", + "redirectUris": [ + "http://localhost:9030/*", + "http://kiss-bff:8080/*" + ], + "webOrigins": ["+"], + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": false, + "protocol": "openid-connect", + "fullScopeAllowed": true, + "protocolMappers": [ + { + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-group-membership-mapper", + "consentRequired": false, + "config": { + "full.path": "false", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "userinfo.token.claim": "true" + } + } + ] + } + ], + "users": [ + { + "username": "admin", + "enabled": true, + "emailVerified": true, + "firstName": "Admin", + "lastName": "User", + "email": "admin@example.com", + "credentials": [ + { + "type": "password", + "value": "admin", + "temporary": false + } + ], + "realmRoles": ["admin", "user", "case-manager"] + }, + { + "username": "user", + "enabled": true, + "emailVerified": 
true, + "firstName": "Regular", + "lastName": "User", + "email": "user@example.com", + "credentials": [ + { + "type": "password", + "value": "user", + "temporary": false + } + ], + "realmRoles": ["user"] + }, + { + "username": "casemanager", + "enabled": true, + "emailVerified": true, + "firstName": "Case", + "lastName": "Manager", + "email": "casemanager@example.com", + "credentials": [ + { + "type": "password", + "value": "casemanager", + "temporary": false + } + ], + "realmRoles": ["user", "case-manager"] + } + ] +} diff --git a/docker/mail/seed-mail.sh b/docker/mail/seed-mail.sh new file mode 100755 index 0000000..b2f5893 --- /dev/null +++ b/docker/mail/seed-mail.sh @@ -0,0 +1,310 @@ +#!/bin/bash +# seed-mail.sh — Send test emails to Greenmail for development/testing +# +# Usage: bash seed-mail.sh [SMTP_HOST] [SMTP_PORT] +# Defaults: localhost 3025 +# +# Greenmail auto-creates accounts on first email received. +# After seeding, configure Nextcloud Mail app with: +# IMAP: greenmail:3143 (or localhost:3143 from host) +# SMTP: greenmail:3025 (or localhost:3025 from host) +# User: , Password: + +SMTP_HOST="${1:-localhost}" +SMTP_PORT="${2:-3025}" + +send_email() { + local from="$1" + local to="$2" + local subject="$3" + local body="$4" + local date="$5" + local cc="${6:-}" + local message_id="${7:-$(uuidgen)@test.local}" + + local cc_header="" + if [ -n "$cc" ]; then + cc_header="Cc: $cc"$'\r\n' + fi + + local email_data="From: $from\r\nTo: $to\r\n${cc_header}Subject: $subject\r\nDate: $date\r\nMessage-ID: <$message_id>\r\nMIME-Version: 1.0\r\nContent-Type: text/plain; charset=UTF-8\r\n\r\n$body" + + # Use Python for reliable SMTP sending (available in most environments) + python3 -c " +import smtplib +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +import sys + +msg = MIMEMultipart() +msg['From'] = '''$from''' +msg['To'] = '''$to''' +msg['Subject'] = '''$subject''' +msg['Date'] = '''$date''' +msg['Message-ID'] = 
'''<$message_id>''' +cc = '''$cc''' +if cc: + msg['Cc'] = cc + +msg.attach(MIMEText('''$body''', 'plain', 'utf-8')) + +try: + with smtplib.SMTP('$SMTP_HOST', $SMTP_PORT) as server: + recipients = ['$to'] + if cc: + recipients.extend([r.strip() for r in cc.split(',')]) + server.sendmail('$from', recipients, msg.as_string()) + print(f' Sent: {msg[\"Subject\"]} -> {msg[\"To\"]}') +except Exception as e: + print(f' FAILED: {e}', file=sys.stderr) + sys.exit(1) +" +} + +echo "=== Seeding Greenmail with test emails ===" +echo "SMTP: $SMTP_HOST:$SMTP_PORT" +echo "" + +# Test accounts (auto-created by Greenmail on first email): +# - admin@test.local (system admin) +# - behandelaar@test.local (case handler / civil servant) +# - coordinator@test.local (team coordinator) +# - burger@test.local (citizen) +# - leverancier@test.local (supplier/vendor) + +echo "--- Case management emails (procest/pipelinq relevant) ---" + +send_email \ + "burger@test.local" \ + "behandelaar@test.local" \ + "Aanvraag omgevingsvergunning - Kerkstraat 42" \ + "Geachte heer/mevrouw, + +Hierbij dien ik een aanvraag in voor een omgevingsvergunning voor het plaatsen van een dakkapel op het adres Kerkstraat 42, 5038 AB Tilburg. + +De benodigde documenten (bouwtekeningen en situatieschets) stuur ik als bijlage mee. + +Met vriendelijke groet, +Jan de Vries +Burger BSN: 123456789" \ + "Mon, 17 Mar 2026 09:15:00 +0100" + +send_email \ + "behandelaar@test.local" \ + "burger@test.local" \ + "RE: Aanvraag omgevingsvergunning - Kerkstraat 42 - Ontvangstbevestiging" \ + "Geachte heer De Vries, + +Wij hebben uw aanvraag voor een omgevingsvergunning ontvangen. Uw aanvraag is geregistreerd onder zaaknummer ZK-2026-0142. + +De behandeltermijn is 8 weken. U ontvangt binnen 2 weken bericht over de voortgang. 
+ +Met vriendelijke groet, +Fatima El-Amrani +Afdeling Vergunningen +Gemeente Tilburg" \ + "Mon, 17 Mar 2026 14:30:00 +0100" \ + "" \ + "reply-zk2026-0142@test.local" + +send_email \ + "behandelaar@test.local" \ + "coordinator@test.local" \ + "Adviesaanvraag welstandscommissie - ZK-2026-0142" \ + "Hoi Noor, + +Kun je het advies van de welstandscommissie inplannen voor de aanvraag ZK-2026-0142 (dakkapel Kerkstraat 42)? + +De bouwtekeningen zitten in het dossier. Graag voor volgende week woensdag. + +Groet, +Fatima" \ + "Tue, 18 Mar 2026 10:00:00 +0100" + +send_email \ + "coordinator@test.local" \ + "behandelaar@test.local" \ + "RE: Adviesaanvraag welstandscommissie - ZK-2026-0142" \ + "Fatima, + +Welstandscommissie is ingepland voor woensdag 26 maart om 14:00. +Ik heb het dossier doorgestuurd naar de commissieleden. + +Positief advies verwacht gezien eerdere vergelijkbare aanvragen in die straat. + +Groet, +Noor Yilmaz" \ + "Tue, 18 Mar 2026 15:45:00 +0100" + +send_email \ + "leverancier@test.local" \ + "coordinator@test.local" \ + "Offerte IT-systeem migratie - REF-2026-Q1-087" \ + "Beste Noor, + +In navolging van ons gesprek hierbij onze offerte voor de migratie van het zaaksysteem naar Nextcloud/OpenRegister. + +Samenvatting: +- Fase 1: Data migratie (4 weken) - EUR 24.000 +- Fase 2: Integratie Procest/Pipelinq (6 weken) - EUR 36.000 +- Fase 3: Training en acceptatie (2 weken) - EUR 8.000 + +Totaal: EUR 68.000 excl. BTW + +De offerte is 30 dagen geldig. Graag hoor ik uw reactie. + +Met vriendelijke groet, +Mark Visser +Conduction B.V." \ + "Wed, 19 Mar 2026 08:30:00 +0100" + +send_email \ + "coordinator@test.local" \ + "admin@test.local" \ + "FW: Offerte IT-systeem migratie - ter goedkeuring" \ + "Admin, + +Hierbij de offerte van Conduction voor de zaaksysteem migratie. Past binnen het budget dat in de begroting is opgenomen. + +Graag je akkoord zodat we het contract kunnen opstellen. 
+ +Noor" \ + "Wed, 19 Mar 2026 11:00:00 +0100" \ + "behandelaar@test.local" + +echo "" +echo "--- Workflow/notification emails ---" + +send_email \ + "admin@test.local" \ + "behandelaar@test.local" \ + "Herinnering: 3 zaken naderen deadline" \ + "Beste Fatima, + +De volgende zaken naderen hun behandeldeadline: + +1. ZK-2026-0098 - Evenementenvergunning Koningsdag (deadline: 25 maart) +2. ZK-2026-0115 - Bezwaarschrift WOZ-waarde (deadline: 28 maart) +3. ZK-2026-0142 - Omgevingsvergunning dakkapel (deadline: 12 mei) + +Verzoek om de status bij te werken in het zaaksysteem. + +Systeem notificatie - Niet beantwoorden" \ + "Thu, 20 Mar 2026 07:00:00 +0100" + +send_email \ + "burger@test.local" \ + "admin@test.local" \ + "Klacht: geen reactie op mijn aanvraag sinds 6 weken" \ + "Geacht college, + +Op 3 februari heb ik een aanvraag ingediend voor een kapvergunning (referentie ZK-2026-0034). Sindsdien heb ik geen enkele reactie ontvangen ondanks twee keer bellen. + +Ik verzoek u dringend om mij binnen 5 werkdagen te informeren over de status. + +Met vriendelijke groet, +Priya Ganpat +Wilhelminastraat 17, Tilburg" \ + "Thu, 20 Mar 2026 16:20:00 +0100" + +send_email \ + "admin@test.local" \ + "coordinator@test.local" \ + "URGENT: Klacht kapvergunning ZK-2026-0034 - direct oppakken" \ + "Noor, + +Bijgevoegd een klacht over ZK-2026-0034 (kapvergunning Ganpat). +De burger wacht al 6 weken. Dit moet morgen opgepakt worden. + +Wie is de behandelaar? Graag terugkoppeling voor 12:00. + +Admin" \ + "Fri, 21 Mar 2026 08:00:00 +0100" \ + "behandelaar@test.local" + +send_email \ + "behandelaar@test.local" \ + "burger@test.local" \ + "Status update: Uw aanvraag kapvergunning ZK-2026-0034" \ + "Geachte mevrouw Ganpat, + +Excuses voor het uitblijven van een reactie op uw aanvraag kapvergunning. + +Uw aanvraag is in behandeling. De boomdeskundige heeft een positief advies gegeven. Het besluit wordt uiterlijk 28 maart genomen. 
+ +U kunt de voortgang ook volgen via het zaakportaal op https://gemeente.nl/mijnzaken. + +Met vriendelijke groet, +Fatima El-Amrani +Gemeente Tilburg" \ + "Fri, 21 Mar 2026 11:30:00 +0100" + +echo "" +echo "--- Internal coordination emails ---" + +send_email \ + "coordinator@test.local" \ + "behandelaar@test.local" \ + "Weekplanning team Vergunningen - week 13" \ + "Team, + +Planning voor volgende week: + +Maandag: Sprint review Q1 (09:30-10:30, vergaderzaal 3) +Dinsdag: Geen vergaderingen - focus dag +Woensdag: Welstandscommissie (14:00-16:00) +Donderdag: Overleg met IT over nieuwe koppelingen (10:00-11:00) +Vrijdag: Retrospective (15:00-16:00) + +Openstaande zaken per persoon: +- Fatima: 12 zaken (3 urgent) +- Ahmed: 8 zaken (1 urgent) +- Lisa: 10 zaken (2 urgent) + +Fijn weekend! +Noor" \ + "Fri, 21 Mar 2026 16:00:00 +0100" \ + "admin@test.local" + +send_email \ + "leverancier@test.local" \ + "behandelaar@test.local" \ + "Technische documentatie API-koppeling OpenRegister" \ + "Beste Fatima, + +Zoals besproken hierbij de technische documentatie voor de API-koppeling tussen jullie zaaksysteem en OpenRegister. + +De koppeling verloopt via: +- REST API endpoints voor zaak-objecten +- Webhook notificaties voor statuswijzigingen +- CalDAV voor taak-synchronisatie +- CardDAV voor contactpersonen + +We hebben een testomgeving ingericht op https://test.conduction.nl waar jullie de koppeling kunnen testen. + +Laat weten als er vragen zijn. + +Groet, +Mark Visser +Conduction B.V." 
\ + "Sat, 22 Mar 2026 10:00:00 +0100" \ + "coordinator@test.local" + +echo "" +echo "=== Mail seeding complete ===" +echo "" +echo "Accounts created (login = email address, password = email address):" +echo " - admin@test.local" +echo " - behandelaar@test.local" +echo " - coordinator@test.local" +echo " - burger@test.local" +echo " - leverancier@test.local" +echo "" +echo "Configure Nextcloud Mail app:" +echo " IMAP Host: greenmail (from container) or localhost (from host)" +echo " IMAP Port: 3143" +echo " SMTP Host: greenmail (from container) or localhost (from host)" +echo " SMTP Port: 3025" +echo " Security: None" +echo " User: " +echo " Password: " diff --git a/docker/mail/seed-ox.sh b/docker/mail/seed-ox.sh new file mode 100755 index 0000000..172ab5f --- /dev/null +++ b/docker/mail/seed-ox.sh @@ -0,0 +1,225 @@ +#!/bin/bash +# seed-ox.sh — Create test users and seed data in Open-Xchange +# +# Usage: bash seed-ox.sh +# +# Prerequisites: +# - OX profile running: docker compose -f .github/docker-compose.yml --profile ox up -d +# - Wait for OX to finish initialization (check logs: docker logs -f conduction-open-xchange) +# - GreenMail must be running (auto-starts with ox profile) +# +# After seeding, also run seed-mail.sh to populate GreenMail with test emails. +# OX authenticates against GreenMail's IMAP, so OX users must match GreenMail accounts. + +OX_CONTAINER="conduction-open-xchange" +OX_ADMIN="oxadminmaster" +OX_ADMIN_PASS="admin_master_password" +CTX_ADMIN="oxadmin" +CTX_ADMIN_PASS="oxadmin" +CTX_ID="1" + +echo "=== Seeding Open-Xchange with test users ===" +echo "" + +# Check if OX is running +if ! docker ps --format '{{.Names}}' | grep -q "$OX_CONTAINER"; then + echo "ERROR: $OX_CONTAINER is not running." + echo "Start with: docker compose -f .github/docker-compose.yml --profile ox up -d" + exit 1 +fi + +# Wait for OX to be ready +echo "Checking if OX is ready..." 
+for i in $(seq 1 30); do + if docker exec "$OX_CONTAINER" curl -s -o /dev/null -w "%{http_code}" http://localhost/appsuite/ 2>/dev/null | grep -q "200"; then + echo "OX is ready." + break + fi + if [ "$i" -eq 30 ]; then + echo "ERROR: OX is not ready after 5 minutes. Check logs: docker logs $OX_CONTAINER" + exit 1 + fi + echo " Waiting for OX to start... ($i/30)" + sleep 10 +done + +echo "" +echo "--- Creating test users ---" +echo "(Users authenticate via IMAP against GreenMail)" +echo "(GreenMail auto-creates accounts — password = email address)" +echo "" + +# Create users — imaplogin must match GreenMail account (email address) +# GreenMail uses email as both username and password + +create_user() { + local username="$1" + local display="$2" + local given="$3" + local surname="$4" + local email="$5" + local password="$6" + + docker exec "$OX_CONTAINER" /opt/open-xchange/sbin/createuser \ + -A "$CTX_ADMIN" -P "$CTX_ADMIN_PASS" -c "$CTX_ID" \ + --username "$username" \ + --displayname "$display" \ + --givenname "$given" \ + --surname "$surname" \ + --password "$password" \ + --email "$email" \ + --imaplogin "$email" \ + --language nl_NL 2>&1 + + if [ $? 
-eq 0 ]; then + echo " Created user: $username ($email)" + else + echo " User $username may already exist (this is OK)" + fi +} + +create_user "behandelaar" "Fatima El-Amrani" "Fatima" "El-Amrani" \ + "behandelaar@test.local" "behandelaar@test.local" + +create_user "coordinator" "Noor Yilmaz" "Noor" "Yilmaz" \ + "coordinator@test.local" "coordinator@test.local" + +create_user "burger" "Jan de Vries" "Jan" "de Vries" \ + "burger@test.local" "burger@test.local" + +create_user "leverancier" "Mark Visser" "Mark" "Visser" \ + "leverancier@test.local" "leverancier@test.local" + +create_user "admin-user" "System Admin" "System" "Admin" \ + "admin@test.local" "admin@test.local" + +echo "" +echo "--- Seeding contacts and calendar via OX HTTP API ---" + +# Login as behandelaar to create contacts and appointments +echo "Logging in as behandelaar..." +SESSION=$(docker exec "$OX_CONTAINER" curl -s \ + "http://localhost/ajax/login?action=login" \ + -d "name=behandelaar&password=behandelaar@test.local" \ + 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('session',''))" 2>/dev/null) + +if [ -z "$SESSION" ] || [ "$SESSION" = "None" ]; then + echo " WARNING: Could not login as behandelaar. Skipping API seeding." + echo " This is expected if GreenMail hasn't received mail for this user yet." + echo " Run seed-mail.sh first, then retry this script." + echo "" + echo "=== User creation complete. Run seed-mail.sh to populate mailboxes. ===" + exit 0 +fi + +echo " Session: ${SESSION:0:16}..." + +# Create contacts +echo "" +echo "Creating contacts..." 
+
+create_contact() {
+    local first="$1"
+    local last="$2"
+    local email="$3"
+    local phone="$4"
+    local note="$5"
+
+    # Capture the HTTP status instead of letting -w print it raw to stdout,
+    # which previously glued e.g. "200" onto the front of the echo below.
+    local status; status=$(docker exec "$OX_CONTAINER" curl -s -o /dev/null -w "%{http_code}" \
+        "http://localhost/ajax/contacts?action=new&session=$SESSION" \
+        -H "Content-Type: application/json" \
+        -d "{
+            \"folder_id\": \"con://0/6\",
+            \"first_name\": \"$first\",
+            \"last_name\": \"$last\",
+            \"email1\": \"$email\",
+            \"cellular_telephone1\": \"$phone\",
+            \"note\": \"$note\"
+        }" 2>/dev/null)
+    echo "    Contact: $first $last ($email) [HTTP $status]"
+}
+
+create_contact "Jan" "de Vries" "burger@test.local" "+31612345678" \
+    "Burger - Aanvraag omgevingsvergunning dakkapel (ZK-2026-0142)"
+
+create_contact "Priya" "Ganpat" "burger@test.local" "+31687654321" \
+    "Burger - Kapvergunning (ZK-2026-0034). ZZP developer."
+
+create_contact "Mark" "Visser" "leverancier@test.local" "+31854011580" \
+    "Leverancier - Conduction B.V. Offerte REF-2026-Q1-087."
+
+create_contact "Annemarie" "de Vries" "annemarie@vng-test.local" "+31703738393" \
+    "VNG - Standaarden Architect. Common Ground / ZGW APIs."
+
+# Create calendar appointments
+echo ""
+echo "Creating calendar appointments..."
+
+# Get default calendar folder
+CALENDAR_FOLDER=$(docker exec "$OX_CONTAINER" curl -s \
+    "http://localhost/ajax/folders?action=list&parent=1&session=$SESSION&columns=1,300" \
+    2>/dev/null | python3 -c "
+import sys,json
+try:
+    data = json.load(sys.stdin)
+    for f in data.get('data',[]):
+        if 'calendar' in str(f).lower():
+            print(f[0]); break
+except: print('')
+" 2>/dev/null)
+
+if [ -n "$CALENDAR_FOLDER" ]; then  # -n already excludes the empty string; extra != \"\" test was redundant
+    echo "    Using calendar folder: $CALENDAR_FOLDER"
+
+    # Welstandscommissie meeting (epoch-ms timestamps — presumably late March 2026; TODO confirm)
+    docker exec "$OX_CONTAINER" curl -s -o /dev/null \
+        "http://localhost/ajax/calendar?action=new&session=$SESSION" \
+        -H "Content-Type: application/json" \
+        -d "{
+            \"folder_id\": \"$CALENDAR_FOLDER\",
+            \"title\": \"Welstandscommissie - ZK-2026-0142 (dakkapel Kerkstraat 42)\",
+            \"start_date\": 1774537200000,
+            \"end_date\": 1774544400000,
+            \"location\": \"Raadzaal - Stadskantoor\",
+            \"note\": \"Behandeling aanvraag omgevingsvergunning dakkapel.\"
+        }" 2>/dev/null
+    echo "    Event: Welstandscommissie"
+
+    # IT koppeling overleg
+    docker exec "$OX_CONTAINER" curl -s -o /dev/null \
+        "http://localhost/ajax/calendar?action=new&session=$SESSION" \
+        -H "Content-Type: application/json" \
+        -d "{
+            \"folder_id\": \"$CALENDAR_FOLDER\",
+            \"title\": \"Overleg IT-koppelingen OpenRegister/Procest/Pipelinq\",
+            \"start_date\": 1774609200000,
+            \"end_date\": 1774612800000,
+            \"location\": \"Online - Nextcloud Talk\",
+            \"note\": \"Technisch overleg API-koppelingen en integraties.\"
+        }" 2>/dev/null
+    echo "    Event: IT-koppelingen overleg"
+else
+    echo "    WARNING: Could not find calendar folder. Skipping calendar seeding."
+fi + +# Logout +docker exec "$OX_CONTAINER" curl -s -o /dev/null \ + "http://localhost/ajax/login?action=logout&session=$SESSION" 2>/dev/null + +echo "" +echo "=== OX seeding complete ===" +echo "" +echo "Access OX at: http://localhost:8087/appsuite" +echo "" +echo "Login credentials (authenticate via IMAP/GreenMail):" +echo " behandelaar / behandelaar@test.local" +echo " coordinator / coordinator@test.local" +echo " burger / burger@test.local" +echo " leverancier / leverancier@test.local" +echo " admin-user / admin@test.local" +echo " oxadmin / oxadmin (context admin)" +echo "" +echo "Next steps:" +echo " 1. Run seed-mail.sh to populate GreenMail with test emails" +echo " 2. Login to OX — mail will appear from GreenMail via IMAP" +echo " 3. Run seed-pim.sh to also populate Nextcloud contacts/calendar" diff --git a/docker/mail/seed-pim.sh b/docker/mail/seed-pim.sh new file mode 100755 index 0000000..a59a25a --- /dev/null +++ b/docker/mail/seed-pim.sh @@ -0,0 +1,241 @@ +#!/bin/bash +# seed-pim.sh — Seed contacts and calendar events into Nextcloud via DAV APIs +# +# Usage: bash seed-pim.sh [NC_URL] [NC_USER] [NC_PASS] +# Defaults: http://localhost:8080 admin admin +# +# Creates test contacts (CardDAV) and calendar events (CalDAV) for development. 
+ +NC_URL="${1:-http://localhost:8080}" +NC_USER="${2:-admin}" +NC_PASS="${3:-admin}" + +DAV_URL="$NC_URL/remote.php/dav" + +echo "=== Seeding Nextcloud PIM data ===" +echo "URL: $NC_URL, User: $NC_USER" +echo "" + +# Helper: create a contact via CardDAV +create_contact() { + local uid="$1" + local vcard="$2" + + local status + status=$(curl -s -o /dev/null -w "%{http_code}" \ + -u "$NC_USER:$NC_PASS" \ + -X PUT \ + -H "Content-Type: text/vcard; charset=utf-8" \ + -d "$vcard" \ + "$DAV_URL/addressbooks/users/$NC_USER/contacts/$uid.vcf") + + if [ "$status" = "201" ] || [ "$status" = "204" ]; then + echo " Created contact: $uid" + else + echo " Contact $uid: HTTP $status (may already exist)" + fi +} + +# Helper: create a calendar event via CalDAV +create_event() { + local uid="$1" + local ical="$2" + + local status + status=$(curl -s -o /dev/null -w "%{http_code}" \ + -u "$NC_USER:$NC_PASS" \ + -X PUT \ + -H "Content-Type: text/calendar; charset=utf-8" \ + -d "$ical" \ + "$DAV_URL/calendars/$NC_USER/personal/$uid.ics") + + if [ "$status" = "201" ] || [ "$status" = "204" ]; then + echo " Created event: $uid" + else + echo " Event $uid: HTTP $status (may already exist)" + fi +} + +echo "--- Creating contacts ---" + +create_contact "jan-de-vries" "BEGIN:VCARD +VERSION:3.0 +UID:jan-de-vries +FN:Jan de Vries +N:de Vries;Jan;;; +EMAIL;TYPE=HOME:burger@test.local +TEL;TYPE=CELL:+31612345678 +ADR;TYPE=HOME:;;Kerkstraat 42;Tilburg;;5038 AB;Nederland +NOTE:Burger - Aanvraag omgevingsvergunning dakkapel (ZK-2026-0142) +CATEGORIES:Burger,Vergunningen +END:VCARD" + +create_contact "priya-ganpat" "BEGIN:VCARD +VERSION:3.0 +UID:priya-ganpat +FN:Priya Ganpat +N:Ganpat;Priya;;; +EMAIL;TYPE=HOME:burger@test.local +TEL;TYPE=CELL:+31687654321 +ADR;TYPE=HOME:;;Wilhelminastraat 17;Tilburg;;5041 ED;Nederland +NOTE:Burger - Kapvergunning aanvraag (ZK-2026-0034). ZZP developer. 
+CATEGORIES:Burger,Vergunningen +END:VCARD" + +create_contact "fatima-el-amrani" "BEGIN:VCARD +VERSION:3.0 +UID:fatima-el-amrani +FN:Fatima El-Amrani +N:El-Amrani;Fatima;;; +ORG:Gemeente Tilburg;Afdeling Vergunningen +TITLE:Behandelaar Vergunningen +EMAIL;TYPE=WORK:behandelaar@test.local +TEL;TYPE=WORK:+31135497200 +ADR;TYPE=WORK:;;Stadhuisplein 130;Tilburg;;5038 TC;Nederland +CATEGORIES:Medewerker,Vergunningen +END:VCARD" + +create_contact "noor-yilmaz" "BEGIN:VCARD +VERSION:3.0 +UID:noor-yilmaz +FN:Noor Yilmaz +N:Yilmaz;Noor;;; +ORG:Gemeente Tilburg;Afdeling Vergunningen +TITLE:Coordinator / Functioneel Beheerder +EMAIL;TYPE=WORK:coordinator@test.local +TEL;TYPE=WORK:+31135497201 +ADR;TYPE=WORK:;;Stadhuisplein 130;Tilburg;;5038 TC;Nederland +NOTE:CISO achtergrond. Verantwoordelijk voor IT-koppelingen en planning. +CATEGORIES:Medewerker,Coordinator +END:VCARD" + +create_contact "mark-visser" "BEGIN:VCARD +VERSION:3.0 +UID:mark-visser +FN:Mark Visser +N:Visser;Mark;;; +ORG:Conduction B.V. +TITLE:Directeur / Lead Developer +EMAIL;TYPE=WORK:leverancier@test.local +TEL;TYPE=WORK:+31854011580 +URL:https://conduction.nl +NOTE:Leverancier IT-systeem migratie. Offerte REF-2026-Q1-087. +CATEGORIES:Leverancier,IT +END:VCARD" + +create_contact "annemarie-de-vries" "BEGIN:VCARD +VERSION:3.0 +UID:annemarie-de-vries +FN:Annemarie de Vries +N:de Vries;Annemarie;;; +ORG:VNG Realisatie +TITLE:Standaarden Architect +EMAIL;TYPE=WORK:annemarie@vng-test.local +TEL;TYPE=WORK:+31703738393 +NOTE:VNG contactpersoon voor Common Ground standaarden en ZGW APIs. +CATEGORIES:VNG,Standaarden +END:VCARD" + +echo "" +echo "--- Creating calendar events ---" + +create_event "sprint-review-q1" "BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//OpenRegister//Seed//EN +BEGIN:VEVENT +UID:sprint-review-q1 +DTSTART:20260323T083000Z +DTEND:20260323T093000Z +SUMMARY:Sprint Review Q1 - Team Vergunningen +DESCRIPTION:Kwartaal review van het team Vergunningen.\\n\\nAgenda:\\n1. 
Demo nieuwe zaak-koppeling OpenRegister\\n2. Voortgang migratie zaaksysteem\\n3. KPI's en doorlooptijden\\n4. Planning Q2 +LOCATION:Vergaderzaal 3 - Stadskantoor +ORGANIZER;CN=Noor Yilmaz:mailto:coordinator@test.local +ATTENDEE;CN=Fatima El-Amrani;PARTSTAT=ACCEPTED:mailto:behandelaar@test.local +ATTENDEE;CN=Admin;PARTSTAT=ACCEPTED:mailto:admin@test.local +STATUS:CONFIRMED +END:VEVENT +END:VCALENDAR" + +create_event "welstandscommissie-0142" "BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//OpenRegister//Seed//EN +BEGIN:VEVENT +UID:welstandscommissie-0142 +DTSTART:20260325T130000Z +DTEND:20260325T150000Z +SUMMARY:Welstandscommissie - o.a. ZK-2026-0142 (dakkapel Kerkstraat 42) +DESCRIPTION:Vergadering welstandscommissie.\\n\\nBelangrijkste dossiers:\\n- ZK-2026-0142: Dakkapel Kerkstraat 42 (positief advies verwacht)\\n- ZK-2026-0155: Uitbouw Dorpsstraat 8\\n- ZK-2026-0163: Gevelbekleding Marktplein 3 +LOCATION:Raadzaal - Stadskantoor +ORGANIZER;CN=Noor Yilmaz:mailto:coordinator@test.local +ATTENDEE;CN=Fatima El-Amrani;PARTSTAT=ACCEPTED:mailto:behandelaar@test.local +STATUS:CONFIRMED +END:VEVENT +END:VCALENDAR" + +create_event "it-koppeling-overleg" "BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//OpenRegister//Seed//EN +BEGIN:VEVENT +UID:it-koppeling-overleg +DTSTART:20260326T090000Z +DTEND:20260326T100000Z +SUMMARY:Overleg IT-koppelingen OpenRegister/Procest/Pipelinq +DESCRIPTION:Technisch overleg over de API-koppelingen:\\n\\n1. Email integratie via Nextcloud Mail\\n2. CalDAV/CardDAV koppelingen\\n3. Deck integratie voor kanban workflow\\n4. Webhook configuratie voor statuswijzigingen\\n\\nVoorbereiding: technische documentatie van Conduction doorlezen. 
+LOCATION:Online - Nextcloud Talk +ORGANIZER;CN=Noor Yilmaz:mailto:coordinator@test.local +ATTENDEE;CN=Mark Visser;PARTSTAT=TENTATIVE:mailto:leverancier@test.local +ATTENDEE;CN=Fatima El-Amrani;PARTSTAT=ACCEPTED:mailto:behandelaar@test.local +ATTENDEE;CN=Admin;PARTSTAT=NEEDS-ACTION:mailto:admin@test.local +STATUS:CONFIRMED +END:VEVENT +END:VCALENDAR" + +create_event "deadline-koningsdag" "BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//OpenRegister//Seed//EN +BEGIN:VEVENT +UID:deadline-koningsdag +DTSTART:20260325T000000Z +DTEND:20260325T235959Z +SUMMARY:DEADLINE: ZK-2026-0098 Evenementenvergunning Koningsdag +DESCRIPTION:Uiterste behandeldatum evenementenvergunning Koningsdag.\\nBehandelaar: Fatima El-Amrani\\nStatus: In behandeling +ORGANIZER;CN=Admin:mailto:admin@test.local +ATTENDEE;CN=Fatima El-Amrani;PARTSTAT=ACCEPTED:mailto:behandelaar@test.local +STATUS:CONFIRMED +BEGIN:VALARM +TRIGGER:-P1D +ACTION:DISPLAY +DESCRIPTION:Deadline morgen: Evenementenvergunning Koningsdag +END:VALARM +END:VEVENT +END:VCALENDAR" + +create_event "retrospective-q1" "BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//OpenRegister//Seed//EN +BEGIN:VEVENT +UID:retrospective-q1 +DTSTART:20260327T140000Z +DTEND:20260327T150000Z +SUMMARY:Retrospective Team Vergunningen - Week 13 +DESCRIPTION:Wat ging goed? 
Wat kan beter?\\n\\nPunten uit vorige retro:\\n- Doorlooptijd bezwaarschriften verbeterd (van 12 naar 9 weken)\\n- Nieuw zaakportaal positief ontvangen door burgers\\n- Klachten over trage e-mail notificaties (actie: migratie naar n8n workflows) +LOCATION:Vergaderzaal 2 - Stadskantoor +ORGANIZER;CN=Noor Yilmaz:mailto:coordinator@test.local +ATTENDEE;CN=Fatima El-Amrani;PARTSTAT=ACCEPTED:mailto:behandelaar@test.local +ATTENDEE;CN=Admin;PARTSTAT=ACCEPTED:mailto:admin@test.local +STATUS:CONFIRMED +END:VEVENT +END:VCALENDAR" + +echo "" +echo "=== PIM seeding complete ===" +echo "" +echo "Created:" +echo " - 6 contacts in default address book" +echo " - 5 calendar events in personal calendar" +echo "" +echo "All data links to the same case scenarios as seed-mail.sh" +echo " ZK-2026-0142: Omgevingsvergunning dakkapel (Jan de Vries)" +echo " ZK-2026-0034: Kapvergunning (Priya Ganpat)" +echo " ZK-2026-0098: Evenementenvergunning Koningsdag" +echo " REF-2026-Q1-087: IT migratie offerte (Mark Visser / Conduction)" diff --git a/docker/postgres/init-exapps.sh b/docker/postgres/init-exapps.sh new file mode 100644 index 0000000..0d796b0 --- /dev/null +++ b/docker/postgres/init-exapps.sh @@ -0,0 +1,31 @@ +#!/bin/bash +set -e + +# Initialize databases for ExApps +# This script runs on PostgreSQL startup + +echo "Creating ExApp databases..." + +# Function to create database if it doesn't exist +create_db_if_not_exists() { + local db=$1 + psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL + SELECT 'CREATE DATABASE $db' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '$db')\gexec + GRANT ALL PRIVILEGES ON DATABASE $db TO $POSTGRES_USER; +EOSQL + echo "Database $db ready" +} + +# Create databases for each ExApp +for db in keycloak openzaak openklant opentalk valtimo; do + create_db_if_not_exists $db +done + +# Add PostGIS extension to openzaak (required for geo features) +echo "Adding PostGIS to openzaak..." 
+psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "openzaak" <<-EOSQL + CREATE EXTENSION IF NOT EXISTS postgis; + CREATE EXTENSION IF NOT EXISTS pg_trgm; +EOSQL + +echo "ExApp databases initialized successfully!" diff --git a/docker/postgres/init-exapps.sql b/docker/postgres/init-exapps.sql new file mode 100644 index 0000000..06c3946 --- /dev/null +++ b/docker/postgres/init-exapps.sql @@ -0,0 +1,16 @@ +-- Initialize databases for ExApps +-- This script runs on PostgreSQL startup + +-- Create databases for each ExApp (PostgreSQL syntax) +SELECT 'CREATE DATABASE keycloak' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'keycloak')\gexec +SELECT 'CREATE DATABASE openzaak' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'openzaak')\gexec +SELECT 'CREATE DATABASE openklant' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'openklant')\gexec +SELECT 'CREATE DATABASE opentalk' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'opentalk')\gexec +SELECT 'CREATE DATABASE valtimo' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'valtimo')\gexec + +-- Grant privileges to nextcloud user +GRANT ALL PRIVILEGES ON DATABASE keycloak TO nextcloud; +GRANT ALL PRIVILEGES ON DATABASE openzaak TO nextcloud; +GRANT ALL PRIVILEGES ON DATABASE openklant TO nextcloud; +GRANT ALL PRIVILEGES ON DATABASE opentalk TO nextcloud; +GRANT ALL PRIVILEGES ON DATABASE valtimo TO nextcloud; diff --git a/docker/postgres/init-extensions.sql b/docker/postgres/init-extensions.sql new file mode 100644 index 0000000..9b71f46 --- /dev/null +++ b/docker/postgres/init-extensions.sql @@ -0,0 +1,102 @@ +-- PostgreSQL Extension Initialization for OpenRegister +-- This script enables required extensions for advanced search capabilities +-- +-- Extensions: +-- 1. pgvector - Vector similarity search for AI embeddings and semantic search +-- 2. pg_trgm - Trigram-based full-text search and partial text matching +-- 3. 
btree_gin - Optimized indexing for GIN indexes +-- 4. btree_gist - Optimized indexing for GiST indexes +-- 5. uuid-ossp - UUID generation functions + +-- Enable pgvector extension for vector similarity search. +-- This allows storing and searching AI embeddings (e.g., from OpenAI, Ollama). +-- Use cases: semantic search, RAG (Retrieval Augmented Generation), similarity matching. +CREATE EXTENSION IF NOT EXISTS vector; + +-- Enable pg_trgm extension for full-text and partial text search. +-- This provides trigram-based similarity matching and pattern matching. +-- Use cases: autocomplete, fuzzy search, partial string matching, full-text search. +CREATE EXTENSION IF NOT EXISTS pg_trgm; + +-- Enable btree_gin for optimized GIN indexing. +-- GIN indexes are ideal for multi-value columns (arrays, jsonb, full-text). +CREATE EXTENSION IF NOT EXISTS btree_gin; + +-- Enable btree_gist for optimized GiST indexing. +-- GiST indexes support geometric and range types, plus custom operators. +CREATE EXTENSION IF NOT EXISTS btree_gist; + +-- Enable uuid-ossp for UUID generation. +-- Required for generating UUIDs in database triggers and functions. +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + +-- Log successful initialization. +DO $$ +BEGIN + RAISE NOTICE 'OpenRegister PostgreSQL extensions initialized successfully:'; + RAISE NOTICE ' ✓ vector (pgvector) - Vector similarity search'; + RAISE NOTICE ' ✓ pg_trgm - Trigram full-text and partial matching'; + RAISE NOTICE ' ✓ btree_gin - Optimized GIN indexing'; + RAISE NOTICE ' ✓ btree_gist - Optimized GiST indexing'; + RAISE NOTICE ' ✓ uuid-ossp - UUID generation'; +END $$; + +-- Create helper function for vector similarity search. +-- This function performs cosine similarity search on vector columns. 
+-- Usage: SELECT * FROM your_table ORDER BY vector_cosine_distance(embedding, query_vector) LIMIT 10;
+CREATE OR REPLACE FUNCTION vector_cosine_distance(a vector, b vector)
+RETURNS float8
+LANGUAGE SQL
+IMMUTABLE STRICT PARALLEL SAFE
+AS $$
+    SELECT a <=> b;  -- pgvector's <=> already IS cosine distance (0..2); '1 - (a <=> b)' returned similarity, contradicting the name, COMMENT, and ORDER BY usage above
+$$;
+
+COMMENT ON FUNCTION vector_cosine_distance IS 'Calculate cosine distance between two vectors (returns 0-2, where 0 = identical, 1 = orthogonal, 2 = opposite)';
+
+-- Create helper function for trigram similarity search.
+-- This function returns similarity score between two strings (0-1).
+-- Usage: SELECT * FROM your_table WHERE similarity(column, 'search term') > 0.3 ORDER BY similarity(column, 'search term') DESC;
+CREATE OR REPLACE FUNCTION text_similarity_score(text1 text, text2 text)
+RETURNS float4
+LANGUAGE SQL
+IMMUTABLE STRICT PARALLEL SAFE
+AS $$
+    SELECT similarity(text1, text2);
+$$;
+
+COMMENT ON FUNCTION text_similarity_score IS 'Calculate trigram similarity between two text strings (returns 0-1, where 1 = identical)';
+
+-- Set default similarity threshold for pg_trgm.
+-- This affects the % operator behavior (e.g., 'text' % 'search').
+-- Lower values = more fuzzy matches, higher values = stricter matches.
+ALTER DATABASE nextcloud SET pg_trgm.similarity_threshold = 0.3;
+
+-- Performance optimization: Set work_mem for better index building.
+ALTER DATABASE nextcloud SET maintenance_work_mem = '256MB';
+
+-- Log completion message.
+DO $$
+BEGIN
+    RAISE NOTICE '';
+    RAISE NOTICE '========================================';
+    RAISE NOTICE 'PostgreSQL Search Configuration Complete';
+    RAISE NOTICE '========================================';
+    RAISE NOTICE '';
+    RAISE NOTICE 'Vector Search (pgvector):';
+    RAISE NOTICE '  - Use vector data type for embeddings';
+    RAISE NOTICE '  - Create index: CREATE INDEX ON table USING ivfflat (embedding vector_cosine_ops);';
+    RAISE NOTICE '  - Query: ORDER BY embedding <=> query_vector LIMIT 10';
+    RAISE NOTICE '';
+    RAISE NOTICE 'Full-Text Search (pg_trgm):';
+    RAISE NOTICE '  - Create index: CREATE INDEX ON table USING gin (column gin_trgm_ops);';
+    RAISE NOTICE '  - Query: WHERE column %% ''search'' OR column ILIKE ''%%search%%''';
+    RAISE NOTICE '  - Similarity: ORDER BY similarity(column, ''search'') DESC';
+    RAISE NOTICE '';
+    RAISE NOTICE 'No external search engine (Solr/Elasticsearch) required!';
+    RAISE NOTICE '========================================';
+END $$;
+
+
+
+
diff --git a/docker/test-database-compatibility.sh b/docker/test-database-compatibility.sh
new file mode 100644
index 0000000..13fd93f
--- /dev/null
+++ b/docker/test-database-compatibility.sh
@@ -0,0 +1,281 @@
+#!/bin/bash
+#
+# test-database-compatibility.sh
+#
+# Test OpenRegister with both PostgreSQL and MariaDB to ensure compatibility
+#
+# Usage:
+#   ./docker/test-database-compatibility.sh [--skip-postgres] [--skip-mariadb]
+#
+
+set -e  # Exit on error
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+# Configuration
+SKIP_POSTGRES=false
+SKIP_MARIADB=false
+POSTGRES_WAIT=45
+MARIADB_WAIT=45
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        --skip-postgres)
+            SKIP_POSTGRES=true
+            shift
+            ;;
+        --skip-mariadb)
+            SKIP_MARIADB=true
+            shift
+            ;;
+        --help)
+            echo "Usage: $0 [--skip-postgres] [--skip-mariadb]"
+            echo ""
+            echo "Test OpenRegister with both PostgreSQL and MariaDB"
+            echo ""
+            echo "Options:"
+            echo "  --skip-postgres    Skip PostgreSQL tests"
+            echo "  --skip-mariadb     Skip MariaDB tests"
+            echo "  --help             Show this help message"
+            exit 0
+            ;;
+        *)
+            echo "Unknown option: $1"
+            echo "Use --help for usage information"
+            exit 1
+            ;;
+    esac
+done
+
+# Function to print colored messages
+log_info() {
+    echo -e "${BLUE}[INFO]${NC} $1"
+}
+
+log_success() {
+    echo -e "${GREEN}[SUCCESS]${NC} $1"
+}
+
+log_error() {
+    echo -e "${RED}[ERROR]${NC} $1"
+}
+
+log_warning() {
+    echo -e "${YELLOW}[WARNING]${NC} $1"
+}
+
+# Function to wait for service
+wait_for_service() {
+    local service=$1
+    local max_attempts=$2
+    local attempt=0
+
+    log_info "Waiting for $service to be ready..."
+
+    while [ $attempt -lt $max_attempts ]; do
+        if docker-compose ps | grep -q "$service.*healthy"; then
+            log_success "$service is ready!"
+            return 0
+        fi
+        attempt=$((attempt + 1))
+        echo -n "."
+        sleep 1
+    done
+
+    log_error "$service did not become ready in time"
+    return 1
+}
+
+# Function to cleanup
+cleanup() {
+    local profile=$1
+
+    log_info "Cleaning up $profile environment..."
+
+    if [ "$profile" = "mariadb" ]; then
+        docker-compose --profile mariadb down -v
+    else
+        docker-compose down -v
+    fi
+
+    log_success "Cleanup complete"
+}
+
+# Function to run Newman tests
+run_newman_tests() {
+    local db_type=$1
+
+    log_info "Running Newman integration tests with $db_type..."
+
+    # Check if Newman is installed in container
+    if ! docker exec -u 33 nextcloud which newman &>/dev/null; then
+        log_warning "Newman not found in container, installing..."
+ docker exec -u root nextcloud bash -c "apt-get update && apt-get install -y nodejs npm" + docker exec -u root nextcloud npm install -g newman + fi + + # Run Newman tests + if docker exec -u 33 nextcloud newman run \ + /var/www/html/custom_apps/openregister/tests/integration/openregister-crud.postman_collection.json \ + --env-var "base_url=http://localhost" \ + --env-var "admin_user=admin" \ + --env-var "admin_password=admin" \ + --reporters cli 2>&1 | tee "/tmp/newman-$db_type.log"; then + + # Extract test results + local assertions_executed=$(grep "assertions" "/tmp/newman-$db_type.log" | grep "executed" | awk '{print $4}') + local assertions_failed=$(grep "assertions" "/tmp/newman-$db_type.log" | grep "failed" | awk '{print $6}') + + if [ -n "$assertions_executed" ] && [ -n "$assertions_failed" ]; then + local assertions_passed=$((assertions_executed - assertions_failed)) + local pass_rate=$((assertions_passed * 100 / assertions_executed)) + + log_info "Test results for $db_type:" + echo " - Assertions executed: $assertions_executed" + echo " - Assertions passed: $assertions_passed" + echo " - Assertions failed: $assertions_failed" + echo " - Pass rate: ${pass_rate}%" + + if [ "$assertions_failed" -eq 0 ]; then + log_success "All tests passed with $db_type!" 
+ return 0
+ else
+ log_warning "$assertions_failed tests failed with $db_type"
+ return 1
+ fi
+ else
+ log_warning "Could not extract test results"
+ return 1
+ fi
+ else
+ log_error "Newman tests failed with $db_type"
+ return 1
+ fi
+}
+
+# Main execution
+main() {
+ local postgres_result=0
+ local mariadb_result=0
+
+ echo ""
+ log_info "=========================================="
+ log_info "OpenRegister Database Compatibility Tests"
+ log_info "=========================================="
+ echo ""
+
+ # Test PostgreSQL
+ if [ "$SKIP_POSTGRES" = false ]; then
+ echo ""
+ log_info "==================== PostgreSQL Tests ===================="
+ echo ""
+
+ # Cleanup any existing containers
+ cleanup "postgres"
+
+ # Start PostgreSQL stack
+ log_info "Starting PostgreSQL stack..."
+ docker-compose up -d
+
+ # Wait for services
+ sleep 10
+ wait_for_service "conduction-postgres" 60 || { log_error "PostgreSQL failed to start"; postgres_result=1; }
+
+ if [ $postgres_result -eq 0 ]; then
+ log_info "Waiting for Nextcloud initialization..."
+ sleep $POSTGRES_WAIT
+
+ # Check if OpenRegister is enabled
+ log_info "Enabling OpenRegister app..."
+ docker exec -u 33 nextcloud php occ app:enable openregister
+
+ # Run tests
+ run_newman_tests "postgresql" || postgres_result=1
+ fi
+
+ # Cleanup
+ cleanup "postgres"
+ else
+ log_info "Skipping PostgreSQL tests"
+ fi
+
+ # Test MariaDB
+ if [ "$SKIP_MARIADB" = false ]; then
+ echo ""
+ log_info "==================== MariaDB Tests ===================="
+ echo ""
+
+ # Cleanup any existing containers
+ cleanup "mariadb"
+
+ # Start MariaDB stack
+ log_info "Starting MariaDB stack..."
+ docker-compose --profile mariadb up -d
+
+ # Wait for services
+ sleep 10
+ wait_for_service "conduction-mariadb" 60 || { log_error "MariaDB failed to start"; mariadb_result=1; }
+
+ if [ $mariadb_result -eq 0 ]; then
+ log_info "Waiting for Nextcloud initialization..."
+ sleep $MARIADB_WAIT + + # Check if OpenRegister is enabled + log_info "Enabling OpenRegister app..." + docker exec -u 33 nextcloud php occ app:enable openregister + + # Run tests + run_newman_tests "mariadb" || mariadb_result=1 + fi + + # Cleanup + cleanup "mariadb" + else + log_info "Skipping MariaDB tests" + fi + + # Final summary + echo "" + log_info "==========================================" + log_info "Test Summary" + log_info "==========================================" + echo "" + + if [ "$SKIP_POSTGRES" = false ]; then + if [ $postgres_result -eq 0 ]; then + log_success "PostgreSQL: PASSED ✅" + else + log_error "PostgreSQL: FAILED ❌" + fi + fi + + if [ "$SKIP_MARIADB" = false ]; then + if [ $mariadb_result -eq 0 ]; then + log_success "MariaDB: PASSED ✅" + else + log_error "MariaDB: FAILED ❌" + fi + fi + + echo "" + + # Exit with error if any tests failed + if [ $postgres_result -ne 0 ] || [ $mariadb_result -ne 0 ]; then + log_error "Some tests failed. Please review the logs." + exit 1 + else + log_success "All tests passed! 🎉" + exit 0 + fi +} + +# Run main function +main + + diff --git a/docs/development-environment.md b/docs/development-environment.md new file mode 100644 index 0000000..4d4c1cf --- /dev/null +++ b/docs/development-environment.md @@ -0,0 +1,381 @@ +# Conduction Development Environment + +Shared Docker Compose setup for all ConductionNL Nextcloud app development. The compose file lives in this repository (`.github/docker-compose.yml`) and is used by all Conduction apps. + +## Prerequisites + +- Docker and Docker Compose v2+ +- The workspace directory should contain all app repos as siblings of `.github/`: + ``` + apps-extra/ + ├── .github/ ← this repo (contains docker-compose.yml) + ├── openregister/ + ├── opencatalogi/ + ├── softwarecatalog/ + ├── nldesign/ + ├── mydash/ + ├── docudesk/ + ├── procest/ + ├── pipelinq/ + ├── zaakafhandelapp/ + ├── larpingapp/ + └── ... 
+ ``` + +## Quick Start + +```bash +# Start the core environment (db + nextcloud + n8n) +docker compose -f .github/docker-compose.yml up -d + +# Nextcloud is available at http://localhost:8080 +# Login: admin / admin +``` + +## Profiles + +The compose file uses Docker profiles to organize optional services. Only the core services (db, nextcloud, exapp-n8n) start by default. + +### Starting with profiles + +```bash +# Single profile +docker compose -f .github/docker-compose.yml --profile mail up -d + +# Multiple profiles +docker compose -f .github/docker-compose.yml --profile mail --profile ai up -d + +# Everything (not recommended — very resource heavy) +docker compose -f .github/docker-compose.yml --profile mail --profile ai --profile commonground --profile integrations up -d +``` + +### Available profiles + +| Profile | Services | Purpose | Resources | +|---------|----------|---------|-----------| +| *(default)* | db, nextcloud, exapp-n8n | Core dev environment | ~1 GB | +| `demo` | nextcloud-demo | Self-contained demo (installs from app store) | ~1 GB | +| `mail` | greenmail | Test mail server (SMTP/IMAP) | ~512 MB | +| `ai` | presidio, tgi-llm, dolphin-vlm, openanonymiser, exapp-openwebui | AI/LLM services | ~40 GB + GPU | +| `ollama` | ollama | Standalone LLM inference | ~16 GB + GPU | +| `ui` | tilburg-woo-ui | Public WOO document frontend | ~256 MB | +| `exapps` | harp, redis, livekit, minio | AppAPI infrastructure | ~1 GB | +| `commonground` | keycloak, redis, livekit, minio, exapp-openklant, exapp-openzaak, exapp-valtimo, exapp-opentalk | Common Ground ExApps | ~10 GB | +| `solr` / `search` | solr, zookeeper | Solr search engine | ~1 GB | +| `elasticsearch` | elasticsearch | Elasticsearch backend | ~1 GB | +| `standalone` | n8n, open-webui | Standalone versions (not ExApps) | ~1 GB | +| `llm-management` | openllm | LLM model management | ~16 GB + GPU | +| `mariadb` | db-mariadb, nextcloud-mariadb | MariaDB compatibility testing | ~1 GB | +| `openproject` / 
`integrations` | openproject | Project management | ~4 GB | +| `xwiki` / `integrations` | xwiki | Wiki platform | ~4 GB | +| `ox` / `integrations` | open-xchange | Email and groupware (requires registry access) | ~4 GB | +| `valtimo` / `commonground` | valtimo | BPM and case management | ~2 GB | +| `openzaak` / `commonground` | openzaak | ZGW API case management | ~2 GB | +| `openklant` / `commonground` | openklant | Customer interaction registry | ~1 GB | +| `exapps-legacy` | docker-socket-proxy | Legacy AppAPI deploy daemon | ~128 MB | + +## Service Details + +### Core: Nextcloud + PostgreSQL + +| Service | Container | Port | Notes | +|---------|-----------|------|-------| +| PostgreSQL | `conduction-postgres` | 5432 | pgvector enabled, shared by all services | +| Nextcloud | `nextcloud` | 8080 | Admin: `admin` / `admin` | +| n8n ExApp | `conduction-exapp-n8n` | — | Via Nextcloud AppAPI proxy | + +All Conduction apps are mounted as volumes into Nextcloud's `custom_apps` directory. Changes to app source code are immediately reflected. + +### Mail & Groupware + +Two options are available depending on your needs: + +| | GreenMail (`--profile mail`) | Open-Xchange (`--profile ox`) | +|---|---|---| +| **Use for** | Day-to-day development, quick testing | Production-like integration, client demos | +| **Complexity** | Zero config, works immediately | Multi-service, requires registry access | +| **Mail** | SMTP + IMAP (auto-create accounts) | Full IMAP/SMTP via Dovecot/Postfix | +| **Calendar** | Use Nextcloud Calendar (CalDAV) | Built-in calendar + CalDAV | +| **Contacts** | Use Nextcloud Contacts (CardDAV) | Built-in contacts + CardDAV | +| **Web UI** | REST API at :8085 | Full webmail at :8087 | +| **Seed data** | Scripts included | CLI user/context creation | + +#### Option A: GreenMail (recommended for development) + +Lightweight test mail server. Accounts auto-created on first email. No configuration needed. 
+ +| Service | Container | Port | Protocol | +|---------|-----------|------|----------| +| GreenMail | `conduction-greenmail` | 3025 | SMTP | +| | | 3143 | IMAP | +| | | 3110 | POP3 | +| | | 8085 | Web UI / REST API | + +**Setup:** + +```bash +# 1. Start +docker compose -f .github/docker-compose.yml --profile mail up -d + +# 2. Seed test emails (creates accounts automatically) +bash .github/docker/mail/seed-mail.sh + +# 3. Seed contacts and calendar events into Nextcloud +bash .github/docker/mail/seed-pim.sh +``` + +**Configure Nextcloud Mail app:** +- Go to Nextcloud (http://localhost:8080) → Mail app → Settings +- Add account: + - **IMAP**: Host `greenmail`, Port `3143`, Security `None` + - **SMTP**: Host `greenmail`, Port `3025`, Security `None` + - **User**: `behandelaar@test.local` (or any seeded address) + - **Password**: same as the email address + +**Test accounts** (auto-created by seed script): + +| Email | Role | Description | +|-------|------|-------------| +| `admin@test.local` | System admin | Administrative notifications | +| `behandelaar@test.local` | Case handler | Processes applications and cases | +| `coordinator@test.local` | Team coordinator | Planning, oversight, IT liaison | +| `burger@test.local` | Citizen | Submits applications and complaints | +| `leverancier@test.local` | Supplier/vendor | External IT partner | + +**Seed data includes:** +- 12 emails: case applications, status updates, internal coordination, complaints, deadlines +- 6 contacts: citizens, civil servants, VNG architect, supplier (CardDAV) +- 5 calendar events: sprint review, welstandscommissie, IT overleg, deadlines, retrospective (CalDAV) + +All seed data is interconnected around realistic Dutch municipal case management scenarios (omgevingsvergunning, kapvergunning, IT-migratie). + +#### Option B: Open-Xchange (for production-like testing) + +Full groupware suite with integrated webmail, calendar, contacts, and document editing (OX Text + Spreadsheet). 
Uses GreenMail as its IMAP/SMTP backend — the same mail server, so both OX and Nextcloud Mail see the same emails. + +| Service | Container | Port | Purpose | +|---------|-----------|------|---------| +| Open-Xchange | `conduction-open-xchange` | 8087 | Web UI (AppSuite) | +| OX MariaDB | `conduction-ox-mariadb` | — | Dedicated database for OX | +| GreenMail | `conduction-greenmail` | 3025/3143/8085 | Shared IMAP/SMTP (auto-starts) | + +**Architecture:** +``` +GreenMail (IMAP/SMTP) +├── OX AppSuite → authenticates + reads/sends mail via IMAP/SMTP +└── Nextcloud Mail → reads/sends mail via IMAP/SMTP + Both see the same mailboxes and emails. +``` + +**Setup:** + +```bash +# 1. Start OX (also starts GreenMail + OX MariaDB automatically) +docker compose -f .github/docker-compose.yml --profile ox up -d + +# 2. Wait for OX to finish initialization (first boot takes 2-3 minutes) +docker logs -f conduction-open-xchange +# Wait for: "*** Restart completed ***" + +# 3. Seed test emails into GreenMail +bash .github/docker/mail/seed-mail.sh + +# 4. Create OX users and seed contacts/calendar +bash .github/docker/mail/seed-ox.sh + +# 5. Access OX at http://localhost:8087/appsuite +``` + +**Login credentials:** + +| Username | Password | Role | +|----------|----------|------| +| `oxadmin` | `oxadmin` | Context admin | +| `behandelaar` | `behandelaar@test.local` | Case handler | +| `coordinator` | `coordinator@test.local` | Team coordinator | +| `burger` | `burger@test.local` | Citizen | +| `leverancier` | `leverancier@test.local` | Supplier | + +Note: User passwords match their GreenMail email addresses because OX authenticates via IMAP against GreenMail. 
+ +**What's included after seeding:** +- 5 user accounts with Dutch names and roles +- 4 contacts (citizens, supplier, VNG architect) +- 2 calendar appointments (welstandscommissie, IT-overleg) +- 12 emails visible in OX webmail (from seed-mail.sh via GreenMail) +- OX Text and Spreadsheet editors enabled + +**Connecting Nextcloud to OX:** +- Both OX and Nextcloud Mail share the same GreenMail IMAP server +- Configure Nextcloud Mail with: IMAP host `greenmail`, port `3143` (same as standalone mail profile) +- OX can mount Nextcloud files via WebDAV for document collaboration + +**Resource requirements:** +- OX needs ~2-4 GB RAM +- First boot initializes databases and takes 2-3 minutes +- Subsequent boots are faster (config is persisted in `ox-etc` volume) + +### AI Services (--profile ai) + +| Service | Container | Port | Purpose | +|---------|-----------|------|---------| +| Presidio | `conduction-presidio-analyzer` | 5001 | PII detection (Microsoft) | +| TGI LLM | `conduction-tgi-llm` | 8081 | Text generation (HuggingFace) | +| Dolphin VLM | `conduction-dolphin-vlm` | 8083 | Document parsing (Vision LM) | +| OpenAnonymiser | `conduction-openanonymiser` | 5002 | PII anonymisation | +| OpenWebUI ExApp | `conduction-exapp-openwebui` | — | AI chat via Nextcloud | + +Requires NVIDIA GPU with Docker GPU support configured. 
+ +### Common Ground (--profile commonground) + +Dutch government Common Ground services, running as Nextcloud ExApps: + +| Service | Container | Port | Purpose | +|---------|-----------|------|---------| +| Keycloak | `conduction-exapp-keycloak` | 8180 | Identity management (OIDC) | +| OpenKlant ExApp | `conduction-exapp-openklant` | — | Customer interaction registry | +| OpenZaak ExApp | `conduction-exapp-openzaak` | — | ZGW case management | +| Valtimo ExApp | `conduction-exapp-valtimo` | — | BPM and case management | +| OpenTalk ExApp | `conduction-exapp-opentalk` | — | Video conferencing | +| Redis | `conduction-exapp-redis` | — | Shared cache | +| LiveKit | `conduction-exapp-livekit` | 7880 | WebRTC media server | +| MinIO | `conduction-exapp-minio` | — | Object storage | + +## Common Operations + +### Reset the environment + +```bash +# Stop everything and remove volumes (full reset) +docker compose -f .github/docker-compose.yml down -v + +# Restart +docker compose -f .github/docker-compose.yml up -d +``` + +### Install apps after reset + +```bash +docker exec nextcloud php occ app:enable openregister +docker exec nextcloud php occ app:enable opencatalogi +docker exec nextcloud php occ app:enable softwarecatalog +docker exec nextcloud php occ app:enable nldesign +docker exec nextcloud php occ app:enable mydash +``` + +### Clear caches + +```bash +# OPcache (after PHP changes) +docker exec nextcloud apache2ctl graceful + +# APCu +docker exec nextcloud php -r "apcu_clear_cache();" + +# Brute force protection +docker exec nextcloud php occ security:bruteforce:reset 127.0.0.1 +``` + +### Fix file permissions + +```bash +docker exec -u root nextcloud chown -R www-data:www-data /var/www/html/custom_apps/ +``` + +### View logs + +```bash +# Nextcloud log +docker exec nextcloud tail -f /var/www/html/data/nextcloud.log + +# Specific container +docker logs -f conduction-postgres +``` + +## Port Map + +Quick reference for all service ports: + +| Port | Service | 
Profile | +|------|---------|---------| +| 2181 | ZooKeeper | solr | +| 2375 | Docker Socket Proxy | exapps-legacy | +| 3025 | GreenMail SMTP | mail | +| 3110 | GreenMail POP3 | mail | +| 3143 | GreenMail IMAP | mail | +| 3306 | MariaDB | mariadb | +| 5001 | Presidio Analyzer | ai | +| 5002 | OpenAnonymiser | ai | +| 5432 | PostgreSQL | *(default)* | +| 5678 | n8n (standalone) | standalone | +| 7880 | LiveKit | commonground | +| 8080 | **Nextcloud** | *(default)* | +| 8081 | TGI LLM | ai | +| 8083 | Dolphin VLM | ai | +| 8085 | GreenMail Web UI | mail | +| 8086 | OpenProject | openproject | +| 8087 | Open-Xchange | ox | +| 8088 | XWiki | xwiki | +| 8089 | Valtimo | valtimo | +| 8090 | OpenZaak | openzaak | +| 8091 | OpenKlant | openklant | +| 8180 | Keycloak | commonground | +| 8780 | HaRP | exapps | +| 8983 | Solr | solr | +| 9200 | Elasticsearch | elasticsearch | +| 11434 | Ollama | ollama | + +## Demo Mode (--profile demo) + +Self-contained demo environment that installs apps from the Nextcloud app store. No source code needed — just start and demo. + +```bash +docker compose -f .github/docker-compose.yml --profile demo up -d +``` + +This starts a Nextcloud instance that automatically installs OpenRegister, OpenCatalogi, and SoftwareCatalog from the app store. Useful for demos, stakeholder reviews, and quick testing without a full development setup. + +**Note**: The `demo` profile uses a different Nextcloud service than the default. Do not combine `demo` with the default profile. + +## Database Testing (--profile mariadb) + +The MariaDB profile provides compatibility testing against MariaDB instead of PostgreSQL. 
+ +```bash +# Start with MariaDB +docker compose -f .github/docker-compose.yml --profile mariadb up -d +``` + +An automated test script is available for running integration tests against both databases: +```bash +bash .github/docker/test-database-compatibility.sh +``` + +Key differences between PostgreSQL and MariaDB: +- **Vector search**: Only available with PostgreSQL (pgvector) +- **Full-text search**: Both supported, different syntax +- **JSON operations**: PostgreSQL has richer JSON support +- **Boolean handling**: MariaDB uses 0/1, PostgreSQL uses true/false + +See [docker/README-DATABASE-TESTING.md](../docker/README-DATABASE-TESTING.md) for detailed documentation. + +## Migrating from OpenRegister docker-compose + +If you were previously using `openregister/docker-compose.yml`, update your commands: + +```bash +# Old +docker compose -f openregister/docker-compose.yml up -d + +# New +docker compose -f .github/docker-compose.yml up -d +``` + +Container names have changed from `openregister-*` to `conduction-*` (except `nextcloud` which stays the same). The network is now `conduction-network` instead of `openregister-network`. + +**Note**: You may need to remove old volumes if switching: +```bash +docker compose -f openregister/docker-compose.yml down -v +docker compose -f .github/docker-compose.yml up -d +```