# syntax=docker/dockerfile:1.7
#
# z4j-brain unified image (backend + dashboard).
#
# Multi-stage build:
#   1. dashboard-builder — Node 24 + pnpm. Compiles the
#      TanStack Router + Vite dashboard to static HTML/CSS/JS.
#   2. python-builder    — Python 3.14 + uv. Builds the brain
#      backend wheel and resolves all runtime deps into a venv.
#   3. runtime           — Debian Trixie slim. Copies the venv
#      from stage 2 and the dashboard dist from stage 1, then
#      runs as a non-root user under tini.
#
# We deliberately base on python:3.14-slim-trixie (Debian 13).
# Alpine is not supported — see docs/CLAUDE.md §4.6 for the
# rationale (glibc wheels, asyncpg + argon2-cffi + uvloop
# compile-from-source pain on musl, enterprise scanner baselines).
#
# Build context: the monorepo root, NOT this directory:
#
#     docker build -f packages/z4j-brain/backend/Dockerfile -t z4j-brain .

ARG PYTHON_VERSION=3.14
ARG DEBIAN_RELEASE=trixie
ARG NODE_VERSION=24

# ---------------------------------------------------------------------------
# Stage 1 — dashboard build
# ---------------------------------------------------------------------------
FROM node:${NODE_VERSION}-bookworm-slim AS dashboard-builder

ENV CI=true \
    PNPM_HOME=/root/.local/share/pnpm \
    PATH=/root/.local/share/pnpm:$PATH

RUN corepack enable && corepack prepare pnpm@9.15.0 --activate

WORKDIR /dashboard

# Copy package metadata first so the install layer caches when only
# source changes. The lockfile is copied with a glob so the build does
# not fail if it is absent; in that case the frozen install below falls
# back to a plain install.
COPY packages/z4j-brain/dashboard/package.json packages/z4j-brain/dashboard/pnpm-lock.yaml* ./

RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
    pnpm install --frozen-lockfile || pnpm install

# Copy the rest of the dashboard source and build.
COPY packages/z4j-brain/dashboard/ ./

RUN pnpm build

# ---------------------------------------------------------------------------
# Stage 2 — python builder
# ---------------------------------------------------------------------------
FROM python:${PYTHON_VERSION}-slim-${DEBIAN_RELEASE} AS python-builder

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        curl \
        libffi-dev \
        libpq-dev \
    && rm -rf /var/lib/apt/lists/*

RUN pip install --no-cache-dir "uv>=0.5.10"

# IMPORTANT: do NOT copy the workspace root pyproject.toml here.
# If uv sees a workspace root, it installs every workspace member
# as an editable .pth pointing at the build context — and the
# runtime stage doesn't have the build context, so imports fail
# at startup with "No module named z4j_core". By copying the two
# package trees into a non-workspace directory, uv builds real
# wheels from each pyproject.toml and installs them into /opt/venv.
WORKDIR /build
COPY packages/z4j-core /build/packages/z4j-core
COPY packages/z4j-brain /build/packages/z4j-brain

RUN uv venv /opt/venv \
    && VIRTUAL_ENV=/opt/venv uv pip install \
        --no-cache \
        /build/packages/z4j-core \
        /build/packages/z4j-brain
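
# Optional sanity check, not part of the build: run it manually from this
# stage if the "No module named ..." failure ever resurfaces. Both packages
# should resolve from the venv, not from /build:
#
#     /opt/venv/bin/python -c "import z4j_core, z4j_brain; print(z4j_core.__file__)"
#
# The printed path should live under /opt/venv/lib/..., never under /build.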

# ---------------------------------------------------------------------------
# Stage 3 — runtime
# ---------------------------------------------------------------------------
FROM python:${PYTHON_VERSION}-slim-${DEBIAN_RELEASE} AS runtime

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PATH="/opt/venv/bin:${PATH}" \
    Z4J_LOG_JSON=true \
    Z4J_BIND_HOST=0.0.0.0 \
    Z4J_BIND_PORT=8080 \
    Z4J_DASHBOARD_DIST=/app/dashboard/dist \
    Z4J_ALEMBIC_INI=/app/alembic.ini

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        libpq5 \
        tini \
    && rm -rf /var/lib/apt/lists/* \
    && groupadd --system --gid 10001 z4j \
    && useradd --system --uid 10001 --gid z4j --home /app --shell /usr/sbin/nologin z4j

# Backend venv from python-builder.
COPY --from=python-builder /opt/venv /opt/venv

# Dashboard dist from dashboard-builder. Static files only —
# no Node runtime in the final image.
COPY --from=dashboard-builder /dashboard/dist /app/dashboard/dist

# Runtime-specific alembic.ini.
#
# The source-tree alembic.ini uses `script_location = src/z4j_brain/migrations`
# which only resolves when run from the backend source dir. In the
# image we don't ship the source tree — only the installed wheel —
# so we use Python package notation, which alembic resolves through
# importlib against the wheel under /opt/venv. env.py is identical
# in both layouts so no other config changes are needed. Note that
# printf '%s\n' writes its arguments verbatim, so the logging format
# strings below carry a single %, exactly as a stock alembic.ini does.
RUN printf '%s\n' \
    '[alembic]' \
    'script_location = z4j_brain:migrations' \
    'path_separator = os' \
    'timezone = UTC' \
    'sqlalchemy.url =' \
    '' \
    '[loggers]' \
    'keys = root,sqlalchemy,alembic' \
    '[handlers]' \
    'keys = console' \
    '[formatters]' \
    'keys = generic' \
    '[logger_root]' \
    'level = WARNING' \
    'handlers = console' \
    '[logger_sqlalchemy]' \
    'level = WARNING' \
    'handlers =' \
    'qualname = sqlalchemy.engine' \
    '[logger_alembic]' \
    'level = INFO' \
    'handlers =' \
    'qualname = alembic' \
    '[handler_console]' \
    'class = StreamHandler' \
    'args = (sys.stderr,)' \
    'level = NOTSET' \
    'formatter = generic' \
    '[formatter_generic]' \
    'format = %(levelname)-5.5s [%(name)s] %(message)s' \
    'datefmt = %Y-%m-%d %H:%M:%S' \
    > /app/alembic.ini
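
# To sanity-check that the package-notation script_location resolves inside
# the built image, something like the following works (illustrative, assuming
# the image tag from the header; no database is needed because `alembic heads`
# only reads the version scripts):
#
#     docker run --rm --entrypoint alembic z4j-brain -c /app/alembic.ini heads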

# Entrypoint shim: run migrations to head, then exec the brain.
# exec replaces the shell with the server process, so tini (PID 1)
# forwards signals straight to it and `docker stop` shuts the brain
# down cleanly instead of waiting out the kill grace period.
RUN printf '%s\n' \
    '#!/bin/sh' \
    'set -e' \
    'echo "[z4j-brain] running alembic upgrade head"' \
    'z4j-brain migrate upgrade head' \
    'echo "[z4j-brain] starting server"' \
    'exec "$@"' \
    > /app/entrypoint.sh \
    && chmod +x /app/entrypoint.sh

# Make sure z4j owns its writable surfaces.
RUN chown -R z4j:z4j /app

WORKDIR /app
USER z4j

EXPOSE 8080

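# Liveness probe against the brain's own health endpoint. It uses the Python
# stdlib rather than curl, so the production image does not need curl installed.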
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \
    CMD python -c "import urllib.request,sys; \
sys.exit(0 if urllib.request.urlopen('http://127.0.0.1:8080/api/v1/health',timeout=3).status==200 else 1)"

ENTRYPOINT ["/usr/bin/tini", "--", "/app/entrypoint.sh"]
CMD ["z4j-brain", "serve"]


# ---------------------------------------------------------------------------
# Stage 4 — dev-runtime (used by docker-compose.dev.yml only)
# ---------------------------------------------------------------------------
# Same venv as production but:
#   - no dashboard dist (Vite serves the dashboard from a sibling
#     container with HMR)
#   - source mounted as a volume so file changes hot-reload
#   - uvicorn --reload as the command
# It still runs as the same non-root z4j user as production.
#
# This stage exists so contributors can run the brain in Docker
# without paying the dashboard build cost on every restart.
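#
# A typical invocation looks roughly like this (illustrative; the dev compose
# file is the authoritative source for mounts, env vars and service wiring):
#
#     docker build -f packages/z4j-brain/backend/Dockerfile \
#         --target dev-runtime -t z4j-brain:dev .
#     docker run --rm -p 8080:8080 \
#         -v "$PWD/packages:/app/packages" \
#         z4j-brain:dev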
FROM python:${PYTHON_VERSION}-slim-${DEBIAN_RELEASE} AS dev-runtime

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PATH="/opt/venv/bin:${PATH}" \
    Z4J_LOG_JSON=false \
    Z4J_BIND_HOST=0.0.0.0 \
    Z4J_BIND_PORT=8080 \
    Z4J_ALEMBIC_INI=/app/alembic.ini

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        libpq5 \
        tini \
    && rm -rf /var/lib/apt/lists/* \
    && groupadd --system --gid 10001 z4j \
    && useradd --system --uid 10001 --gid z4j --home /app --shell /bin/sh z4j

COPY --from=python-builder /opt/venv /opt/venv

# /app is the working directory; create it before writing files
# into it (the production runtime stage gets /app implicitly from the
# dashboard COPY, but this stage copies nothing into /app).
RUN mkdir -p /app

# Same runtime alembic.ini as the production stage. COPYing it from the
# production runtime stage would pull that stage (and with it the dashboard
# build) into this target's dependency graph, so this stage re-creates the
# file instead. It's a small price for keeping the dev brain on the exact
# same migration path as production.
RUN printf '%s\n' \
    '[alembic]' \
    'script_location = z4j_brain:migrations' \
    'path_separator = os' \
    'timezone = UTC' \
    'sqlalchemy.url =' \
    '' \
    '[loggers]' \
    'keys = root,sqlalchemy,alembic' \
    '[handlers]' \
    'keys = console' \
    '[formatters]' \
    'keys = generic' \
    '[logger_root]' \
    'level = WARNING' \
    'handlers = console' \
    '[logger_sqlalchemy]' \
    'level = WARNING' \
    'handlers =' \
    'qualname = sqlalchemy.engine' \
    '[logger_alembic]' \
    'level = INFO' \
    'handlers =' \
    'qualname = alembic' \
    '[handler_console]' \
    'class = StreamHandler' \
    'args = (sys.stderr,)' \
    'level = NOTSET' \
    'formatter = generic' \
    '[formatter_generic]' \
    'format = %(levelname)-5.5s [%(name)s] %(message)s' \
    'datefmt = %Y-%m-%d %H:%M:%S' \
    > /app/alembic.ini

# Same entrypoint shim as production: run migrations to head, then
# exec the actual server. Source changes do not require an image
# rebuild: uvicorn --reload watches the mounted source paths and
# restarts the Python process in place.
RUN printf '%s\n' \
    '#!/bin/sh' \
    'set -e' \
    'echo "[z4j-brain] running alembic upgrade head"' \
    'z4j-brain migrate upgrade head' \
    'echo "[z4j-brain] starting dev server with hot-reload"' \
    'exec "$@"' \
    > /app/entrypoint.sh \
    && chmod +x /app/entrypoint.sh

RUN chown -R z4j:z4j /app

WORKDIR /app
USER z4j

EXPOSE 8080

ENTRYPOINT ["/usr/bin/tini", "--", "/app/entrypoint.sh"]
CMD ["uvicorn", "z4j_brain.main:create_app", \
     "--factory", "--host", "0.0.0.0", "--port", "8080", \
     "--reload", "--reload-dir", "/app/packages/z4j-brain/backend/src", \
     "--reload-dir", "/app/packages/z4j-core/src"]
