# --- NOTE: web-UI export residue below, commented out so the file parses ----
# Files
# danbooru-mcp/Dockerfile
# Aodhan Collins 08c6e14616 Initial commit
# 2026-03-02 23:29:58 +00:00
#
# 98 lines
# 3.1 KiB
# Docker
# Raw Blame History
#
# This file contains ambiguous Unicode characters
#
# This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
# (The "ambiguous Unicode" here is only typographic dashes/ellipses and a few
# symbols inside comments — harmless to the build.)

# syntax=docker/dockerfile:1
# ---------------------------------------------------------------------------
# danbooru-mcp — Dockerfile
#
# Stages:
# base Python 3.12 slim + system deps
# scraper runs scripts/scrape_tags.py to build db/tags.db
# (only executes if no pre-built DB is bind-mounted)
# runtime minimal image that runs src/server.py (MCP stdio server)
#
# Usage:
# Build (includes fresh DB scrape — takes ~15 min first time):
# docker build -t danbooru-mcp .
#
# Run (MCP clients invoke via stdio):
# docker run --rm -i danbooru-mcp
#
# Or use docker-compose (recommended — mounts a persistent DB volume):
# docker compose up
# ---------------------------------------------------------------------------
# Parameterizes every FROM below; override with --build-arg PYTHON_VERSION=3.13
ARG PYTHON_VERSION=3.12
# ---------------------------------------------------------------------------
# Stage 1: base — shared deps
# ---------------------------------------------------------------------------
FROM python:${PYTHON_VERSION}-slim AS base
WORKDIR /app
# System deps: sqlite3 is already in python:slim.
# gcc is needed only so pip can compile packages that ship no prebuilt wheel
# for this platform. (The HEALTHCHECK in the runtime stage uses python, not
# curl, so no curl is installed here.)
RUN apt-get update && apt-get install -y --no-install-recommends \
gcc \
&& rm -rf /var/lib/apt/lists/*
# Copy the dependency manifest alone first: the pip layer stays cached until
# requirements.txt itself changes, even when scripts/ or src/ are edited.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Source comes last — the most frequently changing layers.
COPY scripts/ scripts/
COPY src/ src/
# ---------------------------------------------------------------------------
# Stage 2: scraper — build the SQLite database
#
# Credentials are provided via BuildKit secret mounts instead of ARG/ENV so
# they never end up in image layers or `docker history`:
#
#   docker build \
#     --secret id=danbooru_user,env=DANBOORU_USER \
#     --secret id=danbooru_api_key,env=DANBOORU_API_KEY \
#     -t danbooru-mcp .
#
# Both secrets are optional (secret mounts default to required=false), so a
# plain `docker build` still scrapes anonymously — same behavior as the old
# empty-string ARG defaults. The `env=` mount option needs the dockerfile:1
# frontend pinned by the syntax directive at the top of this file.
# ---------------------------------------------------------------------------
FROM base AS scraper
# Run the scraper — will create db/tags.db with all tags ≥10 posts, then
# rebuild the FTS5 index in the SAME layer so the shipped DB is searchable.
# The scraper writes with --no-fts first; the heredoc issues FTS5's special
# 'rebuild' command to repopulate the index from the tags table in one pass.
RUN --mount=type=secret,id=danbooru_user,env=DANBOORU_USER \
    --mount=type=secret,id=danbooru_api_key,env=DANBOORU_API_KEY \
    mkdir -p db && \
    python scripts/scrape_tags.py --no-fts && \
    echo "Scrape complete. Rebuilding FTS5 index…" && \
    python - <<'EOF'
import sqlite3

conn = sqlite3.connect("db/tags.db")
try:
    conn.execute("INSERT INTO tags_fts(tags_fts) VALUES('rebuild')")
    conn.commit()
finally:
    conn.close()
print("FTS5 index built.")
EOF
# ---------------------------------------------------------------------------
# Stage 3: runtime — lean image with pre-built DB
# ---------------------------------------------------------------------------
FROM python:${PYTHON_VERSION}-slim AS runtime
WORKDIR /app
# Non-root user for security. Created BEFORE the COPY steps so ownership can
# be set with --chown; a trailing `RUN chown -R` would duplicate every copied
# file into an extra layer. A fixed numeric UID/GID (10001) lets runtimes
# that enforce runAsNonRoot verify the user without reading /etc/passwd.
RUN groupadd --system --gid 10001 mcpuser && \
    useradd --system --uid 10001 --gid mcpuser \
            --no-create-home --shell /bin/false mcpuser
# Install only runtime deps (no gcc)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy source, owned by the runtime user
COPY --chown=mcpuser:mcpuser src/ src/
# Copy the scraped database from the scraper stage; --chown also applies to
# the db/ directory COPY creates, preserving the old writable-by-owner setup.
COPY --from=scraper --chown=mcpuser:mcpuser /app/db/tags.db db/tags.db
# The MCP server communicates via stdio — no ports needed
ENV DANBOORU_TAGS_DB=/app/db/tags.db
USER mcpuser
# Health check: verify the DB is readable (python stands in for curl — this
# image has no HTTP endpoint, and no curl installed)
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
    CMD python -c "import sqlite3; c=sqlite3.connect('/app/db/tags.db'); print(c.execute('SELECT COUNT(*) FROM tags').fetchone()[0]); c.close()"
# Exec form: server.py is PID 1 and receives SIGTERM from `docker stop`
ENTRYPOINT ["python", "src/server.py"]