# syntax=docker/dockerfile:1
# NOTE(review): ':latest' is not reproducible — pin a specific tag or digest
# (e.g. smarbuy/abi-image:<version>@sha256:…) once the base image is versioned.
FROM smarbuy/abi-image:latest

# All agent code lives under /app (also the PYTHONPATH root set below)
WORKDIR /app

# Copy only the agent's Python sources; keeping the COPY paths narrow means
# unrelated build-context changes don't invalidate these layers
COPY ./agent/*.py /app/agent/
COPY ./agent/models/*.py /app/agent/models/

# Runtime environment for abi-entrypoint.sh (inherited from the base image),
# grouped into two logical ENV instructions (agent identity / entrypoint
# behavior) instead of one instruction per variable.
ENV PYTHONPATH=/app \
    PYTHONUNBUFFERED=1 \
    ABI_ROLE="Planner Agent" \
    ABI_NODE="ABI AGENT" \
    AGENT_HOST=0.0.0.0 \
    AGENT_PORT=11437

# ABI entrypoint configuration: start Ollama, load models, then launch the
# service module. SERVICE_PORT is derived from AGENT_PORT (set in the
# previous ENV instruction) so the two values cannot drift apart.
ENV START_OLLAMA=true \
    LOAD_MODELS=true \
    SERVICE_MODULE=main \
    SERVICE_PORT=${AGENT_PORT}

# Health check — the container is healthy only when BOTH the Ollama API
# (11434) and the agent's /health endpoint (11437) respond.
#   -f  : treat HTTP errors (4xx/5xx) as a non-zero exit code
#   -sS : silence the progress meter but keep real errors on stderr, so
#         failures are visible in `docker inspect` health logs instead of
#         being discarded by a >/dev/null 2>&1 redirect
#   -o /dev/null : drop the response body; only the exit code matters
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
    CMD curl -fsS -o /dev/null http://localhost:11434/api/tags && \
        curl -fsS -o /dev/null http://localhost:11437/health || exit 1

# Expose ports (agent + Ollama). EXPOSE is documentation only — it does not
# publish the ports; use -p/--publish (or compose port mappings) at run time.
EXPOSE 11437
EXPOSE 11434

# Volume for model data: persists Ollama models across container restarts so
# they are not re-downloaded on every start. No build step in this file writes
# to this path, so nothing is discarded by declaring the volume here.
# NOTE(review): no USER directive in this file — the container runs as
# whatever user abi-image configures (likely root); confirm the base image
# or entrypoint drops privileges.
VOLUME ["/root/.ollama"]

# Use abi-entrypoint.sh (inherited from abi-image)
# The entrypoint will handle Ollama startup (this agent's local LLM) and service startup
# No need to override CMD - entrypoint will use SERVICE_MODULE
