diff --git a/Makefile b/Makefile index 59f2a9b..e33287d 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ .PHONY: help build build-admin build-voice dev dev-admin dev-voice clean download-model swagger docker-build docker-buildx docker-push postgres redis ollama infra infra-stop infra-clean CONFIG ?= config.yaml +CONTAINER_ENGINE ?= $(shell command -v podman 2>/dev/null || echo docker) help: @echo "Magec - Multi-Agent AI Platform" @@ -91,54 +92,54 @@ IMAGE_TAG ?= latest DOCKER_PLATFORMS ?= linux/amd64,linux/arm64 docker-build: - @docker build -f docker/build/Dockerfile -t $(IMAGE_NAME):$(IMAGE_TAG) . + @$(CONTAINER_ENGINE) build -f docker/build/Dockerfile -t $(IMAGE_NAME):$(IMAGE_TAG) . @echo "Image built: $(IMAGE_NAME):$(IMAGE_TAG)" docker-buildx: - @docker buildx build -f docker/build/Dockerfile --platform $(DOCKER_PLATFORMS) -t $(IMAGE_NAME):$(IMAGE_TAG) . + @$(CONTAINER_ENGINE) buildx build -f docker/build/Dockerfile --platform $(DOCKER_PLATFORMS) -t $(IMAGE_NAME):$(IMAGE_TAG) . @echo "Multi-arch image built: $(IMAGE_NAME):$(IMAGE_TAG) [$(DOCKER_PLATFORMS)]" docker-push: - @docker buildx build -f docker/build/Dockerfile --platform $(DOCKER_PLATFORMS) -t $(IMAGE_NAME):$(IMAGE_TAG) --push . + @$(CONTAINER_ENGINE) buildx build -f docker/build/Dockerfile --platform $(DOCKER_PLATFORMS) -t $(IMAGE_NAME):$(IMAGE_TAG) --push . 
@echo "Image pushed: $(IMAGE_NAME):$(IMAGE_TAG) [$(DOCKER_PLATFORMS)]" # Infrastructure (Docker) postgres: - @docker run -d --name magec-postgres \ + @$(CONTAINER_ENGINE) run -d --name magec-postgres \ -p 5432:5432 \ -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_DB=magec \ - pgvector/pgvector:pg17 + docker.io/pgvector/pgvector:pg17 @echo "PostgreSQL (pgvector) started on localhost:5432" redis: - @docker run -d --name magec-redis \ + @$(CONTAINER_ENGINE) run -d --name magec-redis \ -p 6379:6379 \ - redis:alpine + docker.io/library/redis:alpine @echo "Redis started on localhost:6379" ollama: - @docker run -d --name magec-ollama \ + @$(CONTAINER_ENGINE) run -d --name magec-ollama \ -p 11434:11434 \ -v ollama:/root/.ollama \ - ollama/ollama + docker.io/ollama/ollama @echo "Waiting for Ollama to start..." @sleep 3 - @docker exec magec-ollama ollama pull qwen3:8b - @docker exec magec-ollama ollama pull nomic-embed-text + @$(CONTAINER_ENGINE) exec magec-ollama ollama pull qwen3:8b + @$(CONTAINER_ENGINE) exec magec-ollama ollama pull nomic-embed-text @echo "Ollama started on localhost:11434 with qwen3:8b and nomic-embed-text" infra: postgres redis @echo "Infrastructure ready" infra-stop: - @docker stop magec-postgres magec-redis 2>/dev/null || true - @docker rm magec-postgres magec-redis 2>/dev/null || true + @$(CONTAINER_ENGINE) stop magec-postgres magec-redis 2>/dev/null || true + @$(CONTAINER_ENGINE) rm magec-postgres magec-redis 2>/dev/null || true @echo "Infrastructure stopped" infra-clean: infra-stop - @docker stop magec-ollama 2>/dev/null || true - @docker rm magec-ollama 2>/dev/null || true - @docker volume rm ollama 2>/dev/null || true + @$(CONTAINER_ENGINE) stop magec-ollama 2>/dev/null || true + @$(CONTAINER_ENGINE) rm magec-ollama 2>/dev/null || true + @$(CONTAINER_ENGINE) volume rm ollama 2>/dev/null || true @echo "All containers and volumes removed" diff --git a/docker/compose/docker-compose.yaml b/docker/compose/docker-compose.yaml index e34de65..2e3e58e 
100644 --- a/docker/compose/docker-compose.yaml +++ b/docker/compose/docker-compose.yaml @@ -2,7 +2,9 @@ # Everything runs on your machine: LLM, STT, TTS, embeddings, memory. # No API keys, no cloud accounts, no data leaving your network. # -# GPU: Uncomment the 'deploy' section under ollama for NVIDIA acceleration. +# GPU: Uncomment ONE of the GPU sections under ollama: +# - Docker: the 'deploy' block (uses nvidia container runtime) +# - Podman: the 'devices' line (uses CDI — requires nvidia-ctk cdi generate) # # Usage: # docker compose up -d @@ -31,13 +33,13 @@ services: restart: unless-stopped redis: - image: redis:alpine + image: docker.io/library/redis:alpine volumes: - redis_data:/data restart: unless-stopped postgres: - image: pgvector/pgvector:pg17 + image: docker.io/pgvector/pgvector:pg17 environment: POSTGRES_USER: magec POSTGRES_PASSWORD: magec @@ -47,11 +49,13 @@ services: restart: unless-stopped ollama: - image: ollama/ollama:latest + image: docker.io/ollama/ollama:latest volumes: - ollama_data:/root/.ollama restart: unless-stopped - # Uncomment for NVIDIA GPU acceleration: + # ── NVIDIA GPU acceleration (uncomment ONE option) ────────────── + # + # Option A — Docker (nvidia-container-runtime): # deploy: # resources: # reservations: @@ -59,9 +63,13 @@ services: # - driver: nvidia # count: all # capabilities: [gpu] + # + # Option B — Podman (CDI — run: sudo nvidia-ctk cdi generate --output=/etc/cdi/nvidia.yaml): + # devices: + # - nvidia.com/gpu=all ollama-setup: - image: ollama/ollama:latest + image: docker.io/ollama/ollama:latest depends_on: - ollama restart: "no" @@ -87,7 +95,7 @@ services: restart: unless-stopped tts: - image: travisvn/openai-edge-tts:latest + image: docker.io/travisvn/openai-edge-tts:latest environment: - REQUIRE_API_KEY=False restart: unless-stopped diff --git a/scripts/install.sh b/scripts/install.sh index c523b65..55fbf99 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -270,19 +270,19 @@ box_line " on your machine." 
box_empty box_sep box_empty -box_line "$(badge " 2 " "$BG_MAGENTA" "$FG_WHITE") Use containers (Docker)" +box_line "$(badge " 2 " "$BG_MAGENTA" "$FG_WHITE") Use containers (Docker / Podman)" box_empty box_line " Everything runs inside isolated containers." box_line " Nothing gets installed system-wide. Easiest" box_line " to set up — one command starts everything." -box_line " Requires Docker to be installed." +box_line " Requires Docker or Podman to be installed." box_empty box_bottom echo choose \ "Download the program directly (binary)" \ - "Use containers (Docker)" + "Use containers (Docker / Podman)" INSTALL_METHOD="$REPLY" @@ -713,7 +713,7 @@ printf " ${DIM}$(hline '─' "$BOX_W")${NC}\n" echo method_label="Direct download (binary)" -[[ "$INSTALL_METHOD" == "2" ]] && method_label="Containers (Docker)" +[[ "$INSTALL_METHOD" == "2" ]] && method_label="Containers (Docker/Podman)" llm_label="Local (Ollama — private)" [[ "$LLM_CHOICE" == "2" ]] && llm_label="Cloud (OpenAI, Anthropic, Gemini)" @@ -1393,21 +1393,39 @@ install_containers() { printf " ${DIM}$(hline '─' "$BOX_W")${NC}\n" echo - # ── Check Docker ──────────────────────────────────────────────────── + # ── Check container engine (Docker or Podman) ─────────────────────── - info "Checking for Docker..." + CONTAINER_ENGINE="" + USE_PODMAN=false - if ! check_cmd docker; then + info "Checking for a container engine..." + + if check_cmd podman && podman info &>/dev/null; then + CONTAINER_ENGINE="podman" + USE_PODMAN=true + ok "Podman is ready" + elif check_cmd docker && docker info &>/dev/null; then + CONTAINER_ENGINE="docker" + ok "Docker is ready" + elif check_cmd docker; then + die "Docker is installed but not running. Start Docker and try again." + elif check_cmd podman; then + die "Podman is installed but not running. Start Podman and try again." 
+ else echo box_top box_empty - box_line " ${RED}Docker is not installed${NC}" "" "center" + box_line " ${RED}No container engine found${NC}" "" "center" box_empty - box_line " Docker is needed to run Magec in containers." - box_line " Install it and then run this script again:" + box_line " A container engine is needed to run Magec." + box_line " Install Docker or Podman and try again:" box_empty case "$OS" in - linux) box_line " ${CYAN}https://docs.docker.com/engine/install/${NC}" ;; + linux) + box_line " ${CYAN}https://docs.docker.com/engine/install/${NC}" + box_line " ${DIM}or${NC}" + box_line " ${CYAN}https://podman.io/docs/installation${NC}" + ;; darwin) box_line " ${CYAN}https://docs.docker.com/desktop/install/mac-install/${NC}" ;; windows) box_line " ${CYAN}https://docs.docker.com/desktop/install/windows-install/${NC}" ;; esac @@ -1416,18 +1434,48 @@ install_containers() { - if ! docker info &>/dev/null; then - die "Docker is installed but not running. Start Docker and try again." - fi + # ── Check compose ─────────────────────────────────────────────────── - if ! docker compose version &>/dev/null && ! docker-compose version &>/dev/null; then - die "Docker Compose is required. Install it from https://docs.docker.com/compose/install/" + if $USE_PODMAN; then + if podman compose version &>/dev/null; then + COMPOSE="podman compose" + elif check_cmd podman-compose; then + COMPOSE="podman-compose" + else + die "podman-compose is required. Install it: pip install podman-compose (or use podman with compose plugin)" + fi + else + if docker compose version &>/dev/null; then + COMPOSE="docker compose" + elif docker-compose version &>/dev/null; then + COMPOSE="docker-compose" + else + die "Docker Compose is required. Install it from https://docs.docker.com/compose/install/" + fi fi - ok "Docker is ready" + ok "Compose command: ${COMPOSE}" + + # ── Check GPU / NVIDIA ───────────────────────────────────────────── if $GPU; then - if ! 
docker info 2>/dev/null | grep -qi 'nvidia'; then + local gpu_ok=false + + if $USE_PODMAN; then + # Podman uses CDI — check for nvidia CDI spec + if [[ -f /etc/cdi/nvidia.yaml ]] || { [[ -d /etc/cdi ]] && ls /etc/cdi/nvidia*.yaml &>/dev/null; }; then + gpu_ok=true + elif nvidia-ctk cdi list 2>/dev/null | grep -qi 'nvidia.com/gpu'; then + gpu_ok=true + fi + else + # Docker — check nvidia runtime + if docker info 2>/dev/null | grep -qi 'nvidia'; then + gpu_ok=true + fi + fi + + if ! $gpu_ok; then echo box_top box_empty @@ -1440,7 +1488,12 @@ install_containers() { box_line " ${CYAN}https://docs.nvidia.com/datacenter/cloud-native/${NC}" box_line " ${CYAN}container-toolkit/install-guide.html${NC}" box_empty - box_line " Then restart Docker and run this script again." + if $USE_PODMAN; then + box_line " Then generate CDI specs:" + box_line " ${CYAN}sudo nvidia-ctk cdi generate --output=/etc/cdi/nvidia.yaml${NC}" + else + box_line " Then restart Docker and run this script again." + fi box_empty box_bottom exit 1 @@ -1448,12 +1501,6 @@ install_containers() { ok "NVIDIA GPU detected" fi - if docker compose version &>/dev/null; then - COMPOSE="docker compose" - else - COMPOSE="docker-compose" - fi - # ── Generate files ────────────────────────────────────────────────── info "Creating configuration files..." 
@@ -1692,7 +1739,7 @@ generate_docker_compose() { if $WANT_REDIS; then services+="\n redis:\n" - services+=" image: redis:alpine\n" + services+=" image: docker.io/library/redis:alpine\n" services+=" volumes:\n" services+=" - redis_data:/data\n" services+=" restart: unless-stopped\n" @@ -1701,7 +1748,7 @@ generate_docker_compose() { if $WANT_POSTGRES; then services+="\n postgres:\n" - services+=" image: pgvector/pgvector:pg17\n" + services+=" image: docker.io/pgvector/pgvector:pg17\n" services+=" environment:\n" services+=" POSTGRES_USER: magec\n" services+=" POSTGRES_PASSWORD: magec\n" @@ -1714,19 +1761,26 @@ generate_docker_compose() { if $need_ollama; then services+="\n ollama:\n" - services+=" image: ollama/ollama:latest\n" + services+=" image: docker.io/ollama/ollama:latest\n" services+=" volumes:\n" services+=" - ollama_data:/root/.ollama\n" services+=" restart: unless-stopped\n" if $GPU; then - services+=" deploy:\n" - services+=" resources:\n" - services+=" reservations:\n" - services+=" devices:\n" - services+=" - driver: nvidia\n" - services+=" count: all\n" - services+=" capabilities: [gpu]\n" + if $USE_PODMAN; then + # Podman uses CDI (Container Device Interface) for GPU access + services+=" devices:\n" + services+=" - nvidia.com/gpu=all\n" + else + # Docker uses nvidia container runtime via deploy block + services+=" deploy:\n" + services+=" resources:\n" + services+=" reservations:\n" + services+=" devices:\n" + services+=" - driver: nvidia\n" + services+=" count: all\n" + services+=" capabilities: [gpu]\n" + fi fi local models_to_pull="" @@ -1738,7 +1792,7 @@ generate_docker_compose() { fi services+="\n ollama-setup:\n" - services+=" image: ollama/ollama:latest\n" + services+=" image: docker.io/ollama/ollama:latest\n" services+=" depends_on:\n" services+=" - ollama\n" services+=" restart: \"no\"\n" @@ -1765,7 +1819,7 @@ generate_docker_compose() { services+=" restart: unless-stopped\n" services+="\n tts:\n" - services+=" image: 
travisvn/openai-edge-tts:latest\n" + services+=" image: docker.io/travisvn/openai-edge-tts:latest\n" services+=" environment:\n" services+=" - REQUIRE_API_KEY=False\n" services+=" restart: unless-stopped\n"