Local LLM: Running Ollama and Open WebUI with Docker Compose

Tue Nov 26 2024, 20:04 UTC

Saved by @Shex #ollama #docker-compose #ai #webui

services:
  # Open WebUI front end, published on host port 3000 (container listens on 8080)
  webui:
    image: ghcr.io/open-webui/open-webui:main
    ports:
      - 3000:8080/tcp
    volumes:
      - open-webui:/app/backend/data
    extra_hosts:
      # lets the container resolve host.docker.internal to the Docker host
      - "host.docker.internal:host-gateway"
    depends_on:
      - ollama

  # Ollama model server; its API listens on port 11434
  ollama:
    image: ollama/ollama
    expose:
      - 11434/tcp
    ports:
      - 11434:11434/tcp
    healthcheck:
      # basic liveness check using the bundled ollama CLI
      test: ollama --version || exit 1
    volumes:
      - ollama:/root/.ollama

# named volumes: Ollama models and Open WebUI data persist across container recreation
volumes:
  ollama:
  open-webui:
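
With this file saved as docker-compose.yml, running docker compose up -d brings up both services: Open WebUI is served at http://localhost:3000 and the Ollama API at http://localhost:11434, while the two named volumes keep downloaded models and chat history across restarts. Because the ollama service defines a healthcheck, the depends_on entry could optionally be tightened with condition: service_healthy so the WebUI only starts once Ollama is ready. Models can be pulled from the Open WebUI interface or directly in the container, e.g. docker compose exec ollama ollama pull llama3.2 (any tag from the Ollama library works).

If the host has an NVIDIA GPU and the NVIDIA Container Toolkit installed, the ollama service can also be given GPU access. The keys below are not part of the original post; they are a minimal sketch of the standard Compose GPU reservation, placed in a separate override file (the name docker-compose.gpu.yml is just an example) or merged into the service above:

services:
  ollama:
    # assumes the NVIDIA Container Toolkit is installed on the host
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]

Start it with docker compose -f docker-compose.yml -f docker-compose.gpu.yml up -d; without a GPU, the original file works unchanged and Ollama falls back to CPU inference.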

https://a-chacon.com/en/docker/2024/09/16/run-llm-locally.html