docker-compose.yml

version: "3.3"
services:
  text-generation-webui:
    build:
      context: .
      args:
        # specify which cuda version your card supports: https://developer.nvidia.com/cuda-gpus
        TORCH_CUDA_ARCH_LIST: ${TORCH_CUDA_ARCH_LIST}
        WEBUI_VERSION: ${WEBUI_VERSION}
    env_file: .env
    ports:
      - "${HOST_PORT}:${CONTAINER_PORT}"
      - "${HOST_API_PORT}:${CONTAINER_API_PORT}"
    stdin_open: true
    tty: true
    volumes:
      - ./characters:/app/characters
      - ./extensions:/app/extensions
      - ./loras:/app/loras
      - ./models:/app/models
      - ./presets:/app/presets
      - ./prompts:/app/prompts
      - ./softprompts:/app/softprompts
      - ./training:/app/training
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ['0']
              capabilities: [gpu]
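
The compose file reads every ${...} variable from the .env file referenced by env_file. A minimal sketch of what that .env might contain is shown below; the values are illustrative assumptions, not defaults confirmed by this file, so set the CUDA architecture and ports to match your GPU and host setup.

# .env (example values; adjust for your hardware and network)
# compute capability of your GPU, see https://developer.nvidia.com/cuda-gpus
TORCH_CUDA_ARCH_LIST=7.5
# git ref of the web UI to build into the image (assumed usage)
WEBUI_VERSION=HEAD
HOST_PORT=7860
CONTAINER_PORT=7860
HOST_API_PORT=5000
CONTAINER_API_PORT=5000

With the .env populated, the service can typically be built and started with: docker compose up --build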