Clean up development settings

add-more-languages
Meng Zhang 2023-03-28 20:28:20 +08:00
parent 648d521afb
commit 490a1e154d
5 changed files with 22 additions and 86 deletions

View File

@ -1,16 +1,14 @@
DEFAULT := docker-compose -f ../deployment/docker-compose.yml
UP_FLAGS := up --remove-orphans
build:
docker-compose -f docker-compose.yml -f docker-compose.dev.yml build
up:
docker-compose -f docker-compose.yml $(UP_FLAGS)
$(DEFAULT) -f docker-compose.dev.yml build
dev:
docker-compose -f docker-compose.yml -f docker-compose.dev.yml $(UP_FLAGS)
$(DEFAULT) -f docker-compose.dev.yml $(UP_FLAGS)
dev-triton:
docker-compose -f docker-compose.yml -f docker-compose.triton.yml -f docker-compose.dev.yml $(UP_FLAGS)
dev-python:
$(DEFAULT) -f docker-compose.dev.yml -f docker-compose.python.yml $(UP_FLAGS)
clean:
docker-compose -f docker-compose.yml -f docker-compose.triton.yml -f docker-compose.dev.yml down --remove-orphans
$(DEFAULT) -f docker-compose.dev.yml down

View File

@ -6,6 +6,8 @@ services:
context: ..
args:
PYPI_INDEX_URL: https://pypi.tuna.tsinghua.edu.cn/simple
volumes:
- ../:/app
server:
command: uvicorn tabby.server:app --host 0.0.0.0 --port 5000 --reload

View File

@ -0,0 +1,11 @@
version: '3.3'
services:
server:
image: tabbyml/tabby
environment:
MODEL_BACKEND: python
triton:
profiles:
- donotstart

View File

@ -4,33 +4,8 @@ services:
server:
image: tabbyml/tabby
environment:
- MODEL_BACKEND=triton
depends_on:
triton:
condition: service_healthy
MODEL_BACKEND: python
triton:
image: tabbyml/fastertransformer_backend
container_name: tabby-triton
command: /scripts/triton.sh
shm_size: 1gb
volumes:
- ./scripts:/scripts
- ../data/hf_cache:/root/.cache/huggingface
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
environment:
- MODEL_NAME=TabbyML/NeoX-70M
depends_on:
init:
condition: service_completed_successfully
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8002/metrics"]
interval: 2s
timeout: 2s
start_period: 120s
profiles:
- donotstart

View File

@ -1,50 +0,0 @@
version: '3.3'
services:
init:
image: tabbyml/tabby
container_name: tabby-init
command: python -m tabby.tools.model_preload --repo_id TabbyML/NeoX-70M
volumes:
- ../data/hf_cache:/root/.cache/huggingface
server:
image: tabbyml/tabby
container_name: tabby-server
command: uvicorn tabby.server:app --host 0.0.0.0 --port 5000
environment:
- MODEL_NAME=TabbyML/NeoX-70M
- EVENTS_LOG_DIR=/logs
ports:
- "5000:5000"
volumes:
- ../data/logs/tabby-server:/logs
- ../data/hf_cache:/root/.cache/huggingface
depends_on:
init:
condition: service_completed_successfully
admin:
image: tabbyml/tabby
container_name: tabby-admin
command: streamlit run tabby/admin/Home.py
ports:
- "8501:8501"
vector:
image: timberio/vector:0.28.1-alpine
container_name: tabby-vector
volumes:
- ./config/vector.toml:/etc/vector/vector.toml:ro
- ../data/logs:/logs
dagu:
image: tabbyml/tabby
container_name: tabby-dagu
command: dagu server --host 0.0.0.0 --port 8080
volumes:
- ./config:/config:ro
- ../data/repositories:/repositories
- ../data/dataset:/dataset
environment:
DAGU_DAGS: tabby/tasks