Move python code under tabby/ (#8)
* Add tabby config file
* Rename train.yaml to trainer.yaml
* Change server to relative import
* Move source files into tabby
* Rename conf…
parent 3407253882
commit 8cf533016a
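For orientation, a sketch of the package layout this commit appears to produce, inferred from the module paths and commands in the hunks below; the exact file placement is an assumption, not shown verbatim in the diff:

    tabby/
        server/         # FastAPI app, started as `uvicorn tabby.server:app`
            app.py
            events.py
            models.py
            python.py   # PythonModelService
            triton.py   # TritonService
        admin/
            Home.py     # Streamlit UI
        tools/
            download_project.py
            huggingface_gptneox_convert.py
            preprocess/
                args.py
                filters.py
                metrics.py
                preprocess_project.py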
@@ -55,7 +55,7 @@ jobs:
       id: build-and-push
       uses: docker/build-push-action@v3.1.1
       with:
-        file: deployment/Dockerfile
+        file: Dockerfile
         context: .
         push: ${{ github.event_name != 'pull_request' }}
         tags: |
Makefile
@@ -15,7 +15,7 @@ $(PRE_COMMIT_HOOK):
 	poetry run pre-commit install --install-hooks
 
 $(LOCAL_MODEL):
-	poetry run python scripts/huggingface_gptneox_convert.py \
+	poetry run python -m tabby.tools.huggingface_gptneox_convert \
 		-in_file EleutherAI/pythia-70m-deduped \
 		-o $@ \
 		-i_g 1 -m_n tiny-70M -p 1 -w fp16
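Replacing `python scripts/huggingface_gptneox_convert.py` with `python -m tabby.tools.huggingface_gptneox_convert` runs the tool as a module inside the tabby package, so its intra-package imports resolve without sys.path tweaks; all it needs is the repo root (containing tabby/) as the working directory or on PYTHONPATH, plus a main guard in the module. A minimal sketch of that pattern, with a hypothetical module name:

    # tabby/tools/example_tool.py -- hypothetical module illustrating the pattern
    def main() -> None:
        print("invoked as: python -m tabby.tools.example_tool")


    if __name__ == "__main__":
        main()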
@@ -1,20 +0,0 @@
-schedule: "5 4 * * *" # Run at 04:05.
-
-env:
-  - APP_DIR: /app
-steps:
-  - name: Create job dir
-    command: mktemp -d /tmp/job.XXXXX
-    output: JOB_DIR
-
-  - name: Fetch source code
-    dir: $APP_DIR
-    command: python scripts/download_project.py --url http://download.savannah.gnu.org/releases/tinycc/tcc-0.9.24.tar.bz2 --output_dir $JOB_DIR/repository
-    depends:
-      - Create job dir
-
-  - name: Generate dataset
-    dir: ${APP_DIR}
-    command: python scripts/preprocess/preprocess_project.py --project_dir $JOB_DIR/repository --output_dir $JOB_DIR/dataset
-    depends:
-      - Fetch source code
@@ -0,0 +1,20 @@
+schedule: "5 4 * * *" # Run at 04:05.
+
+env:
+  - APP_DIR: /app
+steps:
+  - name: Create job dir
+    command: mktemp -d /tmp/job.XXXXX
+    output: JOB_DIR
+
+  - name: Fetch source code
+    dir: $APP_DIR
+    command: python -m tabby.tools.download_project --url http://download.savannah.gnu.org/releases/tinycc/tcc-0.9.24.tar.bz2 --output_dir $JOB_DIR/repository
+    depends:
+      - Create job dir
+
+  - name: Generate dataset
+    dir: $APP_DIR
+    command: python -m tabby.tools.preprocess.preprocess_project --project_dir $JOB_DIR/repository --output_dir $JOB_DIR/dataset
+    depends:
+      - Fetch source code
@@ -4,15 +4,14 @@ services:
   server:
     build:
       context: ..
      dockerfile: ./deployment/Dockerfile
       args:
         PYPI_INDEX_URL: https://pypi.tuna.tsinghua.edu.cn/simple
-    command: uvicorn app:app --host 0.0.0.0 --port 5000 --reload
+    command: uvicorn tabby.server:app --host 0.0.0.0 --port 5000 --reload
     volumes:
       - ../:/app
 
   admin:
-    command: streamlit run --server.runOnSave=true Home.py
+    command: streamlit run --server.runOnSave=true tabby/admin/Home.py
     volumes:
       - ../:/app
@@ -4,8 +4,7 @@ services:
   server:
     image: tabbyml/tabby
     container_name: tabby-server
-    working_dir: /app/server
-    command: uvicorn app:app --host 0.0.0.0 --port 5000
+    command: uvicorn tabby.server:app --host 0.0.0.0 --port 5000
     environment:
       - PYTHON_MODEL_NAME=EleutherAI/pythia-70m-deduped
       - EVENTS_LOG_DIR=/logs/tabby-server
@@ -14,14 +13,11 @@ services:
     volumes:
       - ./data/logs:/logs
       - ./data/hf_cache:/root/.cache/huggingface
     links:
       - vector
 
   admin:
     image: tabbyml/tabby
     container_name: tabby-admin
-    working_dir: /app/admin
-    command: streamlit run Home.py
+    command: streamlit run tabby/admin/Home.py
     ports:
       - "8501:8501"
     links:
@@ -1,13 +1,14 @@
 import logging
 import os
 
-import events
 import uvicorn
 from fastapi import FastAPI, Response
 from fastapi.responses import JSONResponse
-from models import CompletionRequest, CompletionResponse
-from python import PythonModelService
-from triton import TritonService
+
+from . import events
+from .models import CompletionRequest, CompletionResponse
+from .python import PythonModelService
+from .triton import TritonService
 
 app = FastAPI(
     title="TabbyServer",
@@ -38,7 +39,3 @@ async def completions(request: CompletionRequest) -> CompletionResponse:
 async def selection(id: str, index: int) -> JSONResponse:
     events.log_selection(id, index)
     return JSONResponse(content="ok")
-
-
-if __name__ == "__main__":
-    uvicorn.run("app:app", host="0.0.0.0", port=5000)
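With the package-relative imports, app.py can no longer be launched as a standalone script; the removed `uvicorn.run("app:app", ...)` guard would fail on `from . import events` anyway, and the server is now addressed by its package path (the compose files above use `uvicorn tabby.server:app`). A minimal equivalent launcher, assuming the repository root is the working directory and that tabby/server exposes app:

    # run_server.py -- hypothetical launcher kept outside the package
    import uvicorn

    # equivalent to `uvicorn tabby.server:app --host 0.0.0.0 --port 5000`
    uvicorn.run("tabby.server:app", host="0.0.0.0", port=5000)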
@@ -3,10 +3,11 @@ import os
 import shutil
 from typing import List
 
-import models
 from loguru import logger
 from pydantic import BaseModel
+
+from . import models
 
 logger.configure(handlers=[])
 
 
@@ -3,9 +3,10 @@ import string
 import time
 from typing import List
 
-from models import Choice, CompletionRequest, CompletionResponse
 from transformers import AutoModelForCausalLM, AutoTokenizer
+
+from .models import Choice, CompletionRequest, CompletionResponse
 
 
 class PythonModelService:
     def __init__(
@@ -6,10 +6,11 @@ from typing import List
 
 import numpy as np
 import tritonclient.grpc as client_util
-from models import Choice, CompletionRequest, CompletionResponse
 from transformers import AutoTokenizer
 from tritonclient.utils import InferenceServerException, np_to_triton_dtype
+
+from .models import Choice, CompletionRequest, CompletionResponse
 
 
 class TritonService:
     def __init__(
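events.py, python.py, and triton.py all make the same change: bare top-level imports like `import models` become package-relative, because once the files live under tabby/ their siblings are no longer importable as top-level modules. A self-contained demonstration of the failure mode, built around a throwaway package with hypothetical names:

    # demo_relative_imports.py -- shows why `import models` breaks inside a package
    import os
    import subprocess
    import sys
    import tempfile

    tmp = tempfile.mkdtemp()
    pkg = os.path.join(tmp, "pkg")
    os.makedirs(pkg)
    open(os.path.join(pkg, "__init__.py"), "w").close()
    with open(os.path.join(pkg, "models.py"), "w") as f:
        f.write("NAME = 'models'\n")
    with open(os.path.join(pkg, "app.py"), "w") as f:
        f.write("import models\n")  # old style: works only if pkg/ itself is on sys.path

    # Importing pkg.app from the parent directory fails with the old-style import:
    proc = subprocess.run(
        [sys.executable, "-c", "import pkg.app"], cwd=tmp, capture_output=True, text=True
    )
    print(proc.stderr.strip().splitlines()[-1])
    # -> ModuleNotFoundError: No module named 'models'
    # Rewriting app.py as `from . import models` makes the package import cleanly.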
@@ -6,7 +6,7 @@ import requests
 from transformers import HfArgumentParser
 
 
-@dataclass(kw_only=True)
+@dataclass
 class DownloaderArgs:
     url: str = field(metadata={"help": "URL to source code tar.gz file"})
     output_dir: str = field(metadata={"help": "Output save path directory"})
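Dropping `kw_only=True` is a small semantic change worth noting: that option exists only on Python 3.10+, so plain `@dataclass` widens interpreter compatibility, at the cost of the fields becoming positional-or-keyword. A self-contained illustration:

    from dataclasses import dataclass, field


    @dataclass
    class DownloaderArgs:
        url: str = field(metadata={"help": "URL to source code tar.gz file"})
        output_dir: str = field(metadata={"help": "Output save path directory"})


    # Plain @dataclass allows positional construction:
    args = DownloaderArgs("http://example.com/src.tar.gz", "/tmp/out")

    # Under @dataclass(kw_only=True) (Python 3.10+) the same call would raise
    # TypeError; only DownloaderArgs(url=..., output_dir=...) would be accepted.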
@@ -1,4 +1,4 @@
-from args import FilterArgs
+from .args import FilterArgs
 
 
 def basic_filters(args: FilterArgs):
@@ -2,12 +2,12 @@ import glob
 import json
 import os
 
-import filters
-import metrics
-from args import PreprocessProjectArgs
 from datasets import Dataset
 from transformers import HfArgumentParser
 
+from . import filters, metrics
+from .args import PreprocessProjectArgs
+
 
 def parse_args():
     parser = HfArgumentParser(PreprocessProjectArgs)
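For context, the HfArgumentParser pattern visible in parse_args maps dataclass fields to CLI flags, which is how --project_dir and --output_dir in the job YAML above reach the tool. A runnable sketch; the field set is an assumption modeled on those flags, not the real PreprocessProjectArgs:

    from dataclasses import dataclass, field

    from transformers import HfArgumentParser


    @dataclass
    class PreprocessProjectArgs:  # illustrative stand-in for the real dataclass
        project_dir: str = field(metadata={"help": "Checked-out project to preprocess"})
        output_dir: str = field(metadata={"help": "Directory to write the dataset"})


    parser = HfArgumentParser(PreprocessProjectArgs)
    (args,) = parser.parse_args_into_dataclasses()
    print(args.project_dir, args.output_dir)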