Remove tokenizer in testdata/

add-more-languages
Meng Zhang 2023-03-24 09:44:12 +08:00
parent f6c75a1999
commit 0f5a959269
6 changed files with 5 additions and 19 deletions

View File

@@ -1,2 +1 @@
logs data
hf_cache

View File

@@ -5,9 +5,7 @@ services:
image: tabbyml/tabby image: tabbyml/tabby
environment: environment:
- MODEL_BACKEND=triton - MODEL_BACKEND=triton
- TRITON_TOKENIZER_NAME=/tokenizer - TRITON_TOKENIZER_NAME=EleutherAI/pythia-70m-deduped
volumes:
- ../testdata/tiny-70M/tokenizer:/tokenizer
links: links:
- triton - triton

View File

@@ -12,8 +12,8 @@ services:
ports: ports:
- "5000:5000" - "5000:5000"
volumes: volumes:
- ./logs:/logs - ./data/logs:/logs
- ./hf_cache:/root/.cache/huggingface - ./data/hf_cache:/root/.cache/huggingface
links: links:
- vector - vector
@@ -33,4 +33,4 @@ services:
container_name: tabby-vector container_name: tabby-vector
volumes: volumes:
- ./config/vector.toml:/etc/vector/vector.toml:ro - ./config/vector.toml:/etc/vector/vector.toml:ro
- ./logs:/logs - ./data/logs:/logs

View File

@@ -1 +0,0 @@
tokenizer.json filter=lfs diff=lfs merge=lfs -text

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e74ca9064c4f0d2232253bfe5f48caa78b6827212e87d831899d8fc64542e62b
size 2113711

View File

@@ -1,7 +0,0 @@
{
"add_prefix_space": false,
"bos_token": "<|endoftext|>",
"eos_token": "<|endoftext|>",
"tokenizer_class": "GPTNeoXTokenizer",
"unk_token": "<|endoftext|>"
}