Spaces:
Running
on
Zero
Running
on
Zero
Rijgersberg
committed on
Commit
•
fcdfc0f
1
Parent(s):
2698250
Description and back to fully trained geitje-chat
Browse files
app.py
CHANGED
@@ -10,7 +10,7 @@ import torch
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
11 |
|
12 |
HF_TOKEN = os.environ['HF_TOKEN']
|
13 |
-
DESCRIPTION = """# π GEITje
|
14 |
## Een groot open Nederlands taalmodel
|
15 |
|
16 |
[_Coming soon_](https://github.com/Rijgersberg/GEITje)"""
|
@@ -23,7 +23,7 @@ DEFAULT_MAX_NEW_TOKENS = 1024
|
|
23 |
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
|
24 |
|
25 |
if torch.cuda.is_available():
|
26 |
-
model_id = "Rijgersberg/GEITje-7B-chat
|
27 |
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto", token=HF_TOKEN)
|
28 |
tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
|
29 |
|
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
11 |
|
12 |
HF_TOKEN = os.environ['HF_TOKEN']
|
13 |
+
DESCRIPTION = """# π GEITje-7B-chat π
|
14 |
## Een groot open Nederlands taalmodel
|
15 |
|
16 |
[_Coming soon_](https://github.com/Rijgersberg/GEITje)"""
|
|
|
23 |
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
|
24 |
|
25 |
if torch.cuda.is_available():
|
26 |
+
model_id = "Rijgersberg/GEITje-7B-chat"
|
27 |
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto", token=HF_TOKEN)
|
28 |
tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
|
29 |
|