Sakalti committed on
Commit c346a26
1 Parent(s): cb329ff

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -10,14 +10,14 @@ import spaces
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-DESCRIPTION = "# Sakaltum-7B-chat"
+DESCRIPTION = "# Sakalti/anchobi-4b"
 DESCRIPTION += "\n<p>現在の環境に合わせて最適化されています。</p>"
 
 MAX_MAX_NEW_TOKENS = 2048
 DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
 
-model_id = "sakaltcommunity/sakaltum-7b"
+model_id = "Sakalti/anchobi-4b"
 if torch.cuda.is_available():
     model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")
 else:
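
For reference, a minimal sketch of how the new model_id could be loaded and queried with transformers. This is not part of the commit: the CPU fallback, the chat-template call, the prompt, and the generation settings below are illustrative assumptions, since the diff truncates app.py after the else branch.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Sakalti/anchobi-4b"

# Load the tokenizer and model; the GPU path mirrors app.py, while the
# else branch is an assumption (the diff cuts off app.py's CPU path).
tokenizer = AutoTokenizer.from_pretrained(model_id)
if torch.cuda.is_available():
    model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")
else:
    model = AutoModelForCausalLM.from_pretrained(model_id)

# Assumed usage (not shown in this diff): build a chat prompt and generate a short reply.
messages = [{"role": "user", "content": "Hello!"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output_ids = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))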