MJ106 committed on
Commit
293f78e
β€’
1 Parent(s): c3fcd8a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -13
app.py CHANGED
@@ -1,21 +1,21 @@
"""Streamlit demo: classify an uploaded image as hot dog / not hot dog."""
import streamlit as st
from transformers import pipeline
from PIL import Image

# TODO: complete the ??? section  (translated from the original Korean note)
# Bind the pipeline instance to a distinct name so it does not shadow the
# imported `pipeline` factory function (the original did `pipeline = pipeline(...)`).
classifier = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")

st.title("Hot Dog? Or Not?")

file_name = st.file_uploader("Upload a hot dog candidate image")

if file_name is not None:
    # Two-column layout: image on the left, scores on the right.
    col1, col2 = st.columns(2)

    image = Image.open(file_name)
    col1.image(image, use_column_width=True)
    # TODO: complete the ??? section  (translated from the original Korean note)
    predictions = classifier(image)

    col2.header("Probabilities")
    # Each prediction is a dict with 'label' and 'score' (0..1) keys.
    for p in predictions:
        col2.subheader(f"{ p['label'] }: { round(p['score'] * 100, 1)}%")
 
"""Generate a chat reply with the Synatra-7B translation model.

Fixes over the committed version:
- `AutoModelForCausalLM` and `AutoTokenizer` are actually imported.
- `from_pretrained` takes the checkpoint id positionally; it has no
  `task=`/`tokenizer=` keyword arguments (those belong to `pipeline(...)`).
- `model` and `tokenizer` are defined before use (previously the result was
  bound to `pipeline`, shadowing the import, and `model`/`tokenizer` were
  undefined, so the script raised NameError).
"""
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# TODO: complete the ??? section  (translated from the original Korean note)
MODEL_ID = "maywell/Synatra-7B-v0.3-Translation"

device = "cuda"  # the device to load the model onto

model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

messages = [
    {"role": "user", "content": "λ°”λ‚˜λ‚˜λŠ” μ›λž˜ ν•˜μ–€μƒ‰μ΄μ•Ό?"},
]

# Render the chat messages through the model's chat template into token ids.
encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt")

model_inputs = encodeds.to(device)
model.to(device)

generated_ids = model.generate(model_inputs, max_new_tokens=1000, do_sample=True)
decoded = tokenizer.batch_decode(generated_ids)
print(decoded[0])