scottlepp committed on
Commit ff3c600
1 Parent(s): ab8e1a1
Files changed (2)
  1. app.py +31 -8
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,17 +1,40 @@
 import gradio as gr
 from transformers import pipeline

-pipeline = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")
+# pipeline = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")

-def predict(input_img):
-    predictions = pipeline(input_img)
-    return input_img, {p["label"]: p["score"] for p in predictions}
+# def predict(input_img):
+#     predictions = pipeline(input_img)
+#     return input_img, {p["label"]: p["score"] for p in predictions}
+
+from typing import List
+from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+
+tokenizer = AutoTokenizer.from_pretrained("juierror/text-to-sql-with-table-schema")
+model = AutoModelForSeq2SeqLM.from_pretrained("juierror/text-to-sql-with-table-schema")
+
+def prepare_input(question: str, table: List[str]):
+    table_prefix = "table:"
+    question_prefix = "question:"
+    join_table = ",".join(table)
+    inputs = f"{question_prefix} {question} {table_prefix} {join_table}"
+    input_ids = tokenizer(inputs, max_length=700, return_tensors="pt").input_ids
+    return input_ids
+
+def inference(question: str, table: List[str]) -> str:
+    input_data = prepare_input(question=question, table=table)
+    input_data = input_data.to(model.device)
+    outputs = model.generate(inputs=input_data, num_beams=10, top_k=10, max_length=700)
+    result = tokenizer.decode(token_ids=outputs[0], skip_special_tokens=True)
+    return result
+
+# print(inference(question="get people name with age equal 25", table=["id", "name", "age"]))

 gradio_app = gr.Interface(
-    predict,
-    inputs=gr.Image(label="Select hot dog candidate", sources=['upload', 'webcam'], type="pil"),
-    outputs=[gr.Image(label="Processed Image"), gr.Label(label="Result", num_top_classes=2)],
-    title="Hot Dog? Or Not?",
+    inference,
+    inputs="textbox",
+    outputs="label",
+    title="Text To SQL",
 )

 if __name__ == "__main__":
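Note: as committed, gr.Interface wires a single "textbox" input into inference, while inference takes two parameters (the question and the table columns). Below is a minimal sketch, not part of the commit, of one way to supply both parameters from the UI while reusing the inference function defined in app.py above; the text_to_sql wrapper and the textbox labels/placeholders are illustrative assumptions only.

# Sketch only: assumes the tokenizer, model, and inference() defined in app.py above.
import gradio as gr

def text_to_sql(question: str, columns: str) -> str:
    # Split a comma-separated schema string ("id, name, age") into the
    # List[str] that inference() expects.
    table = [col.strip() for col in columns.split(",") if col.strip()]
    return inference(question=question, table=table)

gradio_app = gr.Interface(
    text_to_sql,
    inputs=[
        gr.Textbox(label="Question", placeholder="get people name with age equal 25"),
        gr.Textbox(label="Table columns (comma-separated)", placeholder="id, name, age"),
    ],
    outputs=gr.Textbox(label="Generated SQL"),
    title="Text To SQL",
)

if __name__ == "__main__":
    gradio_app.launch()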
requirements.txt CHANGED
@@ -1,2 +1,3 @@
 transformers
-torch
+torch
+typing