SanjanaJD004 committed
Commit cd48eb1 · 1 Parent(s): 9edf180

Update app.py

Files changed (3)
  1. .DS_Store +0 -0
  2. app.py +11 -4
  3. requirements.txt +3 -1
.DS_Store CHANGED
Binary files a/.DS_Store and b/.DS_Store differ
 
app.py CHANGED
@@ -5,12 +5,18 @@ import torch
 
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
-#SAVED_MODEL_PATH = 'bart_base_full_finetune_save'
-SAVED_MODEL_PATH = './'
+SAVED_MODEL_PATH = '/Users/sanjanajd/Desktop/Bart-base_Summarizer/bart_base_full_finetune_save'
 model_name = "facebook/bart-base"
 model = AutoModelForSeq2SeqLM.from_pretrained(SAVED_MODEL_PATH).to(device)
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
+#dataset = load_dataset("samsum")
+dataset = load_dataset("samsum", download_mode="force_redownload")
+
+
+train_data = dataset["train"]
+validation_data = dataset["validation"]
+test_data = dataset["test"]
 
 def summarize(text):
     inputs = tokenizer(f"Summarize dialogue >>\n {text}", return_tensors="pt", max_length=1000, truncation=True, padding="max_length").to(device)
@@ -18,10 +24,11 @@ def summarize(text):
     summary = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids]
     return summary[0]
 
+
 iface = gr.Interface(
     fn=summarize,
-    inputs=gr.Textbox(lines=10, label="Input Dialogue"),
-    outputs=gr.Textbox(label="Generated Summary")
+    inputs=gr.inputs.Textbox(lines=10, label="Input Dialogue"),
+    outputs=gr.outputs.Textbox(label="Generated Summary")
 )
 
 iface.launch()'''
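
The hunk above calls load_dataset and consumes summary_ids without showing the matching import or the model.generate call, both of which fall outside the diff context. A minimal, self-contained sketch of how those pieces typically fit together is below; the datasets import and the generate arguments are assumptions rather than code from this commit, and the facebook/bart-base checkpoint stands in for the local fine-tuned weights at SAVED_MODEL_PATH.

import torch
from datasets import load_dataset  # assumed import; not visible in this diff
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

model_name = "facebook/bart-base"
# Stand-in for the local fine-tuned checkpoint referenced by SAVED_MODEL_PATH
model = AutoModelForSeq2SeqLM.from_pretrained(model_name).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# download_mode="force_redownload" bypasses the local cache and re-fetches SAMSum from the Hub
dataset = load_dataset("samsum", download_mode="force_redownload")
train_data = dataset["train"]

def summarize(text):
    inputs = tokenizer(f"Summarize dialogue >>\n {text}", return_tensors="pt",
                       max_length=1000, truncation=True, padding="max_length").to(device)
    # Hypothetical generate call; the real arguments are outside the diff context
    summary_ids = model.generate(inputs["input_ids"], num_beams=4, max_length=128)
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

print(summarize(train_data[0]["dialogue"]))
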
requirements.txt CHANGED
@@ -86,7 +86,9 @@ exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongrou
 executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1698579936712/work
 filelock==3.13.4
 frozenlist==1.4.1
-fsspec==2024.2.0
+fsspec==2024.2.0
+gradio==4.26.0
+gradio_client==0.15.1
 huggingface-hub==0.22.2
 idna==3.7
 importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1710971335535/work
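
The requirements change pins gradio==4.26.0 and gradio_client==0.15.1. In Gradio 4.x the components are taken directly from the top-level namespace (gr.Textbox), while gr.inputs.Textbox / gr.outputs.Textbox are the legacy pre-4.0 names used in the app.py hunk above. For reference, a minimal self-contained sketch of the same Interface against the 4.x API, with a hypothetical stand-in summarize function in place of the model code:

import gradio as gr

# Hypothetical stand-in for the BART summarizer defined in app.py (illustration only)
def summarize(text: str) -> str:
    return text[:100]

# Gradio 4.x (gradio==4.26.0 as pinned) takes input/output components from the top-level namespace
iface = gr.Interface(
    fn=summarize,
    inputs=gr.Textbox(lines=10, label="Input Dialogue"),
    outputs=gr.Textbox(label="Generated Summary"),
)

if __name__ == "__main__":
    iface.launch()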