awacke1 committed on
Commit 08af166
1 Parent(s): c18db37

Update app.py

Files changed (1)
  1. app.py +83 -0
app.py CHANGED
@@ -2,6 +2,89 @@ from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
 import torch
 import gradio as gr
 
+
+# PersistDataset -----
+import os
+import csv
+import gradio as gr
+from gradio import inputs, outputs
+import huggingface_hub
+from huggingface_hub import Repository, hf_hub_download, upload_file
+from datetime import datetime
+DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/Carddata.csv"
+DATASET_REPO_ID = "awacke1/Carddata.csv"
+DATA_FILENAME = "Carddata.csv"
+DATA_FILE = os.path.join("data", DATA_FILENAME)
+HF_TOKEN = os.environ.get("HF_TOKEN")
+# overriding/appending to the gradio template
+SCRIPT = """
+<script>
+if (!window.hasBeenRun) {
+  window.hasBeenRun = true;
+  console.log("should only happen once");
+  document.querySelector("button.submit").click();
+}
+</script>
+"""
+#with open(os.path.join(gr.networking.STATIC_TEMPLATE_LIB, "frontend", "index.html"), "a") as f:
+#    f.write(SCRIPT)
+try:
+    hf_hub_download(
+        repo_id=DATASET_REPO_ID,
+        filename=DATA_FILENAME,
+        cache_dir=DATA_DIRNAME,
+        force_filename=DATA_FILENAME
+    )
+except:
+    print("file not found")
+repo = Repository(
+    local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
+)
+def generate_html() -> str:
+    with open(DATA_FILE) as csvfile:
+        reader = csv.DictReader(csvfile)
+        rows = []
+        for row in reader:
+            rows.append(row)
+    rows.reverse()
+    if len(rows) == 0:
+        return "no messages yet"
+    else:
+        html = "<div class='chatbot'>"
+        for row in rows:
+            html += "<div>"
+            html += f"<span>{row['name']}</span>"
+            html += f"<span class='message'>{row['message']}</span>"
+            html += "</div>"
+        html += "</div>"
+        return html
+def store_message(name: str, message: str):
+    if name and message:
+        with open(DATA_FILE, "a") as csvfile:
+            writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
+            writer.writerow(
+                {"name": name, "message": message, "time": str(datetime.now())}
+            )
+        commit_url = repo.push_to_hub()
+    return generate_html()
+iface = gr.Interface(
+    store_message,
+    [
+        inputs.Textbox(placeholder="Your name"),
+        inputs.Textbox(placeholder="Your message", lines=2),
+    ],
+    "html",
+    css="""
+    .message {background-color:cornflowerblue;color:white; padding:4px;margin:4px;border-radius:4px; }
+    """,
+    title="Reading/writing to a HuggingFace dataset repo from Spaces",
+    description=f"This is a demo of how to do simple *shared data persistence* in a Gradio Space, backed by a dataset repo.",
+    article=f"The dataset repo is [{DATASET_REPO_URL}]({DATASET_REPO_URL})",
+)
+#iface.launch()
+# -------
+
+
 mname = "facebook/blenderbot-400M-distill"
 model = BlenderbotForConditionalGeneration.from_pretrained(mname)
 tokenizer = BlenderbotTokenizer.from_pretrained(mname)
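
Note on the added block: `hf_hub_download` is called with `cache_dir=DATA_DIRNAME`, but `DATA_DIRNAME` is never defined in this file (only `DATA_FILE = os.path.join("data", DATA_FILENAME)` is), so that call site raises a `NameError`, which the bare `except:` then reports as "file not found". Below is a minimal corrected sketch of the download-and-clone step only, assuming the intended directory is the same `data` folder that `Repository` clones into; everything else in the sketch is copied from the committed code.

    # Sketch only: DATA_DIRNAME = "data" is an assumption, chosen to match
    # Repository(local_dir="data", ...) used later in the committed code.
    import os
    from huggingface_hub import Repository, hf_hub_download

    DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/Carddata.csv"
    DATASET_REPO_ID = "awacke1/Carddata.csv"
    DATA_FILENAME = "Carddata.csv"
    DATA_DIRNAME = "data"          # definition missing in the committed code
    DATA_FILE = os.path.join(DATA_DIRNAME, DATA_FILENAME)
    HF_TOKEN = os.environ.get("HF_TOKEN")

    try:
        # Pre-fetch a copy of the CSV so DATA_FILE exists even before the clone.
        hf_hub_download(
            repo_id=DATASET_REPO_ID,
            filename=DATA_FILENAME,
            cache_dir=DATA_DIRNAME,
            force_filename=DATA_FILENAME,
        )
    except Exception:              # avoid a bare except that hides real errors
        print("file not found")

    # Clone the dataset repo; store_message() appends rows to DATA_FILE and
    # pushes them back with repo.push_to_hub().
    repo = Repository(
        local_dir=DATA_DIRNAME, clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
    )

The `gradio.inputs` classes and the `force_filename` argument to `hf_hub_download` appear to target the older Gradio and `huggingface_hub` releases this Space was built against; both have since been deprecated. Also note that `iface.launch()` is commented out, so the persistence demo is defined but never started, while the Blenderbot model below is loaded unconditionally.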