pragnakalp committed
Commit cb5876d
Parent: 57e4f69

Update app.py

Files changed (1)
  1. app.py +75 -51
app.py CHANGED
@@ -1,12 +1,17 @@
- import gradio as gr
- from transformers import AutoTokenizer, AutoModelWithLMHead
  import gc
  import os
  import csv
- import pandas as pd
+ import socket
  import huggingface_hub
+
+ import gradio as gr
+ import pandas as pd
+
  from huggingface_hub import Repository
+ from transformers import AutoTokenizer, AutoModelWithLMHead
+

+ ## connection with HF datasets
  HF_TOKEN = os.environ.get("HF_TOKEN")
  DATASET_NAME = "emotion_detection"
  DATASET_REPO_URL = f"https://huggingface.co/datasets/pragnakalp/{DATASET_NAME}"
@@ -14,13 +19,6 @@ DATA_FILENAME = "emotion_detection_logs.csv"
  DATA_FILE = os.path.join("emotion_detection_logs", DATA_FILENAME)
  DATASET_REPO_ID = "pragnakalp/emotion_detection"
  print("is none?", HF_TOKEN is None)
-
- sentences_value = """Raj loves Simran.\nLast year I lost my Dog.\nI bought a new phone!\nShe is scared of cockroaches.\nWow! I was not expecting that.\nShe got mad at him."""
- cwd = os.getcwd()
- model_path = os.path.join(cwd)
- tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
- model_base = AutoModelWithLMHead.from_pretrained(model_path)
-
  try:
      hf_hub_download(
          repo_id=DATASET_REPO_ID,
@@ -36,6 +34,45 @@ repo = Repository(
      local_dir="emotion_detection_logs", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
  )

+ SENTENCES_VALUE = """Raj loves Simran.\nLast year I lost my Dog.\nI bought a new phone!\nShe is scared of cockroaches.\nWow! I was not expecting that.\nShe got mad at him."""
+ ## load model
+ cwd = os.getcwd()
+ model_path = os.path.join(cwd)
+ tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
+ model_base = AutoModelWithLMHead.from_pretrained(model_path)
+
+ """
+ get ip address
+ """
+ def get_device_ip_address():
+     result = {}
+     if os.name == "nt":
+         result = "Running on Windows"
+         hostname = socket.gethostname()
+         ip_address = socket.gethostbyname(hostname)
+         result['ip_addr'] = ip_address
+         result['host'] = hostname
+         print(result)
+         return result
+     elif os.name == "posix":
+         gw = os.popen("ip -4 route show default").read().split()
+         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+         s.connect((gw[2], 0))
+         ipaddr = s.getsockname()[0]
+         gateway = gw[2]
+         host = socket.gethostname()
+         result['ip_addr'] = ipaddr
+         result['host'] = host
+         print(result)
+         return result
+     else:
+         result['id'] = os.name + " not supported yet."
+         print(result)
+         return result
+
+ """
+ generate emotions of the sentences
+ """
  def get_emotion(text):

      # input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
@@ -67,55 +104,42 @@ def generate_emotion(article):
              'emotion': cur_result
          }
      )
-
-     # result = {
-     #     'result': results_dict,
-     # }
+
      result = {'Input':sen_list_temp, 'Detected Emotion':results}
      gc.collect()
-     add_csv = [results_dict]
-     with open(DATA_FILE, "a") as f:
-         writer = csv.writer(f)
-         # write the data
-         writer.writerow(add_csv)
-         commit_url = repo.push_to_hub()
-         print("commit data :",commit_url)
-
+     save_data_and_sendmail(results_dict,sen_list, results)
      return pd.DataFrame(result)
+
  """
  Save generated details
  """
- # def save_data_and_sendmail(article,generated_questions,num_que,result):
- #     try:
- #         hostname = {}
- #         hostname = get_device_ip_address()
- #         url = 'https://pragnakalpdev35.pythonanywhere.com/HF_space_que_gen'
- #         # url = 'http://pragnakalpdev33.pythonanywhere.com/HF_space_question_generator'
- #         myobj = {'article': article,'total_que': num_que,'gen_que':result,'ip_addr':hostname.get("ip_addr",""),'host':hostname.get("host","")}
- #         x = requests.post(url, json = myobj)
- #         add_csv = [article, generated_questions, num_que]
- #         with open(DATA_FILE, "a") as f:
- #             writer = csv.writer(f)
- #             # write the data
- #             writer.writerow(add_csv)
- #             commit_url = repo.push_to_hub()
- #             print("commit data :",commit_url)
- #     # except Exception as e:
- #     #     return "Error while storing data -->" + e
+ def save_data_and_sendmail(results_dict,sen_list,results):
+     try:
+         hostname = {}

- #     # try:
- #     #     with open(DATA_FILE, "r") as file:
- #     #         data = json.load(file)
- #     #         data.append(entry)
- #     #     with open(DATA_FILE, "w") as file:
- #     #         json.dump(data, file)
- #     #     commit_url = repo.push_to_hub()
- #     except Exception as e:
- #         return "Error while sending mail" + e
+         add_csv = [results_dict]
+         with open(DATA_FILE, "a") as f:
+             writer = csv.writer(f)
+             # write the data
+             writer.writerow(add_csv)
+             commit_url = repo.push_to_hub()
+             print("commit data :",commit_url)

- #     return "Successfully save data"
-
- inputs = gr.Textbox(value=sentences_value,lines=10, label="Sentences",elem_id="inp_div")
+         hostname = get_device_ip_address()
+         url = 'https://pragnakalpdev35.pythonanywhere.com/hf_space_emotion_detection'
+         # url = 'http://pragnakalpdev33.pythonanywhere.com/HF_space_question_generator'
+         myobj = {'sen_list': sen_list,'gen_results': results,'ip_addr':hostname.get("ip_addr",""),'host':hostname.get("host","")}
+         x = requests.post(url, json = myobj)
+
+     except Exception as e:
+         return "Error while sending mail" + e
+
+     return "Successfully save data"
+
+ """
+ UI design for demo using gradio app
+ """
+ inputs = gr.Textbox(value=SENTENCES_VALUE,lines=10, label="Sentences",elem_id="inp_div")
  outputs = [gr.Dataframe(row_count = (2, "dynamic"), col_count=(2, "fixed"), label="Here is the Result", headers=["Input","Detected Emotion"])]

  demo = gr.Interface(
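
The body of get_emotion is collapsed in the last hunk above (only the commented-out tokenizer.encode line is visible). For reference, a minimal sketch of how the mrm8488/t5-base-finetuned-emotion checkpoint is usually queried is given below; it follows the pattern from that model card and is an assumption about what app.py does, not the committed code itself.

# Assumed usage pattern for mrm8488/t5-base-finetuned-emotion (not the committed get_emotion body).
from transformers import AutoTokenizer, AutoModelWithLMHead  # AutoModelForSeq2SeqLM is the non-deprecated equivalent for T5

tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
model = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-emotion")

def get_emotion(text):
    # T5 generates a single emotion word (the checkpoint was fine-tuned on the
    # "emotion" dataset: sadness, joy, love, anger, fear, surprise)
    input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
    output = model.generate(input_ids=input_ids, max_length=2)
    return tokenizer.decode(output[0], skip_special_tokens=True).strip()

print(get_emotion("I bought a new phone!"))  # prints an emotion word such as "joy"

max_length=2 is enough here because the fine-tuned model emits a single emotion token; decoding with skip_special_tokens=True drops the leading pad token.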