makanaan committed
Commit: aab5d44
1 Parent(s): 91c367c

update app.py

Files changed (1):
  1. app.py +20 -21
app.py CHANGED
@@ -1,27 +1,27 @@
-from transformers import pipeline
+#from transformers import pipeline
 import gradio as gr
 
-from transformers import AutoTokenizer, AutoModelForCausalLM
-#from os import path
+#from transformers import AutoTokenizer, AutoModelForCausalLM
+##from os import path
 
-#MODEL_DIRECTORY = "/models/mrm8488-t5-base-finetuned-emotion"
-tokenizer = AutoTokenizer.from_pretrained("tuner007/pegasus_paraphrase", use_fast=False)
-#if not path.exists(MODEL_DIRECTORY):
-model = AutoModelForCausalLM.from_pretrained("tuner007/pegasus_paraphrase")
-# model.save_pretrained(MODEL_DIRECTORY)
-#else:
-# model = AutoModelWithLMHead.from_pretrained(MODEL_DIRECTORY)
-
+##MODEL_DIRECTORY = "/models/mrm8488-t5-base-finetuned-emotion"
+#tokenizer = AutoTokenizer.from_pretrained("tuner007/pegasus_paraphrase", use_fast=False)
+##if not path.exists(MODEL_DIRECTORY):
+#model = AutoModelForCausalLM.from_pretrained("tuner007/pegasus_paraphrase")
+## model.save_pretrained(MODEL_DIRECTORY)
+##else:
+## model = AutoModelWithLMHead.from_pretrained(MODEL_DIRECTORY)
+#
 
 def get_emotion(text):
-    input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
-    output = model.generate(input_ids=input_ids, max_length=2)
-
-    # print(output)
-    dec = [tokenizer.decode(ids) for ids in output]
-    print(dec)
-    label = dec[0]
-    return label
+# input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
+# output = model.generate(input_ids=input_ids, max_length=2)
+#
+# # print(output)
+# dec = [tokenizer.decode(ids) for ids in output]
+# print(dec)
+# label = dec[0]
+    return text
 
 
 
@@ -31,8 +31,7 @@ def parph(name= "paraphrase: This is something which I cannt understand at all."
 ##model_name = 'tuner007/pegasus_paraphrase'
 #text2text = pipeline('text2text-generation', model = "Vamsi/T5_Paraphrase_Paws")
 ##text2text(name)
-    test = get_emotion("""No doubt, 2020 has been the wackiest year ever. Found my family and my life wreck like never before. I dont only say this because of the job i hate or any family problem i have, but my mom is also planning on moving in with her brother's family just to escape this house full of unemployed people who keeps her unstable with all her cousins and other relatives depending on her. As if a miracle happened, she was now finally, after years, moving out. Well, of course, Im supposed to be with her. But i was planning on working on site by next week, and guess what. She was planning on moving out this weekend. She told me about this just this Monday, I didnt know what to tell her so I asked her to give me time to think.\n\n\n\nI asked her what her plans were after moving in with another family, relative or not it still wasnt going to be our property so i wanted to make sure. She told me we were only temporarily going to move there and then find a place to settle in afterward. I knew this was a terrible idea already, so i asked her, why wont she find a place to rent first while were here so that we can really make sure that we arent going to stay at my relatives for a long time. Eventually, after a lot of asking, she told me that her and my uncle had a talk, and that he told her it was better for her to be on his side, at least no matter what happens they'd be there for each other. \n\n\n\nThat might be sweet and all, but this set up of moving out doesnt even give us independence at all. This uncle of mine had 5 kids, none of them were grown up. The house was noisily irritating, there was no place for peace of mind and all of that. Even if i move my work equipment there, I don't think it would be a comfortable adjustment. \n\nI just think like, this is, again, another foolish decision not being thought through by my mom. I've been stressed a lot enough, and now i have to contemplate this moving out thing. \n\n\n\nI am planning to tell her that I wanted to stay here as Id be working on-site soon. The plan is to have her live in the countryside with my uncle and for me to stay here. But of course, I would be again the one making the foolish decision because in all cases I should choose to always be with my mom, be there to take care of her, and such. But I already have plans for myself, and I cant afford to compromise for a plan that isnt even thought through. How is she going to figure this out when she gets there? It makes no sense.\n\n \n\nA few days more and it's Christmas Time, the house has no decors, everything is uncertain and are in shambles. This couldn't get any worse."""
-    )
+    test = get_emotion(name)
     return test # text2text(name)
 
 
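After this commit the Pegasus/emotion model loading is fully commented out, so get_emotion simply echoes its input and parph passes the user text straight through. The Gradio wiring that exposes parph is not part of this diff; the following is a minimal sketch, under the assumption that app.py uses a plain text-in/text-out gr.Interface as Spaces commonly do (the Interface and launch lines are an assumption, not taken from the commit):

# Hypothetical sketch, not shown in the diff: how the remaining functions
# would typically be exposed as a Gradio app on a Hugging Face Space.
import gradio as gr

def get_emotion(text):
    # With the model code commented out by this commit, the function
    # just returns its input unchanged.
    return text

def parph(name="paraphrase: This is something which I cannt understand at all."):
    # Mirrors the function shown in the diff: forwards the input text.
    test = get_emotion(name)
    return test  # text2text(name)

# Assumed wiring: a simple text-to-text interface around parph.
iface = gr.Interface(fn=parph, inputs="text", outputs="text")
iface.launch()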