vinayakdev committed
Commit
507bb54
1 Parent(s): 0074c68

Happy Hugging Face!

Files changed (1)
  1. generator.py +31 -28
generator.py CHANGED
@@ -31,9 +31,9 @@ import pickle
 # text= "The abolition of feudal privileges by the National Constituent Assembly on 4 August 1789 and the Declaration \\nof the Rights of Man and of the Citizen (La Déclaration des Droits de l'Homme et du Citoyen), drafted by Lafayette \\nwith the help of Thomas Jefferson and adopted on 26 August, paved the way to a Constitutional Monarchy \\n(4 September 1791 – 21 September 1792). Despite these dramatic changes, life at the court continued, while the situation \\nin Paris was becoming critical because of bread shortages in September. On 5 October 1789, a crowd from Paris descended upon Versailles \\nand forced the royal family to move to the Tuileries Palace in Paris, where they lived under a form of house arrest under \\nthe watch of Lafayette's Garde Nationale, while the Comte de Provence and his wife were allowed to reside in the \\nPetit Luxembourg, where they remained until they went into exile on 20 June 1791."
 # hftokenizer = pickle.load(open('models/hftokenizer.sav', 'rb'))
 # hfmodel = pickle.load(open('models/hfmodel.sav', 'rb'))
-hfmodel = alwm.from_pretrained("valhalla/t5-small-e2e-qg")
+hfmodel = alwm.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
 
-hftokenizer = T5TokenizerFast.from_pretrained("t5-small")
+hftokenizer = T5TokenizerFast.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
 def run_model(input_string, **generator_args):
     generator_args = {
         "max_length": 256,
@@ -52,36 +52,39 @@ def run_model(input_string, **generator_args):
 
 
 
-al_tokenizer = att.from_pretrained("deepset/electra-base-squad2")
-al_model = amqa.from_pretrained("deepset/electra-base-squad2")
-
+# al_tokenizer = att.from_pretrained("deepset/electra-base-squad2")
+# al_model = amqa.from_pretrained("deepset/electra-base-squad2")
+
+tokenizer = AutoTokenizer.from_pretrained("ahotrod/albert_xxlargev1_squad2_512")
+
+model = AutoModelForQuestionAnswering.from_pretrained("ahotrod/albert_xxlargev1_squad2_512")
 # al_model = pickle.load(open('models/al_model.sav', 'rb'))
 # al_tokenizer = pickle.load(open('models/al_tokenizer.sav', 'rb'))
 def QA(question, context):
     # model_name="deepset/electra-base-squad2"
-    nlp = pipeline("question-answering", model=al_model, tokenizer=al_tokenizer)
-    format = {
-        'question': question,
-        'context': context
-    }
-    res = nlp(format)
-    output = f"{question}\n{string.capwords(res['answer'])}\tscore : [{res['score']}] \n"
-    return output
-    # inputs = tokenizer(question, context, return_tensors="pt")
-    # # Run the model, the deepset way
-    # with torch.no_grad():
-    #     output = model(**inputs)
-    # start_score = output.start_logits
-    # end_score = output.end_logits
-    # # Get the relevance scores for the context and find the most probable beginning using torch
-    # start = torch.argmax(start_score)
-    # end = torch.argmax(end_score)
-    # # convert tokens to strings
-    # # output = tokenizer.decode(input_ids[start:end+1], skip_special_tokens=True)
-    # predict_answer_tokens = inputs.input_ids[0, start : end + 1]
-    # output = tokenizer.decode(predict_answer_tokens, skip_special_tokens=True)
-    # output = string.capwords(output)
-    # print(f"Q. {question} \n Ans. {output}")
+    # nlp = pipeline("question-answering", model=al_model, tokenizer=al_tokenizer)
+    # format = {
+    #     'question': question,
+    #     'context': context
+    # }
+    # res = nlp(format)
+    # output = f"{question}\n{string.capwords(res['answer'])}\tscore : [{res['score']}] \n"
+    # return output
+    inputs = tokenizer(question, context, return_tensors="pt")
+    # Run the model, the deepset way
+    with torch.no_grad():
+        output = model(**inputs)
+    start_score = output.start_logits
+    end_score = output.end_logits
+    # Get the relevance scores for the context and find the most probable beginning using torch
+    start = torch.argmax(start_score)
+    end = torch.argmax(end_score)
+    # convert tokens to strings
+    # output = tokenizer.decode(input_ids[start:end+1], skip_special_tokens=True)
+    predict_answer_tokens = inputs.input_ids[0, start : end + 1]
+    output = tokenizer.decode(predict_answer_tokens, skip_special_tokens=True)
+    output = string.capwords(output)
+    print(f"Q. {question} \n Ans. {output}")
 # QA("What was the first C program", "The first program written in C was Hello World")
 
 def gen_question(inputs):
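
The first hunk swaps the end-to-end question-generation checkpoint from valhalla/t5-small-e2e-qg (paired with a plain t5-small tokenizer) to ThomasSimonini/t5-end2end-question-generation for both model and tokenizer. Below is a minimal standalone sketch of that path: AutoModelForSeq2SeqLM is assumed to be what the alwm alias in generator.py resolves to, and the "generate questions: ... </s>" prefix plus "<sep>" splitting follow the common T5 end-to-end QG convention rather than anything visible in the diff, so check the model card before relying on them.

# Sketch of the question-generation path after the model swap; assumptions noted inline.
from transformers import AutoModelForSeq2SeqLM, T5TokenizerFast

# `alwm` in generator.py is assumed to be a seq2seq auto class; AutoModelForSeq2SeqLM is used here.
hfmodel = AutoModelForSeq2SeqLM.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
hftokenizer = T5TokenizerFast.from_pretrained("ThomasSimonini/t5-end2end-question-generation")

def run_model(input_string, **generator_args):
    # Only max_length=256 is visible in the diff; callers may override or extend it.
    generator_args = {"max_length": 256, **generator_args}
    # Task prefix and </s> terminator follow the usual T5 end-to-end QG convention (assumption).
    input_ids = hftokenizer.encode(
        "generate questions: " + input_string + " </s>", return_tensors="pt"
    )
    res = hfmodel.generate(input_ids, **generator_args)
    decoded = hftokenizer.batch_decode(res, skip_special_tokens=True)
    # The model emits several questions in one sequence, separated by "<sep>".
    return [q.strip() for q in decoded[0].split("<sep>") if q.strip()]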
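
The second hunk retires the deepset/electra-base-squad2 pipeline in favor of manual extractive QA with ahotrod/albert_xxlargev1_squad2_512: tokenize the question/context pair, take the argmax of the start and end logits, and decode that span. Consolidated into a self-contained sketch below; the sample question and context are illustrative, not taken from the repository.

# Sketch of the extractive-QA path added in this commit, gathered into one runnable snippet.
import string
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ahotrod/albert_xxlargev1_squad2_512")
model = AutoModelForQuestionAnswering.from_pretrained("ahotrod/albert_xxlargev1_squad2_512")

def QA(question, context):
    # Encode the question/context pair for the SQuAD2-style model.
    inputs = tokenizer(question, context, return_tensors="pt")
    with torch.no_grad():
        output = model(**inputs)
    # Most probable answer span = argmax of the start and end logits.
    start = torch.argmax(output.start_logits)
    end = torch.argmax(output.end_logits)
    answer_tokens = inputs.input_ids[0, start : end + 1]
    answer = tokenizer.decode(answer_tokens, skip_special_tokens=True)
    return string.capwords(answer)

# Illustrative inputs (not from the repository):
print(QA("Who drafted the Declaration of the Rights of Man?",
         "The Declaration of the Rights of Man and of the Citizen was drafted by Lafayette "
         "with the help of Thomas Jefferson and adopted on 26 August 1789."))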