Ali-C137 committed on
Commit
d66881f
1 Parent(s): 5f5f7a2

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -13
app.py CHANGED
@@ -1,14 +1,7 @@
1
- # -*- coding: utf-8 -*-
2
- """Motivation-Letter-Generator
3
 
4
- Automatically generated by Colaboratory.
5
-
6
- Original file is located at
7
- https://colab.research.google.com/drive/1ZjAxQWoA9ECi-WgAMVm0HyonnrFFMlHG
8
- """
9
-
10
- #! pip install transformers
11
- #! pip install gradio
12
 
13
  from transformers import AutoModelForCausalLM, AutoTokenizer, set_seed, pipeline
14
  import gradio as gr
@@ -19,8 +12,8 @@ torch.set_default_tensor_type(torch.cuda.FloatTensor)
19
  ### need more GPU power to call better models !!!!!!
20
 
21
  # from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
22
  # tokenizer = AutoTokenizer.from_pretrained("bigscience/T0pp")
23
- # model = AutoModelForSeq2SeqLM.from_pretrained("bigscience/T0pp") # 11B param
24
 
25
  model = AutoModelForCausalLM.from_pretrained('EleutherAI/gpt-neo-1.3B', use_cache=True)
26
  tokenizer = AutoTokenizer.from_pretrained('EleutherAI/gpt-neo-1.3B')
@@ -43,5 +36,3 @@ gr.Interface(
43
  title=title,
44
  article=article).launch()
45
 
46
-
47
-
 
1
+ # Motivation-Letter-Generator
 
2
 
3
+ # !pip install transformers
4
+ # !pip install gradio
 
 
 
 
 
 
5
 
6
  from transformers import AutoModelForCausalLM, AutoTokenizer, set_seed, pipeline
7
  import gradio as gr
 
12
  ### need more GPU power to call better models !!!!!!
13
 
14
  # from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
15
+ # model = AutoModelForSeq2SeqLM.from_pretrained("bigscience/T0pp", use_cache=True) # 11B param
16
  # tokenizer = AutoTokenizer.from_pretrained("bigscience/T0pp")
 
17
 
18
  model = AutoModelForCausalLM.from_pretrained('EleutherAI/gpt-neo-1.3B', use_cache=True)
19
  tokenizer = AutoTokenizer.from_pretrained('EleutherAI/gpt-neo-1.3B')
 
36
  title=title,
37
  article=article).launch()
38