# -*- coding: utf-8 -*-
"""Motivation-Letter-Generator

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1ZjAxQWoA9ECi-WgAMVm0HyonnrFFMlHG
"""

#! pip install transformers
#! pip install gradio

from transformers import AutoModelForCausalLM, AutoTokenizer, set_seed, pipeline
import gradio as gr

import torch
# Assumes a CUDA GPU runtime (e.g. a Colab GPU): new tensors default to GPU memory.
torch.set_default_tensor_type(torch.cuda.FloatTensor)

### Larger models need more GPU memory than a free Colab runtime provides, e.g.:

# from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# tokenizer = AutoTokenizer.from_pretrained("bigscience/T0pp")
# model = AutoModelForSeq2SeqLM.from_pretrained("bigscience/T0pp") # 11B param
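# A possible way to fit a larger checkpoint on limited hardware is to shard it across the
# available devices and load the weights in 8-bit. This is only a sketch (it assumes the
# `accelerate` and `bitsandbytes` packages are installed) and is not used below.
#
# model = AutoModelForSeq2SeqLM.from_pretrained(
#     "bigscience/T0pp",
#     device_map="auto",   # let accelerate spread layers over GPU/CPU memory
#     load_in_8bit=True,   # bitsandbytes 8-bit weights to reduce memory use
# )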

# GPT-Neo 1.3B is small enough to fit on a free Colab GPU.
model = AutoModelForCausalLM.from_pretrained('EleutherAI/gpt-neo-1.3B', use_cache=True)
tokenizer = AutoTokenizer.from_pretrained('EleutherAI/gpt-neo-1.3B')
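
# The `pipeline` helper imported above is an alternative, higher-level way to run the same
# model (a sketch only; the app below calls model.generate() directly for finer control):
#
# generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0)
# generator("I'm Ada and I want to write a motivation letter ...", max_length=200)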

set_seed(424242)  # fixed seed for reproducible generations

def generate(Name, Employer, Position, Organization, Hard_skills, Soft_skills,
             max_length=500, top_k=1, temperature=0.9, repetition_penalty=2.0):
    prompt = (f"I'm {Name} and I want to write a motivation letter to {Employer} "
              f"about the position {Position} at {Organization}, mentioning the hard skills "
              f"{Hard_skills} and soft skills {Soft_skills} I have acquired.")
    inputs = tokenizer(prompt, return_tensors="pt").to(0)  # move input_ids/attention_mask to GPU 0
    sample = model.generate(**inputs, do_sample=True, max_length=max_length, top_k=top_k,
                            temperature=temperature, repetition_penalty=repetition_penalty)
    return tokenizer.decode(sample[0], skip_special_tokens=True)
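
# Quick sanity check outside the UI (a sketch with made-up example values; uncomment to
# run once the model is loaded):
#
# print(generate("Ada Lovelace", "Dr. Smith", "Research Intern", "ACME Labs",
#                "Python, PyTorch", "teamwork, communication", max_length=200))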

title = "Motivation Letter Generator w/ GPT-Neo-1.3B"
article = "Colab does not offer enough GPU memory to load larger models such as the 176B-parameter BLOOM, so this is a toy demo. If you have the resources to run a bigger model, feel free to try it or contact me: ali.elfilali00@gmail.com"

gr.Interface(
    fn=generate,
    inputs=["text", "text", "text", "text", "text", "text"],
    outputs="text",
    title=title,
    article=article).launch()
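
# The interface above only exposes the six text fields, so the generation parameters keep
# their defaults. A possible extension (a sketch, assuming Gradio 3+ slider components):
#
# gr.Interface(
#     fn=generate,
#     inputs=["text"] * 6 + [
#         gr.Slider(50, 1000, value=500, step=10, label="max_length"),
#         gr.Slider(1, 100, value=1, step=1, label="top_k"),
#         gr.Slider(0.1, 1.5, value=0.9, label="temperature"),
#         gr.Slider(1.0, 3.0, value=2.0, label="repetition_penalty"),
#     ],
#     outputs="text",
#     title=title,
#     article=article,
# ).launch()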