# Streamlit app: suggests arXiv subject tags for a paper from its title and
# (optionally) its summary, using bert2bert embeddings and a small MLP classifier.
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import torch
import torch.nn as nn
class Net(nn.Module):
    """MLP head that maps a 768-dim sentence embedding to 8 arXiv subject scores."""

    def __init__(self):
        super(Net, self).__init__()
        self.layer = nn.Sequential(
            nn.Linear(768, 512),
            nn.ReLU(),
            nn.Linear(512, 256),
            nn.ReLU(),
            nn.Linear(256, 128),
            nn.ReLU(),
            nn.Linear(128, 8),
        )

    def forward(self, x):
        return self.layer(x)
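# Illustrative shape check (not part of the app logic): the head maps a 768-dim
# embedding to 8 scores, one per subject listed below, e.g.
#   Net()(torch.zeros(768)).shape  ->  torch.Size([8])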
def get_hidden_states(encoded, model):
    # Run the seq2seq model on the tokenized text and build a sentence embedding
    # by summing the last four decoder hidden-state layers, then averaging over tokens.
    with torch.no_grad():
        output = model(decoder_input_ids=encoded['input_ids'], output_hidden_states=True, **encoded)
    layers = [-4, -3, -2, -1]
    states = output['decoder_hidden_states']
    output = torch.stack([states[i] for i in layers]).sum(0).squeeze()
    return output.mean(dim=0)
def get_word_vector(sent, tokenizer, model):
    # Tokenize the text and return its pooled 768-dim embedding.
    encoded = tokenizer.encode_plus(sent, return_tensors="pt", truncation=True)
    return get_hidden_states(encoded, model)
labels_articles = {1: 'Computer Science', 2: 'Economics', 3: 'Electrical Engineering And Systems Science',
                   4: 'Mathematics', 5: 'Physics', 6: 'Quantitative Biology',
                   7: 'Quantitative Finance', 8: 'Statistics'}
def load_models():
    # Classifier head: weights come from the checkpoint file shipped with the Space.
    model = Net()
    model.load_state_dict(torch.load('dummy_model.txt', map_location=torch.device('cpu')))
    model.eval()
    # Embedding model: bert2bert trained for arXiv title generation.
    tokenizer = AutoTokenizer.from_pretrained("Callidior/bert2bert-base-arxiv-titlegen")
    model_emb = AutoModelForSeq2SeqLM.from_pretrained("Callidior/bert2bert-base-arxiv-titlegen")
    return model, model_emb, tokenizer
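# Note (assumption about the runtime): Streamlit re-runs the whole script on every
# interaction, so both models are reloaded each time. If the installed Streamlit
# version provides st.cache_resource (or the older st.cache), decorating
# load_models with it would keep the models in memory between reruns.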
model, model_emb, tokenizer = load_models()

title = st.text_area("Write the title of your article")
summary = st.text_area("Write the summary of your article, or leave it empty (press Ctrl + Enter to submit)")
text = title + '. ' + summary
embed = get_word_vector(text, tokenizer, model_emb)
# softmax turns the classifier scores into probabilities over the 8 subjects
probs = torch.nn.functional.softmax(model(embed), dim=0)
best_tags = torch.argsort(probs, descending=True)

# Show the most likely tags until their cumulative probability exceeds 0.95.
total_prob = 0
st.write('best tags:')
for tag in best_tags:
    if total_prob > 0.95:
        break
    total_prob += probs[tag.item()]
    new_tag = labels_articles[tag.item() + 1]
    st.write(new_tag)
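# A possible refactoring sketch (not called by the app above): the embedding,
# classification, and cumulative-probability steps from the loop wrapped into one
# reusable helper. The helper name is an illustrative assumption; the 0.95
# threshold mirrors the value used in the app.
def predict_tags(text, threshold=0.95):
    vec = get_word_vector(text, tokenizer, model_emb)
    probabilities = torch.nn.functional.softmax(model(vec), dim=0)
    ranked = torch.argsort(probabilities, descending=True)
    tags, cumulative = [], 0.0
    for idx in ranked:
        if cumulative > threshold:
            break
        cumulative += probabilities[idx].item()
        tags.append(labels_articles[idx.item() + 1])
    return tags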