import pickle

import gradio as gr
import spacy
import gensim.downloader as api

# Load the pretrained word2vec embeddings, the spaCy pipeline, and the trained classifier.
wv = api.load('word2vec-google-news-300')
nlp = spacy.load("en_core_web_lg")

with open('model.pkl', 'rb') as f:
    model = pickle.load(f)


def preprocess_and_vectorize(text):
    """Lemmatize the text, drop stop words and punctuation, and return the mean word vector."""
    doc = nlp(text)
    filtered_tokens = []
    for token in doc:
        if token.is_punct or token.is_stop:
            continue
        filtered_tokens.append(token.lemma_)
    return wv.get_mean_vector(filtered_tokens)


def processInput(text):
    """Vectorize the input text and map the model's numeric prediction to a category label."""
    preprocessed_data = [preprocess_and_vectorize(text)]
    output = model.predict(preprocessed_data)
    mapped_data = {
        1: "World",
        2: "Sports",
        3: "Business",
        4: "Sci/Tech",
    }
    return mapped_data.get(output[0])


iface = gr.Interface(fn=processInput, inputs="text", outputs="text")

if __name__ == "__main__":
    iface.launch()