# Scientific Data Substantiator — Streamlit app (Hugging Face Space).
# NOTE: the "Spaces: Sleeping" lines that previously sat here were Space
# status-badge text captured during extraction, not source code.
import requests
import streamlit as st
from bs4 import BeautifulSoup
def search_arxiv(search_query: str, start: int = 0, max_results: int = 10) -> list:
    """Query the arXiv Atom API and return matching articles.

    Parameters
    ----------
    search_query : str
        arXiv search expression (e.g. ``all:electron``). Assumed to be
        URL-safe; passed straight into the query string.
    start : int
        Zero-based offset into the result set, for paging.
    max_results : int
        Maximum number of entries to request (was hard-coded to 10).

    Returns
    -------
    list[dict]
        One dict per entry with keys ``title``, ``authors`` (list of
        author-name strings) and ``abstract``.

    Raises
    ------
    requests.HTTPError
        If the API responds with a non-2xx status.
    """
    base_url = 'http://export.arxiv.org/api/query?'
    query = f'search_query={search_query}&start={start}&max_results={max_results}'
    # Timeout so a stalled API call cannot hang the app forever; surface
    # HTTP errors instead of silently parsing an error page.
    response = requests.get(base_url + query, timeout=10)
    response.raise_for_status()
    # html.parser lowercases tag names, which is harmless here: all Atom
    # tags we read (entry, title, author, name, summary) are lowercase.
    feed = BeautifulSoup(response.content, 'html.parser')
    articles = []
    for entry in feed.find_all('entry'):
        articles.append({
            # arXiv feed text is padded with newlines/indentation; strip it.
            'title': entry.title.text.strip(),
            'authors': [author.find('name').text for author in entry.find_all('author')],
            'abstract': entry.summary.text.strip(),
        })
    return articles
def get_paper_info(paper_id: str) -> dict:
    """Fetch paper metadata from the Semantic Scholar v1 paper API.

    Parameters
    ----------
    paper_id : str
        A Semantic Scholar paper identifier (S2 id, DOI, arXiv id, ...).

    Returns
    -------
    dict
        Keys ``title``, ``authors`` (list of name strings), ``abstract``
        and ``fieldsOfStudy``; values may be ``None`` when the API omits
        a field for a given record.

    Raises
    ------
    requests.HTTPError
        If the API responds with a non-2xx status (e.g. unknown id) —
        previously a 404 surfaced as a confusing ``KeyError`` on the
        error-message JSON.
    """
    base_url = 'https://api.semanticscholar.org/v1/paper/'
    response = requests.get(base_url + paper_id, timeout=10)
    response.raise_for_status()
    paper = response.json()
    # .get() guards against fields the API legitimately returns as null
    # or omits (abstract/fieldsOfStudy are missing for some records).
    return {
        'title': paper.get('title'),
        'authors': [author['name'] for author in paper.get('authors', [])],
        'abstract': paper.get('abstract'),
        'fieldsOfStudy': paper.get('fieldsOfStudy'),
    }
# --- Streamlit UI ---------------------------------------------------------
# Top-level script body (Streamlit re-runs this on every interaction).
# Reconstructed indentation for the `if`/`for` bodies, which was lost in
# extraction; runtime strings are unchanged.
st.title('Scientific Data Substantiator')

search_query = st.text_input("Enter your search term")
if search_query:
    articles = search_arxiv(search_query)
    for article in articles:
        st.write("Title: ", article['title'])
        st.write("Authors: ", ", ".join(article['authors']))
        st.write("Abstract: ", article['abstract'])
        st.write("-----")