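"""Streamlit app that runs a Google Custom Search for a query, fetches each
result page, and displays an abstractive summary of its content.

Search results are cached as JSON under search-results/ and per-URL summaries
under summaries/; both directories are assumed to already exist.
"""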
import json
import uuid
from functools import cache
from os import getenv
from os.path import exists

import streamlit as st
from dotenv import load_dotenv
from googleapiclient.discovery import build
from slugify import slugify
from transformers import pipeline

from beautiful_soup.app import get_url_content

@cache
def google_search_api_request( query ):
    """Query the Google Custom Search API, memoizing results per query."""
    load_dotenv()

    api_key = getenv('GOOGLE_SEARCH_API_KEY')
    # cx = os.getenv('GOOGLE_SEARCH_ENGINE_ID')

    service = build(
        "customsearch",
        "v1",
        developerKey=api_key,
        cache_discovery=False
    )

    # Exclude PDFs from search results.
    query = query + ' -filetype:pdf'

    # cx is the hard-coded custom search engine ID.
    return service.cse().list(
        q=query,
        cx='05048cc2df6134a06',
        num=5,
    ).execute()

def search_results( query ):
    """Return search results for a query, reading from the on-disk cache on a hit."""
    file_path = 'search-results/' + slugify( query ) + '.json'

    results = []

    if exists( file_path ):
        with open( file_path, 'r' ) as results_file:
            results = json.load( results_file )
    else:
        search_result = google_search_api_request( query )

        if int( search_result['searchInformation']['totalResults'] ) > 0:
            results = search_result['items']

            with open( file_path, 'w' ) as results_file:
                json.dump( results, results_file )

    if len( results ) == 0:
        raise Exception('No results found.')

    return results

def main():
    st.title('Google Search')

    query = st.text_input('Search query')

    if query:
        with st.spinner('Loading search results...'):
            try:
                results = search_results( query )
            except Exception as exception:
                st.exception(exception)
                return

        number_of_results = len( results )
        st.success( 'Found {} results.'.format( number_of_results ) )

        progress_bar = st.progress(0)

        for index, result in enumerate(results):
            # Stable, URL-derived identifier used as the summary cache key.
            url_id = uuid.uuid5( uuid.NAMESPACE_URL, result['link'] ).hex

            st.write(result['link'])
            st.write(url_id)

            try:
                content = get_url_content( result['link'] )
            except Exception as exception:
                # Skip pages that fail to download, but keep the bar moving.
                st.exception(exception)
                progress_bar.progress( ( index + 1 ) / number_of_results )
                continue

            file_path = 'summaries/' + url_id + '.json'

            if exists( file_path ):
                with open( file_path, 'r' ) as file:
                    summary = json.load( file )
            else:
                # Note: building the pipeline here reloads the model for every
                # uncached result; hoisting it above the loop would be faster.
                summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
                summary = summarizer(content, max_length=130, min_length=30, do_sample=False, truncation=True)

                with open( file_path, 'w' ) as file:
                    json.dump( summary, file )

            for sentence in summary:
                st.write(sentence['summary_text'])

            progress_bar.progress( ( index + 1 ) / number_of_results )


if __name__ == '__main__':
    main()
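
# To run locally (assuming this file is saved as app.py, which is the usual
# entry-point name for a Streamlit Space): streamlit run app.py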