# Fin_Research / fin_data_api.py
# Use a pipeline as a high-level helper
from transformers import pipeline
import json
import requests
import datetime
import sqlite3

# Load API credentials (secrets.json is expected to contain a 'finhubapi_key' entry)
with open('secrets.json') as f:
    content = json.load(f)


# API DOC: https://finnhub.io/docs/api/introduction
def get_finnhub_data(example: str) -> dict:
    """
    Pass in the "example" string from the API documentation. It changes for every endpoint.

    :param example: e.g. '/company-news?symbol=AAPL&from=2023-08-15&to=2023-08-20'
    """
    base_url = 'https://finnhub.io/api/v1'  # no trailing slash; each example path starts with '/'
    token = f"&token={content['finhubapi_key']}"
    request = requests.get(f"{base_url}{example}{token}")
    return request.json()


def sentiment_analysis(headline: str) -> list:
    """
    Pass in a headline and get back a sentiment analysis of the text.
    This only works for one headline at a time; should be expanded to work for a list of headlines.

    :param headline: text string, e.g. 'Apple is the best company in the world'
    """
    nlp = pipeline("sentiment-analysis")  # loads the default sentiment model on every call
    return nlp(headline)  # returns a list like [{'label': 'POSITIVE', 'score': 0.99}]
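

# Hedged sketch (not in the original file) of the batched version the docstring above
# anticipates: the Hugging Face pipeline accepts a list of strings, so a single model load
# can score many headlines at once. The name 'sentiment_analysis_batch' is an assumption.
def sentiment_analysis_batch(headlines: list) -> list:
    """
    Pass in a list of headlines and get back one sentiment result per headline.

    :param headlines: e.g. ['Apple beats earnings estimates', 'AAPL slides on weak guidance']
    """
    nlp = pipeline("sentiment-analysis")  # one model load for the whole batch
    return nlp(headlines)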
## get news articles for a company
#res_news = get_finnhub_data('/company-news?symbol=AAPL&from=2023-08-15&to=2023-08-20')
#print(res_news[0].keys())
# for item in res_news:
#     dt_object = datetime.datetime.fromtimestamp(item['datetime']).strftime("%Y-%m-%d")
#     print(item['headline'], item['source'], dt_object)
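
## score each fetched headline (hedged sketch, not in the original file: it simply wires the
## two helpers above together and assumes the same 'headline' field used in the loop above)
# res_news = get_finnhub_data('/company-news?symbol=AAPL&from=2023-08-15&to=2023-08-20')
# for item in res_news:
#     print(item['headline'], sentiment_analysis(item['headline']))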
## get basic financials
#res_basic_fins = get_finnhub_data('/stock/metric?symbol=AAPL&metric=all')
#print(res_basic_fins['metric'].keys())
#print(res_basic_fins['series']['annual'].keys())
#print(res_basic_fins['series']['quarterly'].keys())
## get insider sentiment
#res_sentiment = get_finnhub_data('/stock/insider-sentiment?symbol=AAPL')
#print(res_sentiment['data'][0].keys())
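
## inspect the insider sentiment rows (hedged sketch, not in the original file; the field
## names are assumed from the Finnhub insider-sentiment docs and should be checked against
## the keys printed above)
# for row in res_sentiment['data']:
#     print(row['symbol'], row['year'], row['month'], row['change'], row['mspr'])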
# TODO: put data in the database after figuring out what data we want to store.
# For now this just sanity-checks the connection by reading the existing 'test' table.
conn = sqlite3.connect('fin_data.db')
c = conn.cursor()
c.execute("""
    select * from test
""")
print(c.fetchall())
conn.commit()
conn.close()
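
# A possible next step, sketched under assumptions (the 'news_sentiment' table, its columns,
# and the idea of persisting scored headlines are not in the original file): store results
# once the data to keep has been decided.
# conn = sqlite3.connect('fin_data.db')
# c = conn.cursor()
# c.execute("""
#     CREATE TABLE IF NOT EXISTS news_sentiment (
#         symbol TEXT,
#         published TEXT,
#         headline TEXT,
#         label TEXT,
#         score REAL
#     )
# """)
# c.execute(
#     "INSERT INTO news_sentiment VALUES (?, ?, ?, ?, ?)",
#     ('AAPL', '2023-08-15', 'Apple is the best company in the world', 'POSITIVE', 0.99),
# )
# conn.commit()
# conn.close()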