from rag.agents.interface import Pipeline
from openai import OpenAI
from pydantic import BaseModel, Field
import yfinance as yf
import instructor
import timeit
import box
import yaml
from rich import print
from typing import Any, List, Optional
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=UserWarning)


class FCall(Pipeline):
    def run_pipeline(self,
                     payload: str,
                     query_inputs: List[str],
                     query_types: List[str],
                     keywords: List[str],
                     query: str,
                     file_path: str,
                     index_name: str,
                     options: Optional[List[str]] = None,
                     group_by_rows: bool = True,
                     update_targets: bool = True,
                     debug: bool = False,
                     local: bool = True) -> Any:
print(f"\nRunning pipeline with {payload}\n")
# Import config vars
with open('config.yml', 'r', encoding='utf8') as ymlfile:
cfg = box.Box(yaml.safe_load(ymlfile))
start = timeit.default_timer()
company = query
        # Structured schema the LLM response is validated against
        class StockInfo(BaseModel):
            company: str = Field(..., description="Name of the company")
            ticker: str = Field(..., description="Ticker symbol of the company")
        # Patch the OpenAI client with instructor so `response_model` can be
        # passed to the create call; JSON mode prompts the local Ollama model
        # to answer with plain JSON matching the schema
        client = instructor.patch(
            OpenAI(
                base_url=cfg.OLLAMA_BASE_URL_FUNCTION,
                api_key="ollama",
            ),
            mode=instructor.Mode.JSON,
        )
        # Resolve the company name and ticker symbol; instructor retries
        # (up to max_retries) when the model output fails validation
        resp = client.chat.completions.create(
            model=cfg.LLM_FUNCTION,
            messages=[
                {
                    "role": "user",
                    "content": f"Return the company name and the ticker symbol of the {company}."
                }
            ],
            response_model=StockInfo,
            max_retries=10
        )
        print(resp.model_dump_json(indent=2))
        # Fetch the latest closing price for the extracted ticker via yfinance
        stock = yf.Ticker(resp.ticker)
        hist = stock.history(period="1d")
        stock_price = hist['Close'].iloc[-1]
        print(f"The stock price of {resp.company} is {stock_price} USD.")
        end = timeit.default_timer()
        print('=' * 50)
        print(f"Time to retrieve answer: {end - start}")