##############################################################################
# Agent interfaces that bridge private capability agents (pandas,
# sql, ...), 3rd party plugin agents (search, weather, movie, ...),
# and 3rd party LLMs
#
# @philmui
# Mon May 1 18:34:45 PDT 2023
##############################################################################


from langchain.schema import HumanMessage
from langchain.prompts import PromptTemplate, ChatPromptTemplate, \
                              HumanMessagePromptTemplate
from models import load_chat_agent, load_chained_agent, load_sales_agent, \
                   load_sqlite_agent, load_book_agent, load_earnings_agent

import openai
import numpy as np

from collections import namedtuple
import logging

logger = logging.getLogger(__name__)


# Define a named tuple type 'Struct' with fields: deterministic, semantic, creative
_struct = namedtuple('Struct', ['deterministic', 'semantic', 'creative'])

# Types of generated answers
GEN_TYPE = _struct('deterministic', 'semantic', 'creative')

# To parse outputs and get structured data back
from langchain.output_parsers import StructuredOutputParser, ResponseSchema

instruct_template = """
Please answer this question clearly, with easy-to-follow reasoning:
{query}

If you don't know the answer, just reply: not available.
"""

instruct_prompt = PromptTemplate(
    input_variables=["query"],
    template=instruct_template
)

response_schemas = [
    ResponseSchema(name="artist", 
                   description="The name of the musical artist"),
    ResponseSchema(name="song", 
                   description="The name of the song that the artist plays")
]

output_parser = StructuredOutputParser.from_response_schemas(response_schemas)
format_instructions = output_parser.get_format_instructions()

LOCAL_MAGIC_TOKENS = ["my company", "for our team", "our sales", "my team"]
DIGITAL_MAGIC_TOKENS = ["digital media", "our database", "our digital"]

def check_deterministic(sentence, magic_tokens):
    # True when the question contains any of the routing keywords
    return any(t in sentence.lower() for t in magic_tokens)


chat_prompt = ChatPromptTemplate(
    messages=[
        HumanMessagePromptTemplate.from_template(
            "Given a command from the user, extract the artist and \
             song names \n{format_instructions}\n{user_prompt}")  
    ],
    input_variables=["user_prompt"],
    partial_variables={"format_instructions": format_instructions}
)
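
# Illustrative sketch only (not called in this module): how chat_prompt and
# output_parser are typically combined.  `chat_model` is assumed to be any
# LangChain chat model (e.g. ChatOpenAI); it is not constructed here.
def extract_artist_and_song(chat_model, user_prompt):
    messages = chat_prompt.format_prompt(user_prompt=user_prompt).to_messages()
    reply = chat_model(messages)
    # output_parser returns a dict like {"artist": "...", "song": "..."}
    return output_parser.parse(reply.content)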


def chatAgent(chat_message):
    try:
        agent = load_chat_agent(verbose=True)
        output = agent([HumanMessage(content=chat_message)])
    except Exception as e:
        logger.error(e)
        output = "Please rephrase and try chat again."
    return output

def salesAgent(instruction):
    output = ""
    try:
        agent = load_sales_agent(verbose=True)
        output = agent.run(instruction)
        print("panda> " + output)
    except Exception as e:
        logger.error(e)
        output = f"Rephrasing your prompt could get better sales results {e}"
    return output

def chinookAgent(instruction, model_name):
    output = ""
    try:
        agent = load_sqlite_agent(model_name)
        output = agent.run(instruction)
        print("chinook> " + output)
    except Exception as e:
        logger.error(e)
        output = "Rephrasing your prompt could get better db results {e}"
    return output

def check_semantic(string1, string2):
    #
    # a more robust approach is to use a vector DB
    # (Chroma, Pinecone, ...); see the illustrative
    # sketch after this function
    #
    response = openai.Embedding.create(
                    input=[string1, string2],
                    engine="text-similarity-davinci-001"
                )
    embedding_a = response['data'][0]['embedding']
    embedding_b = response['data'][1]['embedding']
    similarity_score = np.dot(embedding_a, embedding_b)
    logger.info(f"similarity: {similarity_score}")

    return similarity_score > 0.8
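
# -----------------------------------------------------------------------------
# Illustrative sketch only (not called anywhere in this module): the vector-DB
# approach mentioned in check_semantic above, using Chroma as an example store.
# The collection name, reference id, and the 0.5 distance cutoff are
# assumptions for demonstration, not values taken from the original code.
# -----------------------------------------------------------------------------
def check_semantic_vectordb(query, reference_text, max_distance=0.5):
    # Lazy import so the rest of the module still works without chromadb.
    import chromadb

    client = chromadb.Client()
    collection = client.get_or_create_collection("reference_queries")
    collection.add(documents=[reference_text], ids=["ref-0"])
    result = collection.query(query_texts=[query], n_results=1)
    # Chroma returns distances for the nearest matches; smaller means closer.
    return result["distances"][0][0] < max_distance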


def earningsAgent(query):
    output = ""
    try:
        agent = load_earnings_agent(True)
        result = agent({
            "query": query
        })
        logger.info(f"book response: {result['result']}")
        output = result['result']
    except Exception as e:
        logger.error(e)
        output = f"Rephrasing your prompt for the book agent{e}"
    return output

def bookAgent(query):
    output = ""
    try:
        agent = load_book_agent(True)
        result = agent({
            "query": query
        })
        logger.info(f"book response: {result['result']}")
        output = result['result']
    except Exception as e:
        logger.error(e)
        output = f"Rephrasing your prompt for the book agent{e}"
    return output


def agentController(question_text, model_name):
    output = ""
    outputType = None

    if check_deterministic(question_text, LOCAL_MAGIC_TOKENS):
        outputType = GEN_TYPE.deterministic
        output += salesAgent(question_text)
        print(f"๐Ÿ”น salesAgent: {output}")
    elif check_deterministic(question_text, DIGITAL_MAGIC_TOKENS):
        outputType = GEN_TYPE.deterministic
        output += chinookAgent(question_text, model_name)
        print(f"๐Ÿ”น chinookAgent: {output}")

    elif check_semantic(question_text, "Salesforce earnings call for Q4 2023"):
        outputType = GEN_TYPE.semantic
        output += earningsAgent(question_text)
        print(f"๐Ÿ”น earningsAgent: {output}")
    elif check_semantic(question_text, "how to govern") or \
         check_semantic(question_text, "fight a war"):
        outputType = GEN_TYPE.semantic
        output += bookAgent(question_text)
        print(f"๐Ÿ”น bookAgent: {output}")

    else: # more creative / complex reasoning  
        outputType = GEN_TYPE.creative
        try:
            instruction = instruct_prompt.format(query=question_text)
            logger.info(f"instruction: {instruction}")
            agent = load_chained_agent(verbose=True, model_name=model_name)
            response = agent([instruction])
            if response is None or "not available" in response["output"]:
                output = ""  # chained agent had no useful answer
            else:
                output += response['output']
                logger.info(f"๐Ÿ”น Steps: {response['intermediate_steps']}")
        except Exception as e: 
            output = "Most likely ran out of tokens ..."
            logger.error(e)

    return output, outputType
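

# Illustrative usage only (not part of the original module).  The question and
# model name below are assumptions; any model accepted by load_chained_agent
# would work the same way.
if __name__ == "__main__":
    answer, answer_type = agentController(
        "How did our sales for my team trend last quarter?", "gpt-3.5-turbo")
    print(f"[{answer_type}] {answer}")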