File size: 2,838 Bytes
d94942e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fccd977
d94942e
fccd977
 
 
 
 
 
 
 
 
 
 
 
d94942e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fccd977
d94942e
 
 
 
 
fccd977
 
 
 
 
fa92033
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# ******* THIS FILE CONTAINS ALL THE PROMPTS & CHAINS USED IN Functions.py ***********
from Templates import *
from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms import OpenAI
from dotenv import load_dotenv
import os


class PromptTemplates:
    """Holds every PromptTemplate used by the chains in this module.

    Each attribute wraps one of the raw template strings imported from
    ``Templates`` together with the input variables that template expects.
    """

    def __init__(self):
        # Convert generated user stories into a requested JSON structure.
        self.json_prompt_template = PromptTemplate(
            template=convert_json_template,
            input_variables=["user_stories", "json_structure"],
        )

        # Produce user stories from a project name and its RFP details.
        self.user_story_prompt = PromptTemplate(
            template=user_story_template,
            input_variables=["project_name", "rfp_details"],
        )

        # Conversational bot prompt: prior context plus the new user input.
        self.bot_prompt = PromptTemplate(
            template=bot_template,
            input_variables=["context", "input"],
        )

        # Summarize an RFP for a given project.
        self.summary_prompt = PromptTemplate(
            template=summary_template,
            input_variables=["project_name", "rfp_details"],
        )

        # Effort estimation: needs the summary, the stories, and team sizes.
        self.estimations_prompt_template = PromptTemplate(
            template=estimations_template,
            input_variables=[
                "project_summary",
                "user_stories",
                "tech_leads",
                "senior_developers",
                "junior_developers",
            ],
        )

        # Project roadmap generation from a single combined input blob.
        self.roadmap_prompt = PromptTemplate(
            template=roadmap_template,
            input_variables=["project_input"],
        )


class LLMChains:
    """Builds the LLMChain instances used throughout Functions.py.

    One chain per task (summary, bot conversation, user stories, JSON
    conversion, estimations, roadmap), all backed by the same LLM
    configuration.
    """

    def __init__(self):
        # Load .env so the OpenAI client can pick up OPENAI_API_KEY from
        # the environment; the key does not need to be passed explicitly.
        load_dotenv()

        prompts = PromptTemplates()

        # All chains previously constructed an identical LLM; build it once
        # and share it.  NOTE(review): gpt-3.5-turbo-16k is a chat model —
        # langchain's ChatOpenAI wrapper is normally the better fit than
        # OpenAI; left unchanged here to preserve existing behavior.
        llm = OpenAI(model_name="gpt-3.5-turbo-16k", temperature=0.7)

        # generate project summary
        # (verbose was the string "true" before — truthy, but the parameter
        # is a bool; use True, consistent with roadmap_chain below)
        self.summary_chain = LLMChain(
            llm=llm, prompt=prompts.summary_prompt, verbose=True
        )

        # generate bot conversation
        self.bot_chain = LLMChain(llm=llm, prompt=prompts.bot_prompt, verbose=True)

        # generate user stories
        self.user_story_chain = LLMChain(
            llm=llm, prompt=prompts.user_story_prompt, verbose=True
        )

        # convert user stories to JSON
        self.json_chain = LLMChain(
            llm=llm, prompt=prompts.json_prompt_template, verbose=True
        )

        # generate effort estimations
        self.estimations_chain = LLMChain(
            llm=llm, prompt=prompts.estimations_prompt_template, verbose=True
        )

        # generate project roadmap
        self.roadmap_chain = LLMChain(
            llm=llm, prompt=prompts.roadmap_prompt, verbose=True
        )