Commit 7569f1a
prithivi96 committed
Parent(s): a3e8fba
Upload 6 files
JSON Query Generator V1.0
- JSONPath_Generator.py +59 -0
- app.py +20 -0
- human_template.txt +2 -0
- main.py +0 -0
- requirements.txt +4 -0
- system_template.txt +3 -0
JSONPath_Generator.py
ADDED
@@ -0,0 +1,59 @@
from langchain.prompts import PromptTemplate, SystemMessagePromptTemplate, ChatPromptTemplate, \
    HumanMessagePromptTemplate
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI, AzureChatOpenAI
from langchain.cache import InMemoryCache
import langchain
langchain.llm_cache = InMemoryCache()
import pandas as pd
import os
from langchain.chains import LLMChain
from langchain.output_parsers import PydanticOutputParser
from pydantic import BaseModel, Field
os.environ["OPENAI_API_KEY"] = "<OPENAI_API_KEY>"  # set your own key; never commit a real secret


class JSONPath_Generator:
    def __init__(self, json_input, target_value, json_condition):
        self.json_input = json_input
        self.target_value = target_value
        self.json_condition = json_condition
        # os.environ["OPENAI_API_KEY"] = "<AZURE_OPENAI_API_KEY>"
        # self.model = ChatOpenAI()
        # os.environ["OPENAI_API_TYPE"] = "azure"
        # os.environ["OPENAI_API_VERSION"] = "2023-03-15-preview"
        # self.model = AzureChatOpenAI(
        #     # openaikey=openaikey,
        #     # openai_api_version="2023-03-15-preview",
        #     # azure_deployment="text-davinci-003",
        #     # temperature=0,
        #     deployment_name="gpt-4",
        #     model_name="gpt-4",
        # )
        self.model = OpenAI(
            temperature=0,
            # openai_api_key=self.api_key,
            model_name="gpt-3.5-turbo-instruct"
        )

    def create_chat_prompt(self):
        # System template
        with open("system_template.txt", "r") as sys_temp:
            system_template = sys_temp.read().strip()
        system_prompt = SystemMessagePromptTemplate.from_template(system_template)
        # Human template
        with open("human_template.txt", "r") as hum_temp:
            human_template = hum_temp.read().strip()
        if self.json_condition != '':
            human_template += " provided the {json_condition}"
        human_prompt = HumanMessagePromptTemplate.from_template(human_template)
        # Chat prompt combining the system and human messages
        self.chat_prompt = ChatPromptTemplate.from_messages([system_prompt, human_prompt])

    def create_llm_chain(self):
        # self.read_extract_api_details()
        self.create_chat_prompt()
        chain = LLMChain(llm=self.model, prompt=self.chat_prompt)
        self.response = chain.run(Target_value=self.target_value, json_condition=self.json_condition,
                                  JSON_Input=self.json_input)
        return self.response
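For reference, a minimal usage sketch of the class above, assuming system_template.txt and human_template.txt sit in the working directory and OPENAI_API_KEY is set; the sample JSON body and printed result are illustrative only, as the model output may vary:

# Illustrative usage sketch (not part of this commit).
from JSONPath_Generator import JSONPath_Generator

sample_json = '{"store": {"book": [{"title": "Moby Dick", "price": 8.99}]}}'

generator = JSONPath_Generator(
    json_input=sample_json,
    target_value="price",
    json_condition="",          # no extra condition
)
print(generator.create_llm_chain())   # expected along the lines of: $.store.book[0].price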
app.py
ADDED
@@ -0,0 +1,20 @@
import os
import streamlit as st
import tempfile
import pandas as pd
from JSONPath_Generator import JSONPath_Generator

# Initialize Streamlit app
st.set_page_config(page_title="👨‍💻 JSON Path Generator")
st.header("JSON Path Generator")

# Input text areas
json_value = st.text_area(label="JSON Body Input: ")
target_input = st.text_input(label="Target JSON Key: ")
json_condition = st.text_input(label="JSON Conditions (If any): ")

if st.button("Submit"):
    json_path_gen = JSONPath_Generator(json_input=json_value, target_value=target_input,
                                       json_condition=json_condition)
    res = json_path_gen.create_llm_chain()
    st.write(res)
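One possible extension, not part of this commit: before writing the model's answer to the page, the app could check that the returned expression actually parses and matches the pasted JSON. A rough sketch using the third-party jsonpath-ng package (which would have to be added to requirements.txt); the helper name is hypothetical:

# Hypothetical validation helper (not in this commit); requires the jsonpath-ng package.
import json
from jsonpath_ng import parse

def validate_jsonpath(expression: str, json_text: str):
    """Return the values matched by `expression` in `json_text`, or None if either is invalid."""
    try:
        matcher = parse(expression.strip())
        data = json.loads(json_text)
    except Exception:
        return None
    return [match.value for match in matcher.find(data)]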
human_template.txt
ADDED
@@ -0,0 +1,2 @@
{JSON_Input}
Generate a JSON PATH query to extract the {Target_value} from the above JSON body
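To illustrate how the two placeholders are filled (the sample values below are made up), the rendered human message can be previewed like this, assuming human_template.txt is in the working directory:

# Illustration only: rendering human_template.txt with sample values.
with open("human_template.txt") as f:
    template = f.read().strip()

print(template.format(
    JSON_Input='{"store": {"book": [{"title": "Moby Dick", "price": 8.99}]}}',
    Target_value="price",
))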
main.py
ADDED
File without changes
requirements.txt
ADDED
@@ -0,0 +1,4 @@
streamlit
pandas
langchain
openai
system_template.txt
ADDED
@@ -0,0 +1,3 @@
I want you to act as an expert in generating JSON queries for a provided JSON body and a value that needs to be fetched.
I will input the JSON body and the value that needs to be fetched; analyze the JSON and return the JSON PATH query that extracts the provided value from the JSON body.
Do not write any explanation or context. Do not output anything apart from the JSON PATH query.
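As a concrete illustration of the "JSON PATH query only" constraint, a compliant exchange might look like the following (the input/output pair is hypothetical, not taken from the repo):

# Hypothetical example of a compliant exchange under these system instructions.
sample_json_body = '{"store": {"book": [{"title": "Moby Dick", "price": 8.99}]}}'
target_value = "price"
expected_model_output = "$.store.book[0].price"   # bare JSONPath, no explanation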