Upload 9 files
Browse files- gpt.py +39 -0
- main.py +142 -0
- mistral.py +59 -0
- news.py +45 -0
- requirements.txt +14 -0
- static/data/2024-01-20 +1 -0
- web.py +38 -0
gpt.py
ADDED
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pip install -U g4f
|
2 |
+
import g4f
|
3 |
+
from time import time as t
|
4 |
+
|
5 |
+
|
6 |
+
def MsgDelAuto(messages: list) -> list:
    """Trim a chat history in place until its str() form fits in ~5500 chars.

    Drops entries at index 10 — preserving the first ten messages, which
    presumably hold the system/context turns (TODO confirm) — until the
    serialized history is small enough.  Returns the same (mutated) list.
    """
    # Bug fixes vs. the original: iterate instead of recursing (no recursion
    # limit on huge histories), and guard the pop so that pop(10) cannot
    # raise IndexError when the history has 10 or fewer entries.
    while len(str(messages)) > 5500 and len(messages) > 10:
        messages.pop(10)
    return messages
|
13 |
+
|
14 |
+
def ChatGpt(message: str, messages: list = None):
    """Send *message* to GPT via the g4f GPTalk provider, streaming the reply.

    Parameters:
        message: the user's new prompt.
        messages: prior conversation history (mutated in place); omit for a
            fresh conversation.

    Returns:
        (reply_text, updated_messages, elapsed_seconds)
    """
    C = t()
    # Bug fix: the original default `messages=[]` was a mutable default
    # argument shared across calls, leaking one caller's history into the next.
    if messages is None:
        messages = []
    messages = MsgDelAuto(messages)
    messages.append({"role": "user", "content": message})

    response = g4f.ChatCompletion.create(
        model="gpt-4-32k-0613",
        provider=g4f.Provider.GPTalk,
        messages=messages,
        stream=True,
    )

    # Accumulate streamed chunks; renamed loop variable so it no longer
    # shadows the `message` parameter.
    ms = ""
    for chunk in response:
        ms += str(chunk)
    messages.append({"role": "assistant", "content": ms})

    return ms, messages, t() - C
|
32 |
+
|
33 |
+
if __name__ == "__main__":
    # Interactive smoke test: one prompt, print the reply tuple and timing.
    user_prompt = input(">>> ")
    started = t()
    print(ChatGpt(user_prompt, []))
    print(t() - started)
|
39 |
+
|
main.py
ADDED
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from flask import Flask, request, jsonify
|
2 |
+
from mistral import Mistral7B
|
3 |
+
from gpt import ChatGpt
|
4 |
+
from news import News
|
5 |
+
from datetime import datetime
|
6 |
+
from os import listdir
|
7 |
+
from web import Online_Scraper
|
8 |
+
|
9 |
+
app = Flask(__name__)
|
10 |
+
|
11 |
+
|
12 |
+
# Tracking API usage
# Per-endpoint request counts for the current day.  Load() replaces this
# dict from static/data/<date> at startup; Update() persists it after hits.
counter={
    'Mistral7B': 0,
    'ChatGpt': 0,
    'News': 0,
    "Web": 0,
}
|
19 |
+
|
20 |
+
def Load():
    """Load today's usage counters from disk, creating the file if absent.

    Reads static/data/<YYYY-MM-DD>; on a new day the counters reset to zero
    and a fresh file is written.  Mutates the module-level `counter` dict.
    """
    global counter
    # Security fix: the original used eval() on the file contents, which
    # executes arbitrary code if the file is tampered with.  literal_eval
    # only parses Python literals (the dict-repr format we write).
    from ast import literal_eval

    current_date = str(datetime.now().date())
    existing = listdir(r"static/data/")
    if current_date in existing:
        with open(r"static/data/" + current_date, "r") as f:
            counter = literal_eval(f.read())
    else:
        # New day (or first run): start fresh and persist the zeroed counters.
        counter = {
            'Mistral7B': 0,
            'ChatGpt': 0,
            'News': 0,
            "Web": 0,
        }
        with open(r"static/data/" + current_date, "w") as f:
            f.write(str(counter))
|
39 |
+
|
40 |
+
def Update():
    """Persist the current counters to today's static/data/<date> file.

    If the day rolled over since Load(), the counters are reset to zero
    first (matching Load()'s new-day behavior) and then written out.
    """
    global counter
    today = str(datetime.now().date())
    path = r"static/data/" + today
    if today not in listdir(r"static/data/"):
        # New day: zero everything before writing the fresh file.
        counter = {
            'Mistral7B': 0,
            'ChatGpt': 0,
            'News': 0,
            "Web": 0,
        }
    # Both branches of the original wrote str(counter) to the same path,
    # so a single write after the optional reset is equivalent.
    with open(path, "w") as f:
        f.write(str(counter))
|
60 |
+
|
61 |
+
@app.route('/mistral7b', methods=['POST'])
def generate():
    """POST /mistral7b — relay a prompt to the Mistral-7B backend.

    JSON body: {"prompt": str, "messages": list, "key": str}.
    Responds with the reply, updated history, and elapsed time.
    """
    global counter
    payload = request.json
    reply, updated_messages, execution_time = Mistral7B(
        payload.get('prompt', ''),
        payload.get('messages', []),
        payload.get('key', ''),
    )
    counter['Mistral7B'] += 1
    Update()
    return jsonify({
        'response': reply,
        'messages': updated_messages,
        'execution_time': execution_time,
    })
|
82 |
+
|
83 |
+
@app.route('/chatgpt', methods=['POST'])
def chat():
    """POST /chatgpt — relay a message to the g4f ChatGpt helper.

    JSON body: {"message": str, "messages": list}.
    Responds with the reply, updated history, and elapsed time.
    """
    global counter
    payload = request.json
    reply, updated_messages, execution_time = ChatGpt(
        payload.get('message', ''),
        payload.get('messages', []),
    )
    counter["ChatGpt"] += 1
    Update()
    return jsonify({
        'response': reply,
        'messages': updated_messages,
        'execution_time': execution_time,
    })
|
103 |
+
|
104 |
+
@app.route('/news', methods=['GET'])
def get_news():
    """GET /news — cached-or-fresh TechCrunch headlines.

    Query params: key (NewsAPI key), cache ("true"/"false", default true).
    """
    global counter
    api_key = request.args.get('key', '')
    use_cache = request.args.get('cache', 'True').lower() == 'true'
    news, error, execution_time = News(api_key, use_cache)
    counter["News"] += 1
    Update()
    return jsonify({
        'news': news,
        'error': error,
        'execution_time': execution_time,
    })
|
123 |
+
|
124 |
+
@app.route('/web', methods=['GET'])
def Web():
    """GET /web — scrape a quick answer from Google for ?prompt=...

    Returns {"response": (text, error, elapsed) or null-tuple}.
    """
    query = request.args.get('prompt', '')
    result = {
        'response': Online_Scraper(query)
    }
    counter["Web"] += 1
    # Bug fix: the increment was never written to disk — every other
    # endpoint calls Update() after bumping its counter, so 'Web' hits
    # were silently lost on restart.
    Update()
    return jsonify(result)
|
132 |
+
|
133 |
+
|
134 |
+
@app.route('/divyanshpizza', methods=['GET'])
def get_counters():
    """GET /divyanshpizza — debug endpoint: usage counters + data-file list.

    Bug fix: the original returned a tuple of two jsonify() responses.
    Flask interprets a 2-tuple return as (body, status), and a Response
    object is not a valid status code, so the endpoint failed at runtime.
    Merge everything into one JSON object instead.
    """
    global counter
    return jsonify({**counter, "data": str(listdir(r"static/data/"))})
|
138 |
+
|
139 |
+
# Populate today's counters (and create today's data file) at import time,
# so the module works both under `python main.py` and under a WSGI server.
Load()

if __name__ == '__main__':
    # Flask development server only; NOTE(review): requirements.txt lists
    # gunicorn/uvicorn — presumably used for deployment.
    app.run()
|
mistral.py
ADDED
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from huggingface_hub import InferenceClient
|
2 |
+
import random
|
3 |
+
from time import time as t
|
4 |
+
|
5 |
+
# Hugging Face Inference API endpoint for Mistral-7B-Instruct v0.1.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.1"


# Default system persona for the assistant.
# NOTE(review): Mistral7B() takes its own `messages` argument and this
# module-level list is not referenced by the visible code — confirm whether
# callers elsewhere rely on it before removing.
messages = [
    {"role": "system", "content": "I'm the latest J.A.R.V.I.S. AI, designed by Divyansh Shukla with capabilities to access systems through various programming languages using modules like webbrowser, pyautogui, time, pyperclip, random, mouse, wikipedia, keyboard, datetime, tkinter, PyQt5, etc."},
]
|
11 |
+
|
12 |
+
# Build a Mistral-style instruction prompt from the message parts.
def format_prompt(message, custom_instructions=None):
    """Wrap *message* (preceded by optional *custom_instructions*) in
    Mistral "[INST] ... [/INST]" tags and return the combined string."""
    segments = []
    if custom_instructions:
        segments.append(f"[INST] {custom_instructions} [/INST]")
    segments.append(f"[INST] {message} [/INST]")
    return "".join(segments)
|
19 |
+
|
20 |
+
# Generate a response from the hosted Mistral-7B-Instruct endpoint.
def Mistral7B(prompt, messages: list = None, API_KEY="", temperature=0.9, max_new_tokens=1024, top_p=0.95, repetition_penalty=1.0):
    """Query Mistral-7B via the HF Inference API.

    Parameters:
        prompt: the user's new message.
        messages: prior conversation history (mutated in place); omit for a
            fresh conversation.
        API_KEY: Hugging Face bearer token.
        temperature / max_new_tokens / top_p / repetition_penalty: sampling
            controls forwarded to text_generation.

    Returns:
        (response_text, updated_messages, elapsed_seconds)
    """
    C = t()
    # Bug fix: the original default was the mutable literal [{}] — shared
    # across calls AND seeding every conversation with an empty dict.
    if messages is None:
        messages = []
    headers = {"Authorization": f"Bearer {API_KEY}"}
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2  # clamp: the endpoint misbehaves at/near zero
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=random.randint(0, 10**7),
    )
    # Prior history is serialized wholesale into the instruction block;
    # the structured list is still maintained for the caller.
    custom_instructions = str(messages)
    formatted_prompt = format_prompt(prompt, custom_instructions)

    messages.append({"role": "user", "content": prompt})

    client = InferenceClient(API_URL, headers=headers)
    response = client.text_generation(formatted_prompt, **generate_kwargs)

    messages.append({"role": "assistant", "content": response})

    return response, messages, t() - C
|
48 |
+
|
49 |
+
if __name__ == "__main__":
    # Minimal console REPL for manual testing; type 'exit' to quit.
    while True:
        user_prompt = input("You: ")
        if user_prompt.lower() == 'exit':
            break
        # Prints the full (response, messages, elapsed) tuple.
        print("Bot:", Mistral7B(user_prompt))
|
news.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
from time import time as t
|
3 |
+
import shelve
|
4 |
+
|
5 |
+
CACHE_FILE = "news_cache.db"
|
6 |
+
|
7 |
+
def get_cached_news():
    """Return cached headlines if fresher than 12 hours, else None."""
    with shelve.open(CACHE_FILE) as cache:
        # Bug fix: guard both keys — a partially written cache ("news"
        # present but "last_update_time" missing) raised KeyError before.
        if "news" in cache and "last_update_time" in cache:
            if t() - cache["last_update_time"] < 12 * 60 * 60:  # 12 hours
                return cache["news"]

    return None
|
16 |
+
|
17 |
+
def cache_news(news):
    """Store *news* in the shelve cache, stamped with the current time."""
    with shelve.open(CACHE_FILE) as store:
        store["news"] = news
        store["last_update_time"] = t()
|
21 |
+
|
22 |
+
def News(KEY, cache=True):
    """Fetch the top TechCrunch headlines from NewsAPI.

    Parameters:
        KEY: NewsAPI key.
        cache: serve a cached result if one is fresher than 12 hours.

    Returns:
        (news_text, error, elapsed_seconds); exactly one of news_text /
        error is None.
    """
    if cache:
        cached_news = get_cached_news()
        if cached_news:
            return cached_news, None, 0  # cache hit, effectively free

    C = t()
    main_url = f'https://newsapi.org/v2/top-headlines?sources=techcrunch&apiKey={KEY}'
    main_page = requests.get(main_url).json()
    # Bug fix: error responses have no "articles" key; surface the API's
    # message via the error slot instead of raising KeyError.
    articles = main_page.get("articles")
    if articles is None:
        return None, main_page.get("message", "news API request failed"), t() - C

    head = [ar["title"] for ar in articles]
    day = ["first", "second", "third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth", "tenth"]
    # Bug fix: the original indexed head[i] for all ten ordinals and raised
    # IndexError whenever the API returned fewer than 10 articles; zip stops
    # at the shorter of the two lists.
    result = "".join(
        f"today's {ordinal} news is: {title}\n"
        for ordinal, title in zip(day, head)
    )

    cache_news(result)  # Cache the news
    return result, None, t() - C
|
43 |
+
|
44 |
+
if __name__ == "__main__":
    # HACK: a live NewsAPI key is committed here — rotate this key and load
    # it from an environment variable instead of hard-coding it.
    print(News("5b57a2e4baa74123b6db7dff6967881b"))
|
requirements.txt
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Flask
|
2 |
+
gunicorn
|
3 |
+
uvicorn
|
4 |
+
importlib-metadata
|
5 |
+
itsdangerous
|
6 |
+
Jinja2
|
7 |
+
MarkupSafe
|
8 |
+
Werkzeug
|
9 |
+
zipp
|
10 |
+
g4f
|
11 |
+
bs4
|
12 |
+
huggingface_hub
|
13 |
+
requests
|
14 |
+
|
static/data/2024-01-20
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{'Mistral7B': 3, 'ChatGpt': 4, 'News': 4}
|
web.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
from bs4 import BeautifulSoup
|
3 |
+
from time import time as t
|
4 |
+
|
5 |
+
# CSS class names Google uses for answer-box widgets on the results page;
# Online_Scraper tries them in order.  NOTE(review): these are scraped from
# rendered markup and silently stop matching whenever Google changes its
# class names — re-verify periodically.
classes=["zCubwf","hgKElc","LTKOO sY7ric","Z0LcW","gsrt vk_bk FzvWSb YwPhnf","pclqee","tw-Data-text tw-text-small tw-ta",
         "IZ6rdc","O5uR6d LTKOO","vlzY6d","webanswers-webanswers_table__webanswers-table",
         "dDoNo ikb4Bb gsrt","sXLaOe","LWkfKe","VQF4g","qv3Wpe","kno-rdesc"]

# Desktop Chrome user-agent string sent with the search request.
useragent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36'
|
10 |
+
|
11 |
+
# Scrape a short answer from Google's search-result answer boxes.
def Online_Scraper(query, PRINT=True):
    """Search Google for *query* and return the first answer-box text found.

    Returns (text, None, elapsed) on a hit, (None, None, elapsed) when no
    known answer-box class matches.  PRINT echoes which class matched.
    """
    start = t()
    # Spell out arithmetic operators so they survive as search terms.
    query = query.replace(" + ", " plus ").replace(" - ", " minus ")
    page = requests.get(
        "https://www.google.co.in/search?q=" + query,
        headers={'User-Agent': useragent},
    )
    soup = BeautifulSoup(page.content, 'html.parser')

    for css_class in classes:
        try:
            answer = soup.find(class_=css_class).get_text()
        except Exception:
            continue  # class absent on this page (find() -> None) — try next
        if PRINT:
            print(f"by class {css_class}")
        return answer, None, t() - start
    return None, None, t() - start
|
31 |
+
|
32 |
+
if __name__ == "__main__":
    # Manual smoke test against live Google results.
    for sample in ["who is iron man", "spider man", "78 + 98", "pizza"]:
        started = t()
        print(Online_Scraper(sample))
        print(t() - started)
|