|
import gradio as gr |
|
import wikipedia |
|
import requests |
|
from bs4 import BeautifulSoup |
|
import pyjokes |
|
|
|
|
|
def code(name):
    """Scrape the W3Schools syntax page for the language mentioned in *name*.

    The language token is extracted heuristically: the first word after
    'learn' or 'start' in the user's sentence (e.g. "how to learn python"
    -> "python").  Returns the readable lesson text of the page's main
    content area, or "" when nothing usable was found.
    """
    # Heuristic keyword extraction from the user's sentence.
    name = name.split('learn')[-1]
    name = name.split('start')[-1]
    name = name.split()[0]

    url = "https://www.w3schools.com/" + name + "/" + name + "_syntax.asp"
    # Timeout keeps the chatbot from hanging indefinitely on a slow server.
    page = requests.get(url, timeout=10)
    soup = BeautifulSoup(page.text, "html.parser")

    result = ""
    for info in soup.find_all('div'):
        info1 = str(info)
        # Only the main-content container, skipping divs that embed scripts.
        if '</script>' not in info1 and '<div class="w3-col l10 m12" id="main">' in info1:
            try:
                # The lesson body sits between the "Next" and "Previous" nav links.
                sections = str(info.text).split('Next ❯')[1].split("❮ Previous")[0].split("\n\n\n")
            except IndexError:
                # Unexpected page layout (bad language slug, redesign) — skip.
                continue
            for section in sections:
                # Drop interactive-exercise boilerplate blocks.
                if ("Test Yourself With Exercises" in section
                        or "Submit Answer »" in section
                        or "On this page" in section):
                    continue
                result = result + section + "\n\n"
    return result
|
|
|
|
|
|
|
def joke():
    """Return one random, family-friendly English joke."""
    return pyjokes.get_joke(language="en", category="neutral")
|
|
|
|
|
def wiki(name):
    """Search Wikipedia for the subject of *name* and return suggested
    keywords plus summaries of the top matching articles.

    The query is reduced heuristically by keeping only the text after
    common filler words ('the', 'is a', 'by') before searching.
    """
    text = name
    text = text.split("the")[-1]
    text = text.split("is a")[-1]
    text = text.split("by")[-1]

    out = "try this key words :\n" + str(wikipedia.search(text, results=10)) + "\n\n"
    for title in wikipedia.search(text, results=3):
        try:
            summary = wikipedia.summary(title)
        except Exception:
            # Disambiguation / missing-page errors from the wikipedia client —
            # skip this candidate rather than failing the whole lookup.
            continue
        # A real summary contains at least one space; skip one-word blobs.
        if " " in summary.lower():
            out = out + summary + "\n"
    return out
|
|
|
import os

import openai

# SECURITY: an OpenAI API key was previously hard-coded here, which means it
# is compromised and should be revoked.  Read the key from the environment.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")

def aitext(word):
    """Generate a free-form completion for the prompt *word* via OpenAI.

    Returns the raw completion text of the first choice (whitespace
    included), stopping at conversational turn markers.
    """
    response = openai.Completion.create(
        model="text-davinci-003",
        prompt=word,
        temperature=0.9,        # fairly creative output
        max_tokens=200,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0.6,   # nudge away from repeating topics
        stop=[" Human:", " AI:"],
    )
    return response.choices[0].text
|
|
|
import json
import os

# SECURITY: a Hugging Face bearer token was previously hard-coded here, which
# means it is compromised and should be revoked.  Read it from the environment.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def sumy(payload):
    """POST *payload* to the BART-large-CNN inference API and return the
    decoded JSON response (a list of ``{'summary_text': ...}`` dicts)."""
    API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
    data = json.dumps(payload)
    # Timeout keeps the chatbot responsive if the inference API stalls.
    response = requests.request("POST", API_URL, headers=headers, data=data, timeout=30)
    return json.loads(response.content.decode("utf-8"))
|
|
|
|
|
def query(payload):
    """Alias for :func:`sumy`, kept for backward compatibility.

    The original implementation duplicated ``sumy`` line for line; both
    names hit the same BART-large-CNN summarization endpoint, so delegate.
    """
    return sumy(payload)
|
|
|
def google(name):
    """Scrape Google search-result snippets for *name* and summarize them.

    Returns a single summarized string for a plain question, or a numbered
    list of per-snippet summaries when the query asks for steps/reasons.
    """
    # Queries asking for enumerations get multiple summarized results.
    if "give" in name or "reason" in name or "result" in name or "step" in name:

        result_count = 2

        print(name)

    else:

        result_count = 1

    f_result = ""

    # NOTE(review): the set literal {"",""} collapses to {""} — a single empty
    # string that ends up skipped by the n!=0 guard below.
    result = {"",""}

    text =""

    url = "https://www.google.com/search?q="+name

    r = requests.get(url)

    soup = BeautifulSoup(r.text,"html.parser")

    heading_object=soup.find_all('div')

    for info in heading_object:

        # Snippet container class used by Google's no-JS results markup;
        # entries containing the breadcrumb character '›' are navigation.
        if '<div class="BNeawe s3v9rd AP7Wnd"><div><div><div class="BNeawe s3v9rd AP7Wnd">' in str(info):

            if '›' not in str(info.text) :

                result.add(info.text)

    n=0

    for i in result:

        # n==0 skips one element of the set; set iteration order is
        # arbitrary, so which snippet is skipped is not deterministic —
        # TODO(review): confirm this skip is intentional.
        if n!=0:

            # Drop a leading "date · " prefix when present.
            i = i.split("·",1)

            try:

                i = i[1]

            except:

                i = i[0]

            # Cut off trailing "Duration: mm:ss" video metadata.
            i=i.split("Duration")

            i = i[0]

            text = text +str(n)+"\t"+i+"\n\n"

        n=n+1

    if result_count == 1:

        # Single-answer mode: merge all snippets (truncated at "...") and
        # summarize the whole thing in one API call.
        temp = ""

        for r in text.split("\n\n"):

            temp = temp+r.split("...")[0]

        f_result = sumy({"inputs":temp,"parameters": {"do_sample": False,"max_length":300}})

        return f_result[0]['summary_text']

    else:

        # Multi-answer mode: summarize each snippet separately.
        # NOTE(review): the [2:-2] slice drops the first two and last two
        # snippets — presumably ads/navigation; confirm against live output.
        n=1

        for r in text.split("\n\n")[2:-2]:

            if len(r)>10:

                if "..." in r:

                    r = r.split("...")

                    w = query(r[0].replace("\xa0",""))

                    f_result = f_result + str(n)+"\t"+(w[0]['summary_text'])+"\n\n"+r"\\"

                else:

                    w = query(r[:-1])

                    f_result = f_result + str(n)+"\t"+(w[0]['summary_text'])+"\n\n"+r"\\"

            n=n+1

        return f_result
|
|
|
def greet(name1): |
|
name = name1.lower() |
|
|
|
if "who are you" in name or "what is you" in name or "your name" in name or"who r u" in name: |
|
|
|
return "Im Ai Based Chatbot Created by ssebowa.org" |
|
|
|
if "who developed you" in name or "what is you" in name or "who mad you" in name or "who made you" in name: |
|
return "ssebowa.org" |
|
|
|
if "tell me a joke" in name or "the joke" in name: |
|
return joke() |
|
|
|
if "love you" in name or "i love" in name: |
|
return "me too" |
|
if "marry me" in name or "marry" in name: |
|
return "im not intrested" |
|
if "your age" in name or "what is your age" in name: |
|
return "Im not a human so i don't have age" |
|
if "thank u" in name or "thanks" in name or "thank you" in name: |
|
return "ok welcome ....!" |
|
if "how to learn" in name or "steps for learning" in name or "step for learning" in name or "steps for" in name or "step for" in name: |
|
try: |
|
return {"result":google(name)+"\n\n"+code(name)} |
|
except: |
|
return {"result":google(name)} |
|
else: |
|
return google(name) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Wire the dispatcher to a single-textbox Gradio UI and start serving.
iface = gr.Interface(fn=greet, inputs="text", outputs="text")

iface.launch()
|
|
|
|
|
|