mohammed3536 committed
Commit ba459a9 • 1 Parent(s): 83942d6
Update app.py
app.py
CHANGED
@@ -6,7 +6,6 @@ from langchain_openai import OpenAI, OpenAIEmbeddings
 from langchain.prompts import PromptTemplate
 from langchain.chains import LLMChain
 from langchain_community.vectorstores import FAISS
-from openai import OpenAI
 
 # Load environment variables
 load_dotenv()
@@ -42,14 +41,14 @@ def processing(pdf):
     vectorDB = get_vectorstore(text_chunks)
     return vectorDB
 
-
 # Function to generate questions using OpenAI GPT-3
 def generate_questions(text, num_questions):
     prompt = f"Generate {num_questions} questions from the given text:\n{text}"
-    response =
+    response = OpenAI.Completion.create(
         engine="gpt-3.5-turbo",  # You can use another engine if needed
         prompt=prompt,
-        max_tokens=200
+        max_tokens=200,
+        temperature=0.7
     )
     questions = [choice['text'].strip() for choice in response['choices']]
     return questions
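Note on the added call: after this commit, the name OpenAI refers to the langchain_openai wrapper imported at the top of the file (see the first hunk header), which does not provide a Completion.create method, so OpenAI.Completion.create(...) will likely fail at runtime; gpt-3.5-turbo is also a chat model, not a legacy completions engine. A minimal sketch of how generate_questions could be written against the openai>=1.0 client API is shown below. This is not part of the commit, and the line-splitting of the model output is an assumption for illustration.

# Sketch only, assuming openai>=1.0 is installed and OPENAI_API_KEY is set.
from openai import OpenAI as OpenAIClient  # aliased to avoid clashing with langchain_openai.OpenAI

client = OpenAIClient()  # reads OPENAI_API_KEY from the environment

def generate_questions(text, num_questions):
    prompt = f"Generate {num_questions} questions from the given text:\n{text}"
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # chat models take `model`, not `engine`
        messages=[{"role": "user", "content": prompt}],
        max_tokens=200,
        temperature=0.7,
    )
    # Assumption: the model returns one question per line; split accordingly.
    content = response.choices[0].message.content
    return [line.strip() for line in content.splitlines() if line.strip()]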