Merge pull request #48 from almutareb/one_env
This change drops the ConfigParser-based config.ini and reads all settings from environment variables loaded with python-dotenv instead.
app.py CHANGED

@@ -14,21 +14,18 @@ from innovation_pathfinder_ai.utils.utils import (
 from langchain_community.vectorstores import Chroma
 
 import chromadb
-from configparser import ConfigParser
 import dotenv
 import os
 
 dotenv.load_dotenv()
-
-config.read('innovation_pathfinder_ai/config.ini')
-persist_directory = config.get('main', 'VECTOR_DATABASE_LOCATION')
+persist_directory = os.getenv('VECTOR_DATABASE_LOCATION')
 
 logger = logger.get_console_logger("app")
 
 app = FastAPI()
 
 def initialize_chroma_db() -> Chroma:
-    collection_name =
+    collection_name = os.getenv('CONVERSATION_COLLECTION_NAME')
 
     client = chromadb.PersistentClient(
         path=persist_directory
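With config.ini gone, app.py resolves its Chroma settings from the environment at import time. The full body of initialize_chroma_db is not part of the diff, so the following is only a sketch of the resulting pattern; the embedding wiring and the returned Chroma instance are assumptions, not the repository's exact code.

import os

import chromadb
import dotenv
from langchain_community.embeddings.sentence_transformer import SentenceTransformerEmbeddings
from langchain_community.vectorstores import Chroma

dotenv.load_dotenv()  # pulls VECTOR_DATABASE_LOCATION etc. from a local .env file

persist_directory = os.getenv("VECTOR_DATABASE_LOCATION")


def initialize_chroma_db() -> Chroma:
    # Collection name now comes from the environment instead of config.ini
    collection_name = os.getenv("CONVERSATION_COLLECTION_NAME")

    client = chromadb.PersistentClient(path=persist_directory)

    # Assumed wiring: the diff does not show how the embedding function is built
    embedding_function = SentenceTransformerEmbeddings(
        model_name=os.getenv("EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2")
    )

    return Chroma(
        client=client,
        collection_name=collection_name,
        embedding_function=embedding_function,
    )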
example.env CHANGED

@@ -8,11 +8,11 @@ OLLMA_BASE_URL=
 SERPAPI_API_KEY=
 
 # for chromadb
-VECTOR_DATABASE_LOCATION=
+VECTOR_DATABASE_LOCATION="innovation_pathfinder_ai/knowledge_base/"
 
 # Name for the Conversation Memory Collection
 CONVERSATION_COLLECTION_NAME="ConversationMemory"
 
 EMBEDDING_MODEL="sentence-transformers/all-MiniLM-L6-v2"
 
-SOURCES_CACHE
+SOURCES_CACHE="innovation_pathfinder_ai/database/sources_cache.sqlite3"
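example.env now carries the values that previously lived in config.ini. For readers unfamiliar with python-dotenv: load_dotenv() copies the key=value pairs from a local .env file (typically a filled-in copy of example.env) into the process environment, and os.getenv returns None for any key that is missing. A minimal, illustrative guard, not something this PR adds:

import os

from dotenv import load_dotenv

load_dotenv()  # reads .env into os.environ; variables already set in the shell win

# os.getenv returns None for missing keys, so fail loudly on misconfiguration
persist_directory = os.getenv("VECTOR_DATABASE_LOCATION")
if persist_directory is None:
    raise RuntimeError(
        "VECTOR_DATABASE_LOCATION is not set; copy example.env to .env and fill it in"
    )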
innovation_pathfinder_ai/config.ini DELETED

@@ -1,4 +0,0 @@
-[main]
-VECTOR_DATABASE_LOCATION = innovation_pathfinder_ai/knowledge_base/
-CONVERSATION_COLLECTION_NAME = ConversationMemory
-SOURCES_CACHE = innovation_pathfinder_ai/database/sources_cache.sqlite3
innovation_pathfinder_ai/database/db_handler.py CHANGED

@@ -1,13 +1,13 @@
 from sqlmodel import SQLModel, create_engine, Session, select
 from innovation_pathfinder_ai.database.schema import Sources
 from innovation_pathfinder_ai.utils.logger import get_console_logger
-
-
+import os
+from dotenv import load_dotenv
 
-
-
+load_dotenv()
+
+sqlite_file_name = os.getenv('SOURCES_CACHE')
 
-sqlite_file_name = config.get('main', 'SOURCES_CACHE')
 sqlite_url = f"sqlite:///{sqlite_file_name}"
 engine = create_engine(sqlite_url, echo=False)
 
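db_handler.py now derives its SQLite URL from SOURCES_CACHE. As a hedged sketch of how the resulting engine is typically used with SQLModel (the Sources schema is not part of this diff, so the table creation and query below are illustrative only):

import os

from dotenv import load_dotenv
from sqlmodel import SQLModel, Session, create_engine, select

from innovation_pathfinder_ai.database.schema import Sources

load_dotenv()

sqlite_file_name = os.getenv("SOURCES_CACHE")
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=False)

# Illustrative usage only: create the tables and read back all cached sources
SQLModel.metadata.create_all(engine)
with Session(engine) as session:
    cached_sources = session.exec(select(Sources)).all()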
innovation_pathfinder_ai/structured_tools/structured_tools.py CHANGED

@@ -32,12 +32,9 @@ from innovation_pathfinder_ai.utils.utils import (
     create_wikipedia_urls_from_text, create_folder_if_not_exists,
 )
 import os
-from configparser import ConfigParser
 # from innovation_pathfinder_ai.utils import create_wikipedia_urls_from_text
 
-
-config.read('innovation_pathfinder_ai/config.ini')
-persist_directory = config.get('main', 'VECTOR_DATABASE_LOCATION')
+persist_directory = os.getenv('VECTOR_DATABASE_LOCATION')
 
 @tool
 def memory_search(query:str) -> str:
@@ -48,7 +45,7 @@ def memory_search(query:str) -> str:
         path=persist_directory,
     )
 
-    collection_name =
+    collection_name = os.getenv('CONVERSATION_COLLECTION_NAME')
     #store using envar
 
     embedding_function = SentenceTransformerEmbeddings(
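structured_tools.py applies the same substitution inside the memory_search tool. The diff shows only fragments of the function, so the reconstruction below is an assumption about the overall shape, not the repository's exact code:

import os

import chromadb
from langchain.tools import tool
from langchain_community.embeddings.sentence_transformer import SentenceTransformerEmbeddings
from langchain_community.vectorstores import Chroma

persist_directory = os.getenv("VECTOR_DATABASE_LOCATION")


@tool
def memory_search(query: str) -> str:
    """Search the conversation-memory collection for passages related to the query."""
    client = chromadb.PersistentClient(
        path=persist_directory,
    )

    collection_name = os.getenv("CONVERSATION_COLLECTION_NAME")  # store using envar

    embedding_function = SentenceTransformerEmbeddings(
        model_name=os.getenv("EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2")
    )

    vector_db = Chroma(
        client=client,
        collection_name=collection_name,
        embedding_function=embedding_function,
    )

    docs = vector_db.similarity_search(query)
    return "\n".join(doc.page_content for doc in docs)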
innovation_pathfinder_ai/vector_store/chroma_vector_store.py CHANGED

@@ -23,14 +23,11 @@ from langchain_community.embeddings.sentence_transformer import (
 from innovation_pathfinder_ai.utils.utils import (
     generate_uuid
 )
-from configparser import ConfigParser
 import dotenv
 import os
 
 dotenv.load_dotenv()
-
-config.read('innovation_pathfinder_ai/config.ini')
-persist_directory = config.get('main', 'VECTOR_DATABASE_LOCATION')
+persist_directory = os.getenv('VECTOR_DATABASE_LOCATION')
 
 
 def read_markdown_file(file_path: str) -> str:
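chroma_vector_store.py follows the same pattern as app.py: drop ConfigParser, keep dotenv, and read VECTOR_DATABASE_LOCATION from the environment. The read_markdown_file helper appears only as a context line; a plausible minimal body, included purely as an assumption, would be:

def read_markdown_file(file_path: str) -> str:
    # Assumed implementation: the diff only shows the signature
    with open(file_path, "r", encoding="utf-8") as f:
        return f.read()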