Red-tech-hub committed on
Commit
27b7ec1
1 Parent(s): 19353ca

[update] run.py test

Browse files
Files changed (1) hide show
  1. run.py +15 -12
run.py CHANGED
@@ -6,19 +6,18 @@ from langchain_community.vectorstores import Chroma
6
  from transformers import AutoModelForCausalLM
7
 
8
  import os
9
- os.environ['TRANSFORMERS_CACHE'] = '/code/model/cache/'
10
 
 
 
 
 
11
 
12
- model_kwargs = {'trust_remote_code': True}
13
 
14
- # embedding = HuggingFaceEmbeddings(
15
- # model_name="nomic-ai/nomic-embed-text-v1.5",
16
- # model_kwargs=model_kwargs
17
- # )
18
 
19
  db = Chroma(
20
  persist_directory="./chroma_db",
21
- # embedding_function=embedding,
22
  collection_name='CVE'
23
  )
24
 
@@ -32,11 +31,15 @@ Question: {question}
32
 
33
  prompt = ChatPromptTemplate.from_template(template)
34
 
35
- model = AutoModelForCausalLM.from_pretrained(
36
- "zephyr-7b-beta.Q4_K_S.gguf",
37
- model_type='mistral',
38
- threads=3,
39
- )
 
 
 
 
40
 
41
  chain = (
42
  {"context": retriever, "question": RunnablePassthrough()}
 
6
  from transformers import AutoModelForCausalLM
7
 
8
  import os
 
9
 
10
+ # Ensure the TRANSFORMERS_CACHE directory exists and is writable
11
+ cache_dir = '/code/model/cache/'
12
+ if not os.path.exists(cache_dir):
13
+ os.makedirs(cache_dir)
14
 
15
+ os.environ['TRANSFORMERS_CACHE'] = cache_dir
16
 
17
+ model_kwargs = {'trust_remote_code': True}
 
 
 
18
 
19
  db = Chroma(
20
  persist_directory="./chroma_db",
 
21
  collection_name='CVE'
22
  )
23
 
 
31
 
32
  prompt = ChatPromptTemplate.from_template(template)
33
 
34
+ try:
35
+ model = AutoModelForCausalLM.from_pretrained(
36
+ "zephyr-7b-beta.Q4_K_S.gguf",
37
+ model_type='mistral',
38
+ threads=3,
39
+ )
40
+ except Exception as e:
41
+ print(f"Failed to load model: {e}")
42
+ exit(1)
43
 
44
  chain = (
45
  {"context": retriever, "question": RunnablePassthrough()}