yangdx committed · Commit ef3cf44 · Parent: 7dfcd6d

Update sample code in README.md

Files changed:
- README-zh.md (+24 / -24)
- README.md (+24 / -24)
- examples/lightrag_openai_demo.py (+0 / -2)
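The commit rewrites the quick-start sample in both READMEs: it creates WORKING_DIR before use and passes it to LightRAG, switches the example query mode from "mix" to "hybrid", wraps the body of main() in try/except/finally so that rag.finalize_storages() is awaited on exit, and starts the program with asyncio.run(main()) instead of a plain main() call. The OpenAI demo drops a commented-out gpt_4o_complete line and a rag = None pre-initialization. Removed sample lines whose content is not visible in this view are collapsed to "…" in the diffs below.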
README-zh.md (CHANGED)

````diff
@@ -107,42 +107,42 @@ from lightrag.utils import setup_logger
 
 setup_logger("lightrag", level="INFO")
 
+if not os.path.exists(WORKING_DIR):
+    os.mkdir(WORKING_DIR)
+
 async def initialize_rag():
     rag = LightRAG(
-        working_dir=
+        working_dir=WORKING_DIR,
         embedding_func=openai_embed,
-        llm_model_func=gpt_4o_mini_complete
+        llm_model_func=gpt_4o_mini_complete,
     )
-
     await rag.initialize_storages()
     await initialize_pipeline_status()
-
     return rag
 
 def main():
-    …
-    # 混合模式集成知识图谱和向量检索
-    mode="mix"
+    try:
+        # Initialize RAG instance
+        rag = await initialize_rag()
+        rag.insert("Your text")
+
+        # Perform hybrid search
+        mode="hybrid"
+        print(
+            await rag.query(
+                "What are the top themes in this story?",
+                param=QueryParam(mode=mode)
+            )
+        )
 
-    …
+    except Exception as e:
+        print(f"An error occurred: {e}")
+    finally:
+        if rag:
+            await rag.finalize_storages()
 
 if __name__ == "__main__":
-    main()
+    asyncio.run(main())
 ```
 
 ### 查询参数
````
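In the Chinese README, the removed comment 混合模式集成知识图谱和向量检索 reads "mix mode integrates knowledge graph and vector retrieval", and ### 查询参数 is the "Query Param" section that follows the sample; the English README below receives the same change. The sample's retrieval mode thus moves from "mix" to "hybrid". As a minimal sketch of how the mode is selected (assuming the `QueryParam` import the sample already uses; the list of other modes comes from LightRAG's documentation, not from this commit):

```python
from lightrag import QueryParam

# The updated sample queries with mode="hybrid"; other documented modes
# include "naive", "local", "global" and "mix" (knowledge graph + vector).
param = QueryParam(mode="hybrid")
# answer = await rag.query("What are the top themes in this story?", param=param)
```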
README.md (CHANGED)

````diff
@@ -143,42 +143,42 @@ from lightrag.utils import setup_logger
 
 setup_logger("lightrag", level="INFO")
 
+if not os.path.exists(WORKING_DIR):
+    os.mkdir(WORKING_DIR)
+
 async def initialize_rag():
     rag = LightRAG(
-        working_dir=
+        working_dir=WORKING_DIR,
         embedding_func=openai_embed,
-        llm_model_func=gpt_4o_mini_complete
+        llm_model_func=gpt_4o_mini_complete,
     )
-
     await rag.initialize_storages()
     await initialize_pipeline_status()
-
     return rag
 
 def main():
-    …
-    # Mix mode Integrates knowledge graph and vector retrieval.
-    mode="mix"
+    try:
+        # Initialize RAG instance
+        rag = await initialize_rag()
+        rag.insert("Your text")
+
+        # Perform hybrid search
+        mode="hybrid"
+        print(
+            await rag.query(
+                "What are the top themes in this story?",
+                param=QueryParam(mode=mode)
+            )
+        )
 
-    …
+    except Exception as e:
+        print(f"An error occurred: {e}")
+    finally:
+        if rag:
+            await rag.finalize_storages()
 
 if __name__ == "__main__":
-    main()
+    asyncio.run(main())
 ```
 
 ### Query Param
````
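Read as a whole, the updated sample is one step away from a runnable script: the unchanged context line still reads `def main():`, yet the new body uses `await` and the entry point is `asyncio.run(main())`, so `main` has to be declared `async`. Below is a minimal sketch of the complete sample under that fix. The import paths, the `./rag_storage` working directory, and the predefined `rag = None` are assumptions drawn from the repository's examples rather than from this hunk, and depending on the LightRAG version the async variants `ainsert`/`aquery` may be preferred over the `insert`/`await query` calls the sample shows.

```python
import os
import asyncio

from lightrag import LightRAG, QueryParam
from lightrag.kg.shared_storage import initialize_pipeline_status
from lightrag.llm.openai import gpt_4o_mini_complete, openai_embed
from lightrag.utils import setup_logger

setup_logger("lightrag", level="INFO")

WORKING_DIR = "./rag_storage"  # assumed placeholder; the README defines WORKING_DIR earlier
if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)


async def initialize_rag():
    rag = LightRAG(
        working_dir=WORKING_DIR,
        embedding_func=openai_embed,
        llm_model_func=gpt_4o_mini_complete,
    )
    await rag.initialize_storages()
    await initialize_pipeline_status()
    return rag


async def main():  # declared async so the awaits below are legal
    rag = None  # predefined so the finally block is safe if initialization fails
    try:
        # Initialize RAG instance and index a document
        rag = await initialize_rag()
        rag.insert("Your text")

        # Perform hybrid search, as in the updated sample
        print(
            await rag.query(
                "What are the top themes in this story?",
                param=QueryParam(mode="hybrid"),
            )
        )
    except Exception as e:
        print(f"An error occurred: {e}")
    finally:
        if rag:
            await rag.finalize_storages()


if __name__ == "__main__":
    asyncio.run(main())
```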
examples/lightrag_openai_demo.py (CHANGED)

```diff
@@ -82,7 +82,6 @@ async def initialize_rag():
         working_dir=WORKING_DIR,
         embedding_func=openai_embed,
         llm_model_func=gpt_4o_mini_complete,
-        # llm_model_func=gpt_4o_complete
     )
 
     await rag.initialize_storages()
@@ -92,7 +91,6 @@ async def initialize_rag():
 
 
 async def main():
-    rag = None
     try:
         # Initialize RAG instance
        rag = await initialize_rag()
```