zrguo
		
	committed on
		
		
					Commit 
							
							·
						
						0e1b431
	
1
								Parent(s):
							
							47db617
								
Revert "[EVO] Add language configuration to environment and argument parsing"
Browse files
- env.example +1 -1
- lightrag/api/lightrag_server.py +0 -3
- lightrag/api/utils_api.py +0 -1
    	
        env.example
    CHANGED
    
    | @@ -48,7 +48,7 @@ | |
| 48 | 
             
            # CHUNK_OVERLAP_SIZE=100
         | 
| 49 | 
             
            # MAX_TOKENS=32768             # Max tokens send to LLM for summarization
         | 
| 50 | 
             
            # MAX_TOKEN_SUMMARY=500        # Max tokens for entity or relations summary
         | 
| 51 | 
            -
            #  | 
| 52 | 
             
            # MAX_EMBED_TOKENS=8192
         | 
| 53 |  | 
| 54 | 
             
            ### LLM Configuration (Use valid host. For local services installed with docker, you can use host.docker.internal)
         | 
|  | |
| 48 | 
             
            # CHUNK_OVERLAP_SIZE=100
         | 
| 49 | 
             
            # MAX_TOKENS=32768             # Max tokens send to LLM for summarization
         | 
| 50 | 
             
            # MAX_TOKEN_SUMMARY=500        # Max tokens for entity or relations summary
         | 
| 51 | 
            +
            # SUMMARY_LANGUAGE=English
         | 
| 52 | 
             
            # MAX_EMBED_TOKENS=8192
         | 
| 53 |  | 
| 54 | 
             
            ### LLM Configuration (Use valid host. For local services installed with docker, you can use host.docker.internal)
         | 
    	
        lightrag/api/lightrag_server.py
    CHANGED
    
    | @@ -331,9 +331,6 @@ def create_app(args): | |
| 331 | 
             
                            "use_llm_check": False,
         | 
| 332 | 
             
                        },
         | 
| 333 | 
             
                        namespace_prefix=args.namespace_prefix,
         | 
| 334 | 
            -
                        addon_params={
         | 
| 335 | 
            -
                            "language": args.language,
         | 
| 336 | 
            -
                        },
         | 
| 337 | 
             
                        auto_manage_storages_states=False,
         | 
| 338 | 
             
                    )
         | 
| 339 | 
             
                else:  # azure_openai
         | 
|  | |
| 331 | 
             
                            "use_llm_check": False,
         | 
| 332 | 
             
                        },
         | 
| 333 | 
             
                        namespace_prefix=args.namespace_prefix,
         | 
|  | |
|  | |
|  | |
| 334 | 
             
                        auto_manage_storages_states=False,
         | 
| 335 | 
             
                    )
         | 
| 336 | 
             
                else:  # azure_openai
         | 
    	
        lightrag/api/utils_api.py
    CHANGED
    
    | @@ -340,7 +340,6 @@ def parse_args(is_uvicorn_mode: bool = False) -> argparse.Namespace: | |
| 340 | 
             
                # Inject chunk configuration
         | 
| 341 | 
             
                args.chunk_size = get_env_value("CHUNK_SIZE", 1200, int)
         | 
| 342 | 
             
                args.chunk_overlap_size = get_env_value("CHUNK_OVERLAP_SIZE", 100, int)
         | 
| 343 | 
            -
                args.language = get_env_value("LANGUAGE", "English")
         | 
| 344 |  | 
| 345 | 
             
                ollama_server_infos.LIGHTRAG_MODEL = args.simulated_model_name
         | 
| 346 |  | 
|  | |
| 340 | 
             
                # Inject chunk configuration
         | 
| 341 | 
             
                args.chunk_size = get_env_value("CHUNK_SIZE", 1200, int)
         | 
| 342 | 
             
                args.chunk_overlap_size = get_env_value("CHUNK_OVERLAP_SIZE", 100, int)
         | 
|  | |
| 343 |  | 
| 344 | 
             
                ollama_server_infos.LIGHTRAG_MODEL = args.simulated_model_name
         | 
| 345 |  | 
