Limour committed
Commit ff06e88
1 Parent(s): 1f5b448

Upload app.py

Files changed (1)
  1. app.py +5 -0
app.py CHANGED
@@ -112,6 +112,7 @@ with gr.Blocks() as setting:
      os.mkdir("cache")
  snapshot_download(repo_id='TheBloke/CausalLM-7B-GGUF', local_dir=r'downloads',
                    allow_patterns='causallm_7b.Q5_K_M.gguf')
+ snapshot_download(repo_id='Limour/llama-python-streamingllm-cache', repo_type='dataset', local_dir=r'cache')
 
  # ========== Load the model ==========
  model = StreamingLLM(model_path=setting_path.value,
@@ -194,6 +195,10 @@ with gr.Blocks() as role:
      pass
  tmp = model.save_session(setting_cache_path.value)
  print(f'save cache {tmp}')
+ # ========== Upload the cache ==========
+ from huggingface_hub import login, CommitScheduler
+ login(token=os.environ.get("HF_TOKEN"), write_permission=True)
+ CommitScheduler(repo_id='Limour/llama-python-streamingllm-cache', repo_type='dataset', folder_path='cache')
 
 
  # ========== Streaming output function ==========
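
The change has two halves. The first hunk restores any previously committed session cache from the dataset repo before the model is loaded. A minimal sketch of that download step, shown standalone for clarity; the repo id and the cache folder name come from the diff, while os.makedirs and the keyword layout are just illustrative:

import os
from huggingface_hub import snapshot_download

# Make sure the local cache folder exists (os.makedirs(exist_ok=True) is a
# small robustness tweak over the plain os.mkdir used in app.py).
os.makedirs("cache", exist_ok=True)

# Pull previously committed session files from the dataset repo into ./cache,
# so a saved session can be restored instead of being rebuilt from scratch.
snapshot_download(
    repo_id='Limour/llama-python-streamingllm-cache',
    repo_type='dataset',
    local_dir=r'cache',
)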
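
The second hunk pushes the cache folder back to the same dataset repo so saved sessions survive Space restarts. A minimal sketch of that upload step, assuming an HF_TOKEN secret with write access to the dataset repo; the scheduler variable and the explicit every interval are illustrative (5 minutes is also the library default):

import os
from huggingface_hub import login, CommitScheduler

# Authenticate with a write-capable token taken from the environment.
login(token=os.environ.get("HF_TOKEN"), write_permission=True)

# CommitScheduler watches folder_path from a background thread and commits any
# changed files to the dataset repo on a fixed interval, given in minutes.
scheduler = CommitScheduler(
    repo_id='Limour/llama-python-streamingllm-cache',
    repo_type='dataset',
    folder_path='cache',
    every=5,  # assumed interval; matches the library default
)

Keeping a reference to the scheduler, rather than discarding the return value as the diff does, also allows a final scheduler.stop() (or use as a context manager) on shutdown.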