CISCai committed on
Commit
11e4190
1 Parent(s): 60b16c5

Fix deferred metadata update and show link to Chat Template Editor for chat template metadata

Browse files
Files changed (1) hide show
  1. app.py +13 -5
app.py CHANGED
@@ -160,7 +160,8 @@ with gr.Blocks(
160
 
161
  meta_string = gr.Textbox(
162
  label = "String",
163
- info = "Enter to update value (Shift+Enter for new line).",
 
164
  visible = False,
165
  )
166
 
@@ -293,7 +294,7 @@ Below you will find a collection of example use-cases to show you how to perform
293
  example_boolean: dict(
294
  value = False,
295
  label = meta_boolean.label,
296
- info = "",
297
  visible = False,
298
  ),
299
  example_token_select: dict(
@@ -310,13 +311,13 @@ Below you will find a collection of example use-cases to show you how to perform
310
  value = 0,
311
  precision = 0,
312
  label = meta_number.label,
313
- info = "",
314
  visible = False,
315
  ),
316
  example_string: dict(
317
  value = "",
318
  label = meta_string.label,
319
- info = "",
320
  visible = False,
321
  ),
322
  }
@@ -747,6 +748,7 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
747
  oauth_token: gr.OAuthToken | None = None,
748
  ):
749
  m = []
 
750
  meta = init_state()
751
 
752
  yield {
@@ -819,7 +821,6 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
819
  meta.var['repo_file'] = repo_file
820
  meta.var['branch'] = branch
821
 
822
- deferred_updates = []
823
  for k, v in progress.tqdm(metadata, desc = 'Reading metadata...', total = num_metadata, unit = f' of {num_metadata} metadata keys...'):
824
  human = [*human_readable_metadata(meta, k, v.type, v.value)]
825
 
@@ -852,6 +853,9 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
852
  value = '',
853
  visible = True,
854
  ),
 
 
 
855
  }
856
 
857
 
@@ -934,6 +938,7 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
934
  do_select_token = False
935
  do_lookup_token = False
936
  do_token_type = False
 
937
  match key:
938
  case 'tokenizer.ggml.scores':
939
  do_select_token = True
@@ -942,6 +947,8 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
942
  do_token_type = True
943
  case s if s.endswith('_token_id'):
944
  do_lookup_token = True
 
 
945
  case _:
946
  pass
947
 
@@ -982,6 +989,7 @@ Any framework based on `llama-cpp-python` will let you select which chat templat
982
  ),
983
  meta_string: gr.Textbox(
984
  value = val if typ == GGUFValueType.STRING else '',
 
985
  visible = True if typ == GGUFValueType.STRING else False,
986
  ),
987
  meta_array: gr.Textbox(
 
160
 
161
  meta_string = gr.Textbox(
162
  label = "String",
163
+ info = "Enter to update value (Shift+Enter for new line)",
164
+ show_copy_button = True,
165
  visible = False,
166
  )
167
 
 
294
  example_boolean: dict(
295
  value = False,
296
  label = meta_boolean.label,
297
+ info = meta_boolean.info,
298
  visible = False,
299
  ),
300
  example_token_select: dict(
 
311
  value = 0,
312
  precision = 0,
313
  label = meta_number.label,
314
+ info = meta_number.info,
315
  visible = False,
316
  ),
317
  example_string: dict(
318
  value = "",
319
  label = meta_string.label,
320
+ info = meta_string.info,
321
  visible = False,
322
  ),
323
  }
 
748
  oauth_token: gr.OAuthToken | None = None,
749
  ):
750
  m = []
751
+ deferred_updates = []
752
  meta = init_state()
753
 
754
  yield {
 
821
  meta.var['repo_file'] = repo_file
822
  meta.var['branch'] = branch
823
 
 
824
  for k, v in progress.tqdm(metadata, desc = 'Reading metadata...', total = num_metadata, unit = f' of {num_metadata} metadata keys...'):
825
  human = [*human_readable_metadata(meta, k, v.type, v.value)]
826
 
 
853
  value = '',
854
  visible = True,
855
  ),
856
+ file_meta: gr.skip() if not deferred_updates else gr.Matrix(
857
+ m,
858
+ ),
859
  }
860
 
861
 
 
938
  do_select_token = False
939
  do_lookup_token = False
940
  do_token_type = False
941
+ do_chat_template = False
942
  match key:
943
  case 'tokenizer.ggml.scores':
944
  do_select_token = True
 
947
  do_token_type = True
948
  case s if s.endswith('_token_id'):
949
  do_lookup_token = True
950
+ case s if s == 'tokenizer.chat_template' or s.startswith('tokenizer.chat_template.'):
951
+ do_chat_template = True
952
  case _:
953
  pass
954
 
 
989
  ),
990
  meta_string: gr.Textbox(
991
  value = val if typ == GGUFValueType.STRING else '',
992
+ info = "Use [Chat Template Editor](https://huggingface.co/spaces/CISCai/chat-template-editor) to edit/test the template, then paste the result here (press Enter to update value)" if do_chat_template else example_defaults[example_string]["info"],
993
  visible = True if typ == GGUFValueType.STRING else False,
994
  ),
995
  meta_array: gr.Textbox(