fix bug
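The diff below writes the two generated prompts to `st.session_state` (instead of local variables) so they persist across Streamlit reruns, gives each output text area an explicit widget key (`generated_prompt_wo_add_generation_prompt_text_area` and `generated_prompt_w_add_generation_prompt_text_area`), and leaves a few commented-out print statements for debugging.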
app.py CHANGED
@@ -142,28 +142,29 @@ if st.session_state['tokenizer_json'] is not None:
 
 
     with prompt_template_output_col:
-
+        # print(st.session_state['input_jinja_template'])
         if generate_prompt_example_button:
             with open(f"./tmp/{st.session_state['uuid']}/tmp_chat_template.json", "w") as fp:
                 fp.write(st.session_state['input_jinja_template'])
             with open(f"./tmp/{st.session_state['uuid']}/tmp_chat_template.json", "r") as f:
-                jinja_lines = f.readlines()
+                jinja_lines = copy.deepcopy(f.readlines())
             st.session_state['tokenizer'].chat_template = sanitize_jinja2(jinja_lines)
+            # print(sanitize_jinja2(jinja_lines))
             os.remove(f"./tmp/{st.session_state['uuid']}/tmp_chat_template.json")
-            generated_prompt_wo_add_generation_prompt = st.session_state['tokenizer'].apply_chat_template(chat, tokenize=False, add_generation_prompt= False)
-            generated_prompt_w_add_generation_prompt = st.session_state['tokenizer'].apply_chat_template(chat, tokenize=False, add_generation_prompt= True)
-
+            st.session_state['generated_prompt_wo_add_generation_prompt'] = st.session_state['tokenizer'].apply_chat_template(chat, tokenize=False, add_generation_prompt= False)
+            st.session_state['generated_prompt_w_add_generation_prompt'] = st.session_state['tokenizer'].apply_chat_template(chat, tokenize=False, add_generation_prompt= True)
+            # print(generated_prompt_wo_add_generation_prompt)
             st.session_state['successful_template'] = copy.deepcopy(st.session_state['input_jinja_template'])
-
+            # print(st.session_state['successful_template'])
 
         if len(st.session_state['successful_template']) > 0:
             st.text_area(
                 "Generate Prompt with `add_generation_prompt=False`", value=st.session_state['generated_prompt_wo_add_generation_prompt'],
-                height=300, placeholder=None, disabled=True, label_visibility="visible", key="
+                height=300, placeholder=None, disabled=True, label_visibility="visible", key="generated_prompt_wo_add_generation_prompt_text_area")
 
             st.text_area(
                 "Generate Prompt with `add_generation_prompt=True`", value=st.session_state['generated_prompt_w_add_generation_prompt'],
-                height=300, placeholder=None, disabled=True, label_visibility="visible", key="
+                height=300, placeholder=None, disabled=True, label_visibility="visible", key="generated_prompt_w_add_generation_prompt_text_area")
 
         access_token_no_cache = st.text_input("HuggingFace Access Token API with Write Access", type="password", key="access_token_no_cache")
         commit_message_text_input = st.text_input("Commit Message", key="commit_message_text_input")
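The core of the fix is the usual Streamlit rerun/persistence pattern: values computed inside the button branch are stored in st.session_state rather than in local variables, and each output widget gets its own key. Below is a minimal, self-contained sketch of that pattern as a standalone app; the names "Generate", generated_prompt, and generated_prompt_text_area are illustrative and not taken from the Space's code.

# Minimal sketch of the persistence pattern used in this fix (illustrative names):
# a value computed on a button click is written to st.session_state so it
# survives the script rerun the click triggers, and the read-only text area
# uses a widget key distinct from the session_state entry it displays.
import streamlit as st

if "generated_prompt" not in st.session_state:
    st.session_state["generated_prompt"] = ""

if st.button("Generate"):
    # In the real app this would be tokenizer.apply_chat_template(...); faked here.
    st.session_state["generated_prompt"] = "<|user|>Hello<|assistant|>"

st.text_area(
    "Generated Prompt",
    value=st.session_state["generated_prompt"],
    height=300,
    disabled=True,
    key="generated_prompt_text_area",  # distinct from the "generated_prompt" state key
)

Keeping the widget key distinct from the session_state key (here via the _text_area suffix, as in the diff) avoids Streamlit's complaint about a key that is both set via the Session State API and given a default value by a widget.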