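"""
Post-process raw LLM API responses: for each query, extract the ```json
fenced block from the corresponding response, parse it, and attach the
parsed result to the matching plan_{idx}.json file.

Note: this summary is inferred from the code below; the hard-coded paths
are machine-specific and will need to be adapted to your environment.
"""
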
from tqdm import tqdm
import json
import os


def load_line_json_data(filename):
    """Load a JSONL file: one JSON object per line."""
    data = []
    with open(filename, 'r', encoding='utf-8') as f:
        for line in f.read().strip().split('\n'):
            unit = json.loads(line)
            data.append(unit)
    return data


if __name__ == '__main__':
    # Select the model, prompting method, and evaluation split to post-process.
    model_name = ['gpt-3.5-turbo-1106', 'gpt-4-1106-preview2', 'greedy_search', 'mistral-7B-32K', 'gemini2', 'mixtral', 'gpt-3.5-turbo-11062'][-3]
    method = ['direct', 'cot', 'react', 'reflexion', 'tool-use'][-1]
    set_type = ['dev', 'test'][0]

    # Raw API responses, one per query, in the same order as the query file.
    with open(f'/home/xj/toolAugEnv/code/toolConstraint/data/api_request/{set_type}_{model_name}_{method}.txt', 'r') as f:
        results = f.read().strip().split('\n')

    directory = f'/home/xj/toolAugEnv/code/toolConstraint/data/final_data/{set_type}'
    query_data_list = load_line_json_data(os.path.join(directory, 'query/query.jsonl'))
    idx_number_list = list(range(1, len(query_data_list) + 1))

    for idx in tqdm(idx_number_list):
        plan_path = f'/home/xj/toolAugEnv/code/toolConstraint/results/{set_type}/plan_{idx}.json'
        with open(plan_path, 'r') as f:
            generated_plan = json.load(f)
        # print(results[idx-1].split('```json')[1].split('```')[0])

        # Tool-use results are stored under '..._info_results', the other
        # methods under '..._collected_info_results'.
        suffix = '' if method == 'tool-use' else '_collected'

        if generated_plan[-1][f'{model_name}_{method}{suffix}_info_results'] not in ['', 'Max Token Length Exceeded.']:
            # Extract the content of the ```json fenced block from the raw response.
            result = results[idx - 1].split('```json')[1].split('```')[0]
            # print(result)
            try:
                # The extracted text is a Python/JSON-like dict literal, so eval is used to parse it.
                if method == 'tool-use':
                    generated_plan[-1][f'{model_name}_{method}_results_parsed'] = eval(result)
                else:
                    generated_plan[-1][f'{model_name}_{method}_with_human_info_results_parsed'] = eval(result)
            except Exception:
                # Stop on the first response that fails to parse so it can be inspected.
                print(result)
                break
        else:
            # Empty or truncated responses are recorded as unparsed.
            if method == 'tool-use':
                generated_plan[-1][f'{model_name}_{method}_results_parsed'] = None
            else:
                generated_plan[-1][f'{model_name}_{method}_with_human_info_results_parsed'] = None
        # print(generated_plan[-1]['chatgpt_human_collected_info_results_parsed'])

        # Write the updated plan back in place.
        with open(plan_path, 'w') as f:
            json.dump(generated_plan, f)