import json

# Sample response payload from a Stable Diffusion web-UI (Gradio) txt2img call.
# data[0] is the list of generated images as data-URIs ("data:image/png;base64,<payload>"
# — empty placeholder here), data[1] is a JSON string of the generation
# parameters, and data[2] is the HTML info panel shown in the UI.
temp = {"data":[
    [""],
    "{\"prompt\": \"ancient castle\", \"all_prompts\": [\"ancient castle\"], \"negative_prompt\": \"\", \"seed\": 544910640, \"all_seeds\": [544910640], \"subseed\": 540360099, \"all_subseeds\": [540360099], \"subseed_strength\": 0, \"width\": 512, \"height\": 512, \"sampler_index\": 0, \"sampler\": \"Euler a\", \"cfg_scale\": 7, \"steps\": 70, \"batch_size\": 1, \"restore_faces\": false, \"face_restoration_model\": null, \"sd_model_hash\": \"ab21ba3c\", \"seed_resize_from_w\": 0, \"seed_resize_from_h\": 0, \"denoising_strength\": null, \"extra_generation_params\": {}, \"index_of_first_image\": 0, \"infotexts\": [\"ancient castle\\nSteps: 70, Sampler: Euler a, CFG scale: 7, Seed: 544910640, Size: 512x512, Model hash: ab21ba3c, Eta: 0.68, Clip skip: 2\"], \"styles\": [\"None\", \"None\"], \"job_timestamp\": \"20221110171102\", \"clip_skip\": 2}","<p>ancient castle<br>\nSteps: 70, Sampler: Euler a, CFG scale: 7, Seed: 544910640, Size: 512x512, Model hash: ab21ba3c, Eta: 0.68, Clip skip: 2</p><div class='performance'><p class='time'>Time taken: <wbr>23.04s</p><p class='vram'>Torch active/reserved: 5860/6162 MiB, <wbr>Sys VRAM: 8041/8192 MiB (98.16%)</p></div>"],"is_generating":False,"duration":23.036685943603516,"average_duration":12.179592847824097}

# Parse the embedded JSON metadata string. json.loads is the idiomatic
# equivalent of json.JSONDecoder().decode and avoids creating a decoder object.
meta_data = json.loads(temp['data'][1])

# Split the data-URI once into its header and base64 payload. The original
# `.split(',')[1]` raises IndexError on the empty placeholder string above;
# str.partition never raises and yields empty strings for malformed input,
# while producing identical fields for a well-formed "header,payload" URI.
image_type, _, image_payload = temp['data'][0][0].partition(',')

# Reshape the raw API response into the structure consumed downstream.
data = {
    'image': {
        'data_type': image_type,
        'raw_data': image_payload,
    },
    'argument': {
        'prompt': meta_data['prompt'],
        'negative_prompt': meta_data['negative_prompt'],
        'seed': meta_data['seed'],
        'width': meta_data['width'],
        'height': meta_data['height'],
        'sampler': meta_data['sampler'],
        'info_text': meta_data['infotexts']
    }
}