{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 829, "status": "ok", "timestamp": 1641588786523, "user": { "displayName": "Yurii Paniv", "photoUrl": "https://lh3.googleusercontent.com/a/default-user=s64", "userId": "13095662915325887123" }, "user_tz": -120 }, "id": "YELVqGxMxnbG", "outputId": "876761c1-2e03-411b-e61b-07ac4ad61377" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Wed Dec 28 21:13:08 2022 \n", "+-----------------------------------------------------------------------------+\n", "| NVIDIA-SMI 515.86.01 Driver Version: 515.86.01 CUDA Version: 11.7 |\n", "|-------------------------------+----------------------+----------------------+\n", "| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", "| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", "| | | MIG M. |\n", "|===============================+======================+======================|\n", "| 0 NVIDIA GeForce ... Off | 00000000:0A:00.0 On | N/A |\n", "| 0% 41C P8 24W / 390W | 1364MiB / 24576MiB | 0% Default |\n", "| | | N/A |\n", "+-------------------------------+----------------------+----------------------+\n", " \n", "+-----------------------------------------------------------------------------+\n", "| Processes: |\n", "| GPU GI CI PID Type Process name GPU Memory |\n", "| ID ID Usage |\n", "|=============================================================================|\n", "| 0 N/A N/A 1345 G /usr/lib/xorg/Xorg 528MiB |\n", "| 0 N/A N/A 2100 G /usr/bin/kwalletd5 4MiB |\n", "| 0 N/A N/A 2266 G ...ec/xdg-desktop-portal-kde 4MiB |\n", "| 0 N/A N/A 2303 G /usr/bin/ksmserver 4MiB |\n", "| 0 N/A N/A 2305 G /usr/bin/kded5 4MiB |\n", "| 0 N/A N/A 2306 G /usr/bin/kwin_x11 102MiB |\n", "| 0 N/A N/A 2367 G /usr/bin/plasmashell 133MiB |\n", "| 0 N/A N/A 2396 G ...de-authentication-agent-1 4MiB |\n", "| 0 N/A N/A 2443 G ...x-gnu/libexec/kdeconnectd 4MiB |\n", "| 0 N/A N/A 2445 G .../usr/bin/telegram-desktop 7MiB |\n", "| 0 N/A N/A 2459 G /usr/bin/kaccess 4MiB |\n", "| 0 N/A N/A 2484 G ...1/usr/lib/firefox/firefox 214MiB |\n", "| 0 N/A N/A 2499 G .../libexec/DiscoverNotifier 4MiB |\n", "| 0 N/A N/A 2784 G /usr/bin/dolphin 4MiB |\n", "| 0 N/A N/A 2917 G /usr/bin/dolphin 4MiB |\n", "| 0 N/A N/A 2997 G /usr/bin/dolphin 4MiB |\n", "| 0 N/A N/A 3138 G ...gnu/libexec/kf5/kioslave5 4MiB |\n", "| 0 N/A N/A 3158 G ...gnu/libexec/kf5/kioslave5 4MiB |\n", "| 0 N/A N/A 3663 G /usr/bin/dolphin 4MiB |\n", "| 0 N/A N/A 3768 G /usr/bin/dolphin 4MiB |\n", "| 0 N/A N/A 3908 G ...gnu/libexec/kf5/kioslave5 4MiB |\n", "| 0 N/A N/A 3964 G ...gnu/libexec/kf5/kioslave5 4MiB |\n", "| 0 N/A N/A 4610 G ...RendererForSitePerProcess 293MiB |\n", "+-----------------------------------------------------------------------------+\n" ] } ], "source": [ "gpu_info = !nvidia-smi\n", "gpu_info = '\\n'.join(gpu_info)\n", "if gpu_info.find('failed') >= 0:\n", " print('Not connected to a GPU')\n", "else:\n", " print(gpu_info)" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 5334, "status": "ok", "timestamp": 1641588811766, "user": { "displayName": "Yurii Paniv", "photoUrl": "https://lh3.googleusercontent.com/a/default-user=s64", "userId": "13095662915325887123" }, "user_tz": -120 }, "id": "2MMXcWFFgCXU", "outputId": "be9fd72e-4395-4cd0-ff87-631dad046e71" }, "outputs": [], "source": [ "from datasets import 
load_from_disk, load_metric, Audio\n", "\n", "common_voice_train = load_from_disk(\"cached_dataset/cv_train\")\n", "common_voice_test = load_from_disk(\"cached_dataset/cv_test\")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "id": "kAR0-2KLkopp" }, "outputs": [], "source": [ "from transformers import Wav2Vec2FeatureExtractor\n", "\n", "feature_extractor = Wav2Vec2FeatureExtractor(feature_size=1, sampling_rate=16000, padding_value=0.0, do_normalize=True, return_attention_mask=True)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" ] } ], "source": [ "from transformers import Wav2Vec2CTCTokenizer\n", "\n", "tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(\"./\", unk_token=\"[UNK]\", pad_token=\"[PAD]\", word_delimiter_token=\"|\")" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "id": "KYZtoW-tlZgl" }, "outputs": [], "source": [ "from transformers import Wav2Vec2Processor\n", "\n", "processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "id": "tborvC9hx88e" }, "outputs": [], "source": [ "import torch\n",
"\n",
"from dataclasses import dataclass, field\n",
"from typing import Any, Dict, List, Optional, Union\n",
"\n",
"@dataclass\n",
"class DataCollatorCTCWithPadding:\n",
"    \"\"\"\n",
"    Data collator that will dynamically pad the inputs received.\n",
"    Args:\n",
"        processor (:class:`~transformers.Wav2Vec2Processor`)\n",
"            The processor used for processing the data.\n",
"        padding (:obj:`bool`, :obj:`str` or :class:`~transformers.tokenization_utils_base.PaddingStrategy`, `optional`, defaults to :obj:`True`):\n",
"            Select a strategy to pad the returned sequences (according to the model's padding side and padding index)\n",
"            among:\n",
"            * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single\n",
"              sequence is provided).\n",
"            * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the\n",
"              maximum acceptable input length for the model if that argument is not provided.\n",
"            * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of\n",
"              different lengths).\n",
"    \"\"\"\n",
"\n",
"    processor: Wav2Vec2Processor\n",
"    padding: Union[bool, str] = True\n",
"\n",
"    def __call__(self, features: List[Dict[str, Union[List[int], torch.Tensor]]]) -> Dict[str, torch.Tensor]:\n",
"        # split inputs and labels since they have to be of different lengths and need\n",
"        # different padding methods\n",
"        input_features = [{\"input_values\": feature[\"input_values\"]} for feature in features]\n",
"        label_features = [{\"input_ids\": feature[\"labels\"]} for feature in features]\n",
"\n",
"        batch = self.processor.pad(\n",
"            input_features,\n",
"            padding=self.padding,\n",
"            return_tensors=\"pt\",\n",
"        )\n",
"        with self.processor.as_target_processor():\n",
"            labels_batch = self.processor.pad(\n",
"                label_features,\n",
"                padding=self.padding,\n",
"                return_tensors=\"pt\",\n",
"            )\n",
"\n",
"        # replace padding with -100 to ignore loss correctly\n",
"        labels = labels_batch[\"input_ids\"].masked_fill(labels_batch.attention_mask.ne(1), -100)\n",
"\n",
"        batch[\"labels\"] = labels\n",
"\n",
"        return batch" ] }, { "cell_type": "code",
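"execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Editor-added sanity check (hedged sketch, not part of the original training run):\n",
"# it shows what DataCollatorCTCWithPadding produces for a tiny synthetic batch, i.e. inputs\n",
"# padded to the longest example and label padding replaced by -100 so the CTC loss ignores it.\n",
"# The waveform lengths and label ids below are arbitrary illustrative assumptions\n",
"# (they only assume the tokenizer vocabulary has at least 10 entries).\n",
"_toy_collator = DataCollatorCTCWithPadding(processor=processor, padding=True)\n",
"_toy_features = [\n",
"    {\"input_values\": [0.1] * 16000, \"labels\": [5, 6, 7]},  # ~1 s of fake audio, 3 label ids\n",
"    {\"input_values\": [0.2] * 8000, \"labels\": [8, 9]},      # ~0.5 s of fake audio, 2 label ids\n",
"]\n",
"_toy_batch = _toy_collator(_toy_features)\n",
"print(_toy_batch[\"input_values\"].shape)  # padded to the longest input: torch.Size([2, 16000])\n",
"print(_toy_batch[\"labels\"])  # second row ends with -100 where the label was padded\n" ] }, { "cell_type": "code",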
"execution_count": 7, "metadata": { "id": "lbQf5GuZyQ4_" }, "outputs": [], "source": [ "data_collator = DataCollatorCTCWithPadding(processor=processor, padding=True)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "id": "9Xsux2gmyXso" }, "outputs": [], "source": [ "wer_metric = load_metric(\"wer\")\n", "cer_metric = load_metric(\"cer\")\n", "metrics = [wer_metric, cer_metric]" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "id": "1XZ-kjweyTy_" }, "outputs": [], "source": [ "import numpy as np\n", "\n", "def compute_metrics(pred):\n", " pred_logits = pred.predictions\n", " pred_ids = np.argmax(pred_logits, axis=-1)\n", "\n", " pred.label_ids[pred.label_ids == -100] = processor.tokenizer.pad_token_id\n", "\n", " pred_str = processor.batch_decode(pred_ids)\n", " # we do not want to group tokens when computing the metrics\n", " label_str = processor.batch_decode(pred.label_ids, group_tokens=False)\n", "\n", " wer = wer_metric.compute(predictions=pred_str, references=label_str)\n", " cer = cer_metric.compute(predictions=pred_str, references=label_str)\n", "\n", " return {\"wer\": wer, \"cer\": cer}" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 9496, "status": "ok", "timestamp": 1641588938616, "user": { "displayName": "Yurii Paniv", "photoUrl": "https://lh3.googleusercontent.com/a/default-user=s64", "userId": "13095662915325887123" }, "user_tz": -120 }, "id": "e7cqAWIayn6w", "outputId": "b7b20ce9-e1b2-473f-8032-2a75f98dfa9e" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Some weights of the model checkpoint at facebook/wav2vec2-xls-r-300m were not used when initializing Wav2Vec2ForCTC: ['project_q.weight', 'quantizer.weight_proj.weight', 'project_q.bias', 'quantizer.weight_proj.bias', 'project_hid.bias', 'project_hid.weight', 'quantizer.codevectors']\n", "- This IS expected if you are initializing Wav2Vec2ForCTC from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", "- This IS NOT expected if you are initializing Wav2Vec2ForCTC from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", "Some weights of Wav2Vec2ForCTC were not initialized from the model checkpoint at facebook/wav2vec2-xls-r-300m and are newly initialized: ['lm_head.bias', 'lm_head.weight']\n", "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n" ] } ], "source": [ "from transformers import Wav2Vec2ForCTC\n", "\n", "model = Wav2Vec2ForCTC.from_pretrained(\n", " \"facebook/wav2vec2-xls-r-300m\", \n", " attention_dropout=0.3,\n", " hidden_dropout=0.3,\n", " feat_proj_dropout=0.3,\n", " mask_time_prob=0.05,\n", " layerdrop=0.3,\n", " ctc_loss_reduction=\"mean\", \n", " pad_token_id=processor.tokenizer.pad_token_id,\n", " vocab_size=len(processor.tokenizer),\n", ")" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "id": "oGI8zObtZ3V0" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/robinhad/Projects/unchanged/voice-recognition-ua/env/lib/python3.10/site-packages/transformers/models/wav2vec2/modeling_wav2vec2.py:1618: FutureWarning: The method `freeze_feature_extractor` is deprecated and will be removed in Transformers v5.Please use the equivalent `freeze_feature_encoder` method instead.\n", " warnings.warn(\n" ] } ], "source": [ "model.freeze_feature_extractor()" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "id": "KbeKSV7uzGPP" }, "outputs": [], "source": [ "from transformers import TrainingArguments\n", "\n", "repo_name = \"wav2vec2-xls-r-base-uk\"\n", "\n", "training_args = TrainingArguments(\n", " output_dir=repo_name,\n", " group_by_length=True,\n", " per_device_train_batch_size=24,\n", " per_device_eval_batch_size=24, \n", " gradient_accumulation_steps=6,\n", " eval_accumulation_steps=6,\n", " evaluation_strategy=\"epoch\",\n", " save_strategy=\"epoch\",\n", " logging_strategy=\"epoch\",\n", " num_train_epochs=150,\n", " gradient_checkpointing=True,\n", " fp16=True,\n", " #save_steps=1,\n", " #eval_steps=1,\n", " #logging_steps=1,\n", " learning_rate=3e-4,\n", " warmup_steps=500,\n", " save_total_limit=2,\n", " report_to=\"tensorboard\",\n", " load_best_model_at_end=True,\n", " metric_for_best_model=\"cer\",\n", " greater_is_better=False\n", ")" ] }, { "cell_type": "code", "execution_count": 14, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 11063, "status": "ok", "timestamp": 1641588949674, "user": { "displayName": "Yurii Paniv", "photoUrl": "https://lh3.googleusercontent.com/a/default-user=s64", "userId": "13095662915325887123" }, "user_tz": -120 }, "id": "rY7vBmFCPFgC", "outputId": "2e89d5ea-5b25-44bf-8492-a6220b0b1c38" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using cuda_amp half precision backend\n" ] } ], "source": [ "from transformers import Trainer\n", "\n", "trainer = Trainer(\n", " model=model,\n", " data_collator=data_collator,\n", " args=training_args,\n", " compute_metrics=compute_metrics,\n", " train_dataset=common_voice_train,\n", " eval_dataset=common_voice_test,\n", " tokenizer=processor.feature_extractor,\n", ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 409 }, "id": "9fRr9TG5pGBl", 
"outputId": "c2a7c797-326c-4bd2-b167-9d2f41d77def" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Loading model from wav2vec2-xls-r-base-uk/checkpoint-7505.\n", "The following columns in the training set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "/home/robinhad/Projects/unchanged/voice-recognition-ua/env/lib/python3.10/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", " warnings.warn(\n", "***** Running training *****\n", " Num examples = 11463\n", " Num Epochs = 150\n", " Instantaneous batch size per device = 24\n", " Total train batch size (w. parallel, distributed & accumulation) = 144\n", " Gradient Accumulation steps = 6\n", " Total optimization steps = 11850\n", " Continuing training from checkpoint, will skip to saved global_step\n", " Continuing training from epoch 95\n", " Continuing training from global step 7505\n", " Will skip the first 95 epochs then the first 0 batches in the first epoch. If this takes a lot of time, you can add the `--ignore_data_skip` flag to your launch command, but you will resume the training on data already seen by your model.\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "d39c143147e7431a91cf50b54464cbee", "version_major": 2, "version_minor": 0 }, "text/plain": [ "0it [00:00, ?it/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "\n", "
\n", " \n", " \n", " [ 7910/11850 45:49 < 7:28:05, 0.15 it/s, Epoch 100.11/150]\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
EpochTraining LossValidation LossWerCer
950.2712000.5969270.5195650.128453
960.2793000.5957890.5165180.128272
970.2768000.6234000.5125820.127275
980.2660000.6172450.5191810.130092
990.2816000.6067720.5124010.128527

" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stderr", "output_type": "stream", "text": [ "The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "***** Running Evaluation *****\n", " Num examples = 6783\n", " Batch size = 24\n", "Saving model checkpoint to wav2vec2-xls-r-base-uk/checkpoint-7584\n", "Configuration saved in wav2vec2-xls-r-base-uk/checkpoint-7584/config.json\n", "Model weights saved in wav2vec2-xls-r-base-uk/checkpoint-7584/pytorch_model.bin\n", "Feature extractor saved in wav2vec2-xls-r-base-uk/checkpoint-7584/preprocessor_config.json\n", "Deleting older checkpoint [wav2vec2-xls-r-base-uk/checkpoint-7505] due to args.save_total_limit\n", "The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "***** Running Evaluation *****\n", " Num examples = 6783\n", " Batch size = 24\n", "Saving model checkpoint to wav2vec2-xls-r-base-uk/checkpoint-7663\n", "Configuration saved in wav2vec2-xls-r-base-uk/checkpoint-7663/config.json\n", "Model weights saved in wav2vec2-xls-r-base-uk/checkpoint-7663/pytorch_model.bin\n", "Feature extractor saved in wav2vec2-xls-r-base-uk/checkpoint-7663/preprocessor_config.json\n", "Deleting older checkpoint [wav2vec2-xls-r-base-uk/checkpoint-7584] due to args.save_total_limit\n", "The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "***** Running Evaluation *****\n", " Num examples = 6783\n", " Batch size = 24\n", "Saving model checkpoint to wav2vec2-xls-r-base-uk/checkpoint-7742\n", "Configuration saved in wav2vec2-xls-r-base-uk/checkpoint-7742/config.json\n", "Model weights saved in wav2vec2-xls-r-base-uk/checkpoint-7742/pytorch_model.bin\n", "Feature extractor saved in wav2vec2-xls-r-base-uk/checkpoint-7742/preprocessor_config.json\n", "Deleting older checkpoint [wav2vec2-xls-r-base-uk/checkpoint-7663] due to args.save_total_limit\n", "The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "***** Running Evaluation *****\n", " Num examples = 6783\n", " Batch size = 24\n", "Saving model checkpoint to wav2vec2-xls-r-base-uk/checkpoint-7821\n", "Configuration saved in wav2vec2-xls-r-base-uk/checkpoint-7821/config.json\n", "Model weights saved in wav2vec2-xls-r-base-uk/checkpoint-7821/pytorch_model.bin\n", "Feature extractor saved in wav2vec2-xls-r-base-uk/checkpoint-7821/preprocessor_config.json\n", "Deleting older checkpoint [wav2vec2-xls-r-base-uk/checkpoint-7742] due to args.save_total_limit\n", "The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length. 
If input_length are not expected by `Wav2Vec2ForCTC.forward`, you can safely ignore this message.\n", "***** Running Evaluation *****\n", " Num examples = 6783\n", " Batch size = 24\n", "Saving model checkpoint to wav2vec2-xls-r-base-uk/checkpoint-7900\n", "Configuration saved in wav2vec2-xls-r-base-uk/checkpoint-7900/config.json\n", "Model weights saved in wav2vec2-xls-r-base-uk/checkpoint-7900/pytorch_model.bin\n", "Feature extractor saved in wav2vec2-xls-r-base-uk/checkpoint-7900/preprocessor_config.json\n", "Deleting older checkpoint [wav2vec2-xls-r-base-uk/checkpoint-7821] due to args.save_total_limit\n" ] } ], "source": [ "trainer.train(resume_from_checkpoint=True)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "trainer.create_model_card()" ] } ], "metadata": { "accelerator": "GPU", "colab": { "collapsed_sections": [], "machine_shape": "hm", "name": "Копія записника \"Fine-Tune XLS-R on Common Voice.ipynb\"", "provenance": [ { "file_id": "https://github.com/patrickvonplaten/notebooks/blob/master/Fine_Tune_XLS_R_on_Common_Voice.ipynb", "timestamp": 1641583715050 } ] }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.6" }, "vscode": { "interpreter": { "hash": "a5cdd9abf8df3af0fd61fdb3838d6c6f2f66a9ba4bf4484f45cd88abf9f04fe9" } }, "widgets": { "application/vnd.jupyter.widget-state+json": { "04ec68b059df4c628839c3ac29e2ebdd": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "05d8496d54174ae298c319b0194fc710": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "116786d9364a4a57b521cddaabeda688": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HBoxModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": 
"@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_a1e2c04dc2cb45ea80bec125e3dbf56f", "IPY_MODEL_b6d46d40efa14b21814f41531f5a2f41", "IPY_MODEL_d8bf8dc5d6c84140a4e96c9c435b8f17" ], "layout": "IPY_MODEL_9baa2f69aa9c4387bf1086a04ed78420" } }, "18bc63944343440f837cdff76db004fc": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_a4ae510b4f3845f891a796cf844fc2bb", "placeholder": "​", "style": "IPY_MODEL_e6e50da6516847878309fdc5c463edb3", "value": " 6962/6962 [01:46<00:00, 78.15ex/s]" } }, "1f3abdf2e0f6459da4179a94d691c4c4": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "FloatProgressModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c31a747e18df4b4aa4449a30e387448c", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_414efa8a08cd491cb78af8a95a151daa", "value": 1 } }, "22ba979142074f1d976e1a905544fd2d": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "3dedffa30b774426bd474072a3a0d591": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "414efa8a08cd491cb78af8a95a151daa": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "ProgressStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "427056895c674c428400bee0f5b43995": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "445c84e1e2e541f2a54fb989def386ae": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "48c60be3ca9349a295b83f65769c7f27": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_05d8496d54174ae298c319b0194fc710", "placeholder": "​", "style": "IPY_MODEL_3dedffa30b774426bd474072a3a0d591", "value": " 1/1 [00:00<00:00, 11.09ba/s]" } }, "5815ae1348994bfebba4a8e968489a96": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "5c2a7fea8c434d51ada69a0854b88baf": { "model_module": "@jupyter-widgets/base", 
"model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "68502fb433564eee8dfdf272ed7e4f56": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_5c2a7fea8c434d51ada69a0854b88baf", "placeholder": "​", "style": "IPY_MODEL_6c80bd8a8fe14a5989fe27445c14650f", "value": "100%" } }, "6c80bd8a8fe14a5989fe27445c14650f": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "77f1a51099b24831ad8b2be3d2dc833a": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8b6b7f28751c45c8869aa86eb2a0ab26": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HBoxModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, 
"_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_68502fb433564eee8dfdf272ed7e4f56", "IPY_MODEL_1f3abdf2e0f6459da4179a94d691c4c4", "IPY_MODEL_48c60be3ca9349a295b83f65769c7f27" ], "layout": "IPY_MODEL_445c84e1e2e541f2a54fb989def386ae" } }, "9baa2f69aa9c4387bf1086a04ed78420": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "9c875952cdd649a5bab87de9bb3f5200": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "a1e2c04dc2cb45ea80bec125e3dbf56f": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_427056895c674c428400bee0f5b43995", "placeholder": "​", "style": "IPY_MODEL_04ec68b059df4c628839c3ac29e2ebdd", "value": "100%" } }, "a29f88f174f8499082fbb36a36c47fa4": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HBoxModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_d45747150d0b434593a3a7c98399599a", "IPY_MODEL_ea73f7deb1c643f7b81de7fb7acaaf1b", "IPY_MODEL_18bc63944343440f837cdff76db004fc" ], "layout": "IPY_MODEL_efc3bc0c48124ebeb79d245216eaf0fe" } }, "a4ae510b4f3845f891a796cf844fc2bb": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": 
null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "aa329cb93df44a6da6012c7cc49d7489": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "b39b6e9131ca4ce3b31e84ceb04e1b83": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "ProgressStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "b6d46d40efa14b21814f41531f5a2f41": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "FloatProgressModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_77f1a51099b24831ad8b2be3d2dc833a", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_d518f2c2ab6945b78a6d336dad6262bd", "value": 1 } }, "c31a747e18df4b4aa4449a30e387448c": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, 
"grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c5eed102ef134a4e8ca41713b82ff6a4": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d45747150d0b434593a3a7c98399599a": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_aa329cb93df44a6da6012c7cc49d7489", "placeholder": "​", "style": "IPY_MODEL_9c875952cdd649a5bab87de9bb3f5200", "value": "100%" } }, "d518f2c2ab6945b78a6d336dad6262bd": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "ProgressStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "d8bf8dc5d6c84140a4e96c9c435b8f17": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "HTMLModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_22ba979142074f1d976e1a905544fd2d", "placeholder": "​", "style": "IPY_MODEL_5815ae1348994bfebba4a8e968489a96", "value": " 1/1 [00:00<00:00, 7.95ba/s]" } }, "e6e50da6516847878309fdc5c463edb3": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": 
"DescriptionStyleModel", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "ea73f7deb1c643f7b81de7fb7acaaf1b": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.5.0", "model_name": "FloatProgressModel", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c5eed102ef134a4e8ca41713b82ff6a4", "max": 6962, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_b39b6e9131ca4ce3b31e84ceb04e1b83", "value": 6962 } }, "efc3bc0c48124ebeb79d245216eaf0fe": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } } } } }, "nbformat": 4, "nbformat_minor": 4 }