{"cells":[{"cell_type":"markdown","metadata":{"id":"28e4c4d1-a73f-437b-a1bd-c2cc3874924a"},"source":["# 강의 11주차: midm-food-order-understanding\n","\n","1. KT-AI/midm-bitext-S-7B-inst-v1 를 주문 문장 이해에 미세 튜닝\n","\n","- food-order-understanding-small-3200.json (학습)\n","- food-order-understanding-small-800.json (검증)\n","\n","\n","종속적인 필요 내용\n","- huggingface 계정 설정 및 llama-2 사용 승인\n","- 로깅을 위한 wandb\n","\n","\n","history\n","\n","v1.2\n","- KT-AI/midm-bitext-S-7B-inst-v1 에 safetensors 포맷이 올라왔기에, 해당 리포에서 받도록 설정 변경\n","- 전체 과정 재검증"],"id":"28e4c4d1-a73f-437b-a1bd-c2cc3874924a"},{"cell_type":"code","execution_count":2,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"nDZe_wqKU6J3","outputId":"031e0ee2-9385-44c0-ab12-97cb3c95ffc9","executionInfo":{"status":"ok","timestamp":1702304409865,"user_tz":-540,"elapsed":14624,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: transformers in /usr/local/lib/python3.10/dist-packages (4.35.2)\n","Requirement already satisfied: peft in /usr/local/lib/python3.10/dist-packages (0.7.0)\n","Requirement already satisfied: accelerate in /usr/local/lib/python3.10/dist-packages (0.25.0)\n","Requirement already satisfied: optimum in /usr/local/lib/python3.10/dist-packages (1.15.0)\n","Requirement already satisfied: bitsandbytes in /usr/local/lib/python3.10/dist-packages (0.41.3.post1)\n","Requirement already satisfied: trl in /usr/local/lib/python3.10/dist-packages (0.7.4)\n","Requirement already satisfied: wandb in /usr/local/lib/python3.10/dist-packages (0.16.1)\n","Requirement already satisfied: einops in /usr/local/lib/python3.10/dist-packages (0.7.0)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers) (3.13.1)\n","Requirement already satisfied: huggingface-hub<1.0,>=0.16.4 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.19.4)\n","Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (1.23.5)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from transformers) (23.2)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (6.0.1)\n","Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (2023.6.3)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers) (2.31.0)\n","Requirement already satisfied: tokenizers<0.19,>=0.14 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.15.0)\n","Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.4.1)\n","Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers) (4.66.1)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from peft) (5.9.5)\n","Requirement already satisfied: torch>=1.13.0 in /usr/local/lib/python3.10/dist-packages (from peft) (2.1.0+cu118)\n","Requirement already satisfied: coloredlogs in /usr/local/lib/python3.10/dist-packages (from optimum) (15.0.1)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from optimum) (1.12)\n","Requirement already satisfied: datasets in /usr/local/lib/python3.10/dist-packages (from optimum) (2.15.0)\n","Requirement already satisfied: tyro>=0.5.11 in 
/usr/local/lib/python3.10/dist-packages (from trl) (0.6.0)\n","Requirement already satisfied: Click!=8.0.0,>=7.1 in /usr/local/lib/python3.10/dist-packages (from wandb) (8.1.7)\n","Requirement already satisfied: GitPython!=3.1.29,>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from wandb) (3.1.40)\n","Requirement already satisfied: sentry-sdk>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from wandb) (1.38.0)\n","Requirement already satisfied: docker-pycreds>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from wandb) (0.4.0)\n","Requirement already satisfied: setproctitle in /usr/local/lib/python3.10/dist-packages (from wandb) (1.3.3)\n","Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from wandb) (67.7.2)\n","Requirement already satisfied: appdirs>=1.4.3 in /usr/local/lib/python3.10/dist-packages (from wandb) (1.4.4)\n","Requirement already satisfied: protobuf!=4.21.0,<5,>=3.19.0 in /usr/local/lib/python3.10/dist-packages (from wandb) (3.20.3)\n","Requirement already satisfied: six>=1.4.0 in /usr/local/lib/python3.10/dist-packages (from docker-pycreds>=0.4.0->wandb) (1.16.0)\n","Requirement already satisfied: gitdb<5,>=4.0.1 in /usr/local/lib/python3.10/dist-packages (from GitPython!=3.1.29,>=1.0.0->wandb) (4.0.11)\n","Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.16.4->transformers) (2023.6.0)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.16.4->transformers) (4.5.0)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.6)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2023.11.17)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.13.0->peft) (3.2.1)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.13.0->peft) (3.1.2)\n","Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.13.0->peft) (2.1.0)\n","Requirement already satisfied: sentencepiece!=0.1.92,>=0.1.91 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.1.99)\n","Requirement already satisfied: docstring-parser>=0.14.1 in /usr/local/lib/python3.10/dist-packages (from tyro>=0.5.11->trl) (0.15)\n","Requirement already satisfied: rich>=11.1.0 in /usr/local/lib/python3.10/dist-packages (from tyro>=0.5.11->trl) (13.7.0)\n","Requirement already satisfied: shtab>=1.5.6 in /usr/local/lib/python3.10/dist-packages (from tyro>=0.5.11->trl) (1.6.5)\n","Requirement already satisfied: humanfriendly>=9.1 in /usr/local/lib/python3.10/dist-packages (from coloredlogs->optimum) (10.0)\n","Requirement already satisfied: pyarrow>=8.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (9.0.0)\n","Requirement already satisfied: pyarrow-hotfix in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (0.6)\n","Requirement already satisfied: dill<0.3.8,>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) 
(0.3.7)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (1.5.3)\n","Requirement already satisfied: xxhash in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (3.4.1)\n","Requirement already satisfied: multiprocess in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (0.70.15)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets->optimum) (3.9.1)\n","Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->optimum) (1.3.0)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (23.1.0)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (6.0.4)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (1.9.3)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (1.4.0)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (1.3.1)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->optimum) (4.0.3)\n","Requirement already satisfied: smmap<6,>=3.0.1 in /usr/local/lib/python3.10/dist-packages (from gitdb<5,>=4.0.1->GitPython!=3.1.29,>=1.0.0->wandb) (5.0.1)\n","Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.10/dist-packages (from rich>=11.1.0->tyro>=0.5.11->trl) (3.0.0)\n","Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from rich>=11.1.0->tyro>=0.5.11->trl) (2.16.1)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.13.0->peft) (2.1.3)\n","Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->optimum) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->optimum) (2023.3.post1)\n","Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.10/dist-packages (from markdown-it-py>=2.2.0->rich>=11.1.0->tyro>=0.5.11->trl) (0.1.2)\n"]}],"source":["pip install transformers peft accelerate optimum bitsandbytes trl wandb einops"],"id":"nDZe_wqKU6J3"},{"cell_type":"code","execution_count":3,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"51eb00d7-2928-41ad-9ae9-7f0da7d64d6d","outputId":"e7e31196-fa10-4589-e5e8-c4086486db5f","executionInfo":{"status":"ok","timestamp":1702304447771,"user_tz":-540,"elapsed":30386,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/trl/trainer/ppo_config.py:141: UserWarning: The `optimize_cuda_cache` arguement will be deprecated soon, please use `optimize_device_cache` instead.\n"," warnings.warn(\n"]}],"source":["import os\n","from dataclasses import dataclass, field\n","from typing import Optional\n","import re\n","\n","import torch\n","import tyro\n","from accelerate import Accelerator\n","from datasets import load_dataset, Dataset\n","from peft import AutoPeftModelForCausalLM, LoraConfig\n","from tqdm import tqdm\n","from transformers 
import (\n"," AutoModelForCausalLM,\n"," AutoTokenizer,\n"," BitsAndBytesConfig,\n"," TrainingArguments,\n",")\n","\n","from trl import SFTTrainer\n","\n","from trl.trainer import ConstantLengthDataset"],"id":"51eb00d7-2928-41ad-9ae9-7f0da7d64d6d"},{"cell_type":"code","execution_count":4,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":162,"referenced_widgets":["dbe8b80107f646fca9ce17fc6898688e","25bab324b2b9446bad5f3a73eed40e68","1e5df26c96974f9e80ec411cc2efb005","726bbc9eda2647089f64254e9afc18a6","730a80d2060d4c0d9ddd2e17f2da0045","cd2ea8d1f93c436c8045979227f28f39","e520cbc12c7f45809976dfbfcf56dd64","cacc47dd52114b3caa6a0a420f748793","435d3880497f437fbe82c5c5aea4723b","f2c6a7c598a2446d980e5b099f8b0504","380d699b391e443594c77e0618acc1e6","81c738cb1572429fad029c865af5864e","1dbd9abdfd9f441a9a2a92797469029f","bdff58ba27c74f89acc6ce2fa028b322","a8d2283aa6d44f1ab1549f4311e88e2d","ff6ee54fece6482fa4908c5bd6f35331","4552475fe488474e98941eb5bc34fe1e","349de155fbbb411b98558636e5b363e5","29721702addc4325b2d6578e51ad6212","ff3d0f971a534f23928c1c9b133ade05","38d4d232d70d49dd8c3ab620e6cfb96c","7dcd8bfea49a447390fd3d693ce473f8","a827efea829546b7b7e5e42a465849e4","fee5d6bf794f4cb7962ef9985fbf4348","bb9ba62e3cd74e5d965fd6d7cbfffcdb","6d01340c7ea248da9b089906ddb0743f","520fd7520fe4457f88e1e7bdcbff3e99","66775e202d174977937a2bb33552e08d","ab2576b47a964778a4fb23a0177c2372","a99d5e99af0748a289fa755b80c2ceaf","129d75c4582a42b98245c5a79ea22525","92fdf3c90389449595e1d7b3605f6953"]},"id":"tX7gYxZaVhYL","outputId":"368e5df8-8976-47c1-a8be-d407e4e16a4d","executionInfo":{"status":"ok","timestamp":1702304450076,"user_tz":-540,"elapsed":364,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["VBox(children=(HTML(value='
,\n","ignore_data_skip=False,\n","include_inputs_for_metrics=False,\n","include_tokens_per_second=False,\n","jit_mode_eval=False,\n","label_names=None,\n","label_smoothing_factor=0.0,\n","learning_rate=0.0001,\n","length_column_name=length,\n","load_best_model_at_end=False,\n","local_rank=0,\n","log_level=passive,\n","log_level_replica=warning,\n","log_on_each_node=True,\n","logging_dir=./results/runs/Dec11_14-21-47_8ccc3e745a6c,\n","logging_first_step=False,\n","logging_nan_inf_filter=True,\n","logging_steps=50,\n","logging_strategy=steps,\n","lr_scheduler_type=cosine,\n","max_grad_norm=0.3,\n","max_steps=300,\n","metric_for_best_model=None,\n","mp_parameters=,\n","neftune_noise_alpha=None,\n","no_cuda=False,\n","num_train_epochs=1,\n","optim=paged_adamw_32bit,\n","optim_args=None,\n","output_dir=/gdrive/MyDrive/lora-midm-7b-food-order-understanding,\n","overwrite_output_dir=False,\n","past_index=-1,\n","per_device_eval_batch_size=1,\n","per_device_train_batch_size=1,\n","prediction_loss_only=False,\n","push_to_hub=False,\n","push_to_hub_model_id=None,\n","push_to_hub_organization=None,\n","push_to_hub_token=,\n","ray_scope=last,\n","remove_unused_columns=False,\n","report_to=['wandb'],\n","resume_from_checkpoint=None,\n","run_name=midm-7b-food-order-understanding,\n","save_on_each_node=False,\n","save_safetensors=True,\n","save_steps=500,\n","save_strategy=epoch,\n","save_total_limit=20,\n","seed=42,\n","skip_memory_metrics=True,\n","split_batches=False,\n","tf32=None,\n","torch_compile=False,\n","torch_compile_backend=None,\n","torch_compile_mode=None,\n","torchdynamo=None,\n","tpu_metrics_debug=False,\n","tpu_num_cores=None,\n","use_cpu=False,\n","use_ipex=False,\n","use_legacy_prediction_loop=False,\n","use_mps_device=False,\n","warmup_ratio=0.03,\n","warmup_steps=0,\n","weight_decay=0.05,\n","), packing=True, peft_config=LoraConfig(peft_type=, auto_mapping=None, base_model_name_or_path=None, revision=None, task_type='CAUSAL_LM', inference_mode=False, r=8, target_modules={'c_fc', 'c_attn', 'c_proj'}, lora_alpha=16, lora_dropout=0.05, fan_in_fan_out=False, bias='none', modules_to_save=None, init_lora_weights=True, layers_to_transform=None, layers_pattern=None, rank_pattern={}, alpha_pattern={}, megatron_config=None, megatron_core='megatron.core', loftq_config={}), merge_with_final_checkpoint=False)\n"]}],"source":["print(script_args)"],"id":"bac62c01-21ef-491e-a686-cf4988186c58"},{"cell_type":"code","execution_count":13,"metadata":{"id":"1ff1422e-184d-4438-b033-40ae8bdaa5fd","executionInfo":{"status":"ok","timestamp":1702304525838,"user_tz":-540,"elapsed":324,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["bnb_config = BitsAndBytesConfig(\n"," load_in_4bit=True,\n"," bnb_4bit_quant_type=\"nf4\",\n"," bnb_4bit_compute_dtype=torch.bfloat16,\n",")"],"id":"1ff1422e-184d-4438-b033-40ae8bdaa5fd"},{"cell_type":"markdown","metadata":{"id":"elg7gcB-5zb7"},"source":["원본인 'KT-AI/midm-bitext-S-7B-inst-v1' 는 *.bin 형태로 모델을 제공한다.\n","- 코랩에서 CPU 메모리 부족 발생\n","\n","해결책\n","- safetensors로 변환한 모델을 업로드 하고 이를 사용하기로 
한다."],"id":"elg7gcB-5zb7"},{"cell_type":"code","execution_count":14,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":168,"referenced_widgets":["b2e8914a604a4cd7a8160a247b46897e","80b1d408c82c4a16b237c9ca6ff853a9","9ae5d008fbdb49e793eeca063f8a9b79","e0f4a69b292d4821b24b1e0f8c85d994","ac74b0890fdb4386a50184258f6efea6","329a5858a60f4140b693ad2d40f2666c","ed062bc006874d5a975c048bf1b49111","0405581206a04b8f9e462b4a97a9b396","d39c0747d6da4f9095fb300b7ecdee14","35a047ec6fdd44df851354380808b081","51396a17ef894a3dbddbcc21f59e6fe9"]},"id":"15c8425e-bb0b-40c5-bfe8-385bac699b9d","outputId":"0e0eac23-cbe8-4fa6-9a80-c3710b860b4a","executionInfo":{"status":"ok","timestamp":1702304636354,"user_tz":-540,"elapsed":99278,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py:472: FutureWarning: The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.\n"," warnings.warn(\n"]},{"output_type":"display_data","data":{"text/plain":["Loading checkpoint shards: 0%| | 0/2 [00:00, auto_mapping=None, base_model_name_or_path=None, revision=None, task_type='CAUSAL_LM', inference_mode=False, r=8, target_modules={'c_fc', 'c_attn', 'c_proj'}, lora_alpha=16, lora_dropout=0.05, fan_in_fan_out=False, bias='none', modules_to_save=None, init_lora_weights=True, layers_to_transform=None, layers_pattern=None, rank_pattern={}, alpha_pattern={}, megatron_config=None, megatron_core='megatron.core', loftq_config={})"]},"metadata":{},"execution_count":17}],"source":["peft_config"],"id":"4420fcc4-2bac-413d-b7aa-89455c512419"},{"cell_type":"code","execution_count":18,"metadata":{"id":"f47f9584-3988-46b8-a062-29dcde75a0e2","executionInfo":{"status":"ok","timestamp":1702304651037,"user_tz":-540,"elapsed":915,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["tokenizer = AutoTokenizer.from_pretrained(\n"," script_args.model_name,\n"," trust_remote_code=True,\n"," cache_dir=script_args.cache_dir,\n",")\n","\n","if getattr(tokenizer, \"pad_token\", None) is None:\n"," tokenizer.pad_token = tokenizer.eos_token\n","tokenizer.padding_side = \"right\" # Fix weird overflow issue with fp16 training\n","\n","tokenizer.add_special_tokens(dict(bos_token=''))\n","\n","base_model.config.pad_token_id = tokenizer.pad_token_id\n","base_model.config.bos_token_id = tokenizer.bos_token_id"],"id":"f47f9584-3988-46b8-a062-29dcde75a0e2"},{"cell_type":"code","execution_count":19,"metadata":{"id":"abd17c83-ab8d-44cb-b69b-fc0936c2cec5","executionInfo":{"status":"ok","timestamp":1702304654395,"user_tz":-540,"elapsed":339,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["training_args = script_args.training_args"],"id":"abd17c83-ab8d-44cb-b69b-fc0936c2cec5"},{"cell_type":"code","execution_count":20,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"62e8139f-5179-4c75-84a7-0c818ab0a35a","outputId":"b0893f9b-1aad-499d-9857-e8b7faac026f","executionInfo":{"status":"ok","timestamp":1702304656295,"user_tz":-540,"elapsed":3,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["100%|██████████| 400/400 [00:00<00:00, 2881.84it/s]"]},{"output_type":"stream","name":"stdout","text":["The character to token ratio of the dataset is: 
1.52\n"]},{"output_type":"stream","name":"stderr","text":["\n","/usr/local/lib/python3.10/dist-packages/trl/trainer/utils.py:548: UserWarning: The passed formatting_func has more than one argument. Usually that function should have a single argument `example` which corresponds to the dictionary returned by each element of the dataset. Make sure you know what you are doing.\n"," warnings.warn(\n"]}],"source":["train_dataset = create_datasets(tokenizer, script_args)"],"id":"62e8139f-5179-4c75-84a7-0c818ab0a35a"},{"cell_type":"code","execution_count":21,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"000314e9-f10b-4685-8da6-0511494a9eb4","outputId":"30604c53-3c78-45a5-80d4-3d8a06468906","executionInfo":{"status":"ok","timestamp":1702304659649,"user_tz":-540,"elapsed":455,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["3200"]},"metadata":{},"execution_count":21}],"source":["len(train_dataset)"],"id":"000314e9-f10b-4685-8da6-0511494a9eb4"},{"cell_type":"code","execution_count":22,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"4ba80a64-0ec7-4b29-ac95-7b3d34549f17","outputId":"82ff1e93-8546-4feb-e018-be6b94116331","executionInfo":{"status":"ok","timestamp":1702304662462,"user_tz":-540,"elapsed":879,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/trl/trainer/sft_trainer.py:267: UserWarning: You passed `packing=True` to the SFTTrainer, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n"," warnings.warn(\n"]}],"source":["trainer = SFTTrainer(\n"," model=base_model,\n"," train_dataset=train_dataset,\n"," eval_dataset=None,\n"," peft_config=peft_config,\n"," packing=script_args.packing,\n"," max_seq_length=script_args.seq_length,\n"," tokenizer=tokenizer,\n"," args=training_args,\n",")"],"id":"4ba80a64-0ec7-4b29-ac95-7b3d34549f17"},{"cell_type":"code","execution_count":23,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"6qPxfovivMuH","outputId":"eb88ae80-cc09-46de-9eaf-2f1c194e65b6","executionInfo":{"status":"ok","timestamp":1702304665032,"user_tz":-540,"elapsed":323,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["MidmLMHeadModel(\n"," (transformer): MidmModel(\n"," (wte): Embedding(72192, 4096)\n"," (rotary_pos_emb): RotaryEmbedding()\n"," (drop): Dropout(p=0.0, inplace=False)\n"," (h): ModuleList(\n"," (0-31): 32 x MidmBlock(\n"," (ln_1): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," (attn): MidmAttention(\n"," (c_attn): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=12288, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=12288, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (c_proj): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=4096, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," 
(lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=4096, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (attn_dropout): Dropout(p=0.0, inplace=False)\n"," (resid_dropout): Dropout(p=0.0, inplace=False)\n"," )\n"," (ln_2): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," (mlp): MidmMLP(\n"," (c_fc): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=21760, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=21760, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (c_proj): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=10880, out_features=4096, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=10880, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=4096, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (dropout): Dropout(p=0.0, inplace=False)\n"," )\n"," )\n"," )\n"," (ln_f): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," )\n"," (lm_head): Linear(in_features=4096, out_features=72192, bias=False)\n",")"]},"metadata":{},"execution_count":23}],"source":["base_model"],"id":"6qPxfovivMuH"},{"cell_type":"code","execution_count":24,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"gw9xbeUgbZEo","outputId":"02436cba-c8bb-48e7-cf63-4bef5313ff80","executionInfo":{"status":"ok","timestamp":1702304668322,"user_tz":-540,"elapsed":363,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["PeftModelForCausalLM(\n"," (base_model): LoraModel(\n"," (model): MidmLMHeadModel(\n"," (transformer): MidmModel(\n"," (wte): Embedding(72192, 4096)\n"," (rotary_pos_emb): RotaryEmbedding()\n"," (drop): Dropout(p=0.0, inplace=False)\n"," (h): ModuleList(\n"," (0-31): 32 x MidmBlock(\n"," (ln_1): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," (attn): MidmAttention(\n"," (c_attn): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=12288, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=12288, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (c_proj): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=4096, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=4096, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (attn_dropout): Dropout(p=0.0, inplace=False)\n"," (resid_dropout): Dropout(p=0.0, inplace=False)\n"," )\n"," 
(ln_2): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," (mlp): MidmMLP(\n"," (c_fc): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=4096, out_features=21760, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=4096, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=21760, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (c_proj): lora.Linear4bit(\n"," (base_layer): Linear4bit(in_features=10880, out_features=4096, bias=False)\n"," (lora_dropout): ModuleDict(\n"," (default): Dropout(p=0.05, inplace=False)\n"," )\n"," (lora_A): ModuleDict(\n"," (default): Linear(in_features=10880, out_features=8, bias=False)\n"," )\n"," (lora_B): ModuleDict(\n"," (default): Linear(in_features=8, out_features=4096, bias=False)\n"," )\n"," (lora_embedding_A): ParameterDict()\n"," (lora_embedding_B): ParameterDict()\n"," )\n"," (dropout): Dropout(p=0.0, inplace=False)\n"," )\n"," )\n"," )\n"," (ln_f): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n"," )\n"," (lm_head): Linear(in_features=4096, out_features=72192, bias=False)\n"," )\n"," )\n",")"]},"metadata":{},"execution_count":24}],"source":["trainer.model"],"id":"gw9xbeUgbZEo"},{"cell_type":"code","execution_count":25,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"edb204be-ec15-4800-af49-6cfbad2f7f9a","outputId":"a49ca1de-0d59-48d8-cbc3-ba91e3ba1904","executionInfo":{"status":"ok","timestamp":1702304672140,"user_tz":-540,"elapsed":304,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["trainable params: 16744448 || all params: 3821510656 || trainable%: 0.4381630592527648\n"]}],"source":["print_trainable_parameters(base_model)"],"id":"edb204be-ec15-4800-af49-6cfbad2f7f9a"},{"cell_type":"code","execution_count":26,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"sVA-VzeTigHs","outputId":"cb6cfa28-e9ad-4cc9-b9c5-7bfd0d8cadd2","executionInfo":{"status":"ok","timestamp":1702304673972,"user_tz":-540,"elapsed":292,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["7795015808"]},"metadata":{},"execution_count":26}],"source":["base_model.get_memory_footprint()"],"id":"sVA-VzeTigHs"},{"cell_type":"code","source":["trainer.model.print_trainable_parameters()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"CTZmx_faQ-Xj","outputId":"ef6b9e25-ce54-49ae-fe9e-05b18de03fc4","executionInfo":{"status":"ok","timestamp":1702304675538,"user_tz":-540,"elapsed":2,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"id":"CTZmx_faQ-Xj","execution_count":27,"outputs":[{"output_type":"stream","name":"stdout","text":["trainable params: 16,744,448 || all params: 7,034,347,520 || trainable%: 0.23803839591934178\n"]}]},{"cell_type":"markdown","metadata":{"id":"76sRe172fGlm"},"source":["midm 모델을 주문 문장 이해에 적용시 특징\n","- 모델 로딩 과정에서 CPU도 5.1기가, 디스크 42.4기가, GPU 메모리: 7,4 기가\n","\n","구글 코랩 T-4 GPU: 300스텝 (13:47초 예상)\n","\n","시퀀스 길이 384의 경우\n","- 14.7 G / 15.0 G 사용\n","- 메모리 오버플로우 발생시 이보다 줄일 
것"],"id":"76sRe172fGlm"},{"cell_type":"code","execution_count":28,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":443},"id":"14019fa9-0c6f-4729-ac99-0d407af375b8","outputId":"7f0f51ba-6b7c-4aef-992d-c00a21dd7ed6","executionInfo":{"status":"ok","timestamp":1702305619411,"user_tz":-540,"elapsed":940657,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["\u001b[34m\u001b[1mwandb\u001b[0m: Currently logged in as: \u001b[33msuyeun0109\u001b[0m (\u001b[33msuyeun\u001b[0m). Use \u001b[1m`wandb login --relogin`\u001b[0m to force relogin\n"]},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["Tracking run with wandb version 0.16.1"]},"metadata":{}},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["Run data is saved locally in /content/wandb/run-20231211_142441-q0brniqd"]},"metadata":{}},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["Syncing run midm-7b-food-order-understanding to Weights & Biases (docs)
"]},"metadata":{}},{"output_type":"display_data","data":{"text/plain":[""],"text/html":[" View project at https://wandb.ai/suyeun/huggingface"]},"metadata":{}},{"output_type":"display_data","data":{"text/plain":[""],"text/html":[" View run at https://wandb.ai/suyeun/huggingface/runs/q0brniqd"]},"metadata":{}},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["\n","
[300/300 15:27, Epoch 0/1]\n","\n","Step | Training Loss\n","50 | 1.040400\n","100 | 0.548100\n","150 | 0.504600\n","200 | 0.495700\n","250 | 0.518000\n","300 | 0.497100\n
"]},"metadata":{}},{"output_type":"execute_result","data":{"text/plain":["TrainOutput(global_step=300, training_loss=0.6006682777404785, metrics={'train_runtime': 940.0842, 'train_samples_per_second': 0.638, 'train_steps_per_second': 0.319, 'total_flos': 9315508499251200.0, 'train_loss': 0.6006682777404785, 'epoch': 0.19})"]},"metadata":{},"execution_count":28}],"source":["trainer.train()"],"id":"14019fa9-0c6f-4729-ac99-0d407af375b8"},{"cell_type":"code","execution_count":29,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":35},"id":"3Y4FQSyRghQt","outputId":"60b008f1-1e1c-42f3-bd0c-1157fa7412b7","executionInfo":{"status":"ok","timestamp":1702305626226,"user_tz":-540,"elapsed":412,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"execute_result","data":{"text/plain":["'/gdrive/MyDrive/lora-midm-7b-food-order-understanding'"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":29}],"source":["script_args.training_args.output_dir"],"id":"3Y4FQSyRghQt"},{"cell_type":"code","execution_count":30,"metadata":{"id":"49f05450-da2a-4edd-9db2-63836a0ec73a","executionInfo":{"status":"ok","timestamp":1702305629228,"user_tz":-540,"elapsed":851,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["trainer.save_model(script_args.training_args.output_dir)"],"id":"49f05450-da2a-4edd-9db2-63836a0ec73a"},{"cell_type":"markdown","metadata":{"id":"652f307e-e1d7-43ae-b083-dba2d94c2296"},"source":["# 추론 테스트"],"id":"652f307e-e1d7-43ae-b083-dba2d94c2296"},{"cell_type":"code","execution_count":31,"metadata":{"id":"ea8a1fea-7499-4386-9dea-0509110f61af","executionInfo":{"status":"ok","timestamp":1702305631310,"user_tz":-540,"elapsed":857,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["from transformers import pipeline, TextStreamer"],"id":"ea8a1fea-7499-4386-9dea-0509110f61af"},{"cell_type":"code","execution_count":32,"metadata":{"id":"52626888-1f6e-46b6-a8dd-836622149ff5","executionInfo":{"status":"ok","timestamp":1702305633700,"user_tz":-540,"elapsed":481,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["instruction_prompt_template = \"\"\"###System;다음은 매장에서 고객이 음식을 주문하는 주문 문장이다. 이를 분석하여 음식명, 옵션명, 수량을 추출하여 고객의 의도를 이해하고자 한다.\n","분석 결과를 완성해주기 바란다.\n","\n","### 주문 문장: {0} ### 분석 결과:\n","\"\"\"\n","\n","prompt_template = \"\"\"###System;{System}\n","###User;{User}\n","###Midm;\"\"\"\n","\n","default_system_msg = (\n"," \"너는 먼저 사용자가 입력한 주문 문장을 분석하는 에이전트이다. 이로부터 주문을 구성하는 음식명, 옵션명, 수량을 차례대로 추출해야 한다.\"\n",")"],"id":"52626888-1f6e-46b6-a8dd-836622149ff5"},{"cell_type":"code","execution_count":33,"metadata":{"id":"46e844fa-8f63-4359-a4fb-df66e8171796","executionInfo":{"status":"ok","timestamp":1702305636576,"user_tz":-540,"elapsed":1,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["evaluation_queries = [\n"," \"오늘은 비가오니깐 이거 먹자. 삼선짬뽕 곱배기 하나하구요, 사천 탕수육 중짜 한그릇 주세요.\",\n"," \"아이스아메리카노 톨사이즈 한잔 하고요. 딸기스무디 한잔 주세요. 또, 콜드브루라떼 하나요.\",\n"," \"참이슬 한병, 코카콜라 1.5리터 한병, 테슬라 한병이요.\",\n"," \"꼬막무침 1인분하고요, 닭도리탕 중자 주세요. 그리고 소주도 한병 주세요.\",\n"," \"김치찌개 3인분하고요, 계란말이 주세요.\",\n"," \"불고기버거세트 1개하고요 감자튀김 추가해주세요.\",\n"," \"불닭볶음면 1개랑 사리곰탕면 2개 주세요.\",\n"," \"카페라떼 아이스 샷추가 한잔하구요. 스콘 하나 주세요\",\n"," \"여기요 춘천닭갈비 4인분하고요. 라면사리 추가하겠습니다. 콜라 300ml 두캔주세요.\",\n"," \"있잖아요 조랭이떡국 3인분하고요. 
떡만두 한세트 주세요.\",\n"," \"깐풍탕수 2인분 하고요 콜라 1.5리터 한병이요.\",\n","]"],"id":"46e844fa-8f63-4359-a4fb-df66e8171796"},{"cell_type":"code","execution_count":34,"metadata":{"id":"1919cf1f-482e-4185-9d06-e3cea1918416","executionInfo":{"status":"ok","timestamp":1702305639801,"user_tz":-540,"elapsed":344,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["def wrapper_generate(model, input_prompt, do_stream=False):\n"," data = tokenizer(input_prompt, return_tensors=\"pt\")\n"," streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)\n"," input_ids = data.input_ids[..., :-1]\n"," with torch.no_grad():\n"," pred = model.generate(\n"," input_ids=input_ids.cuda(),\n"," streamer=streamer if do_stream else None,\n"," use_cache=True,\n"," max_new_tokens=float('inf'),\n"," do_sample=False\n"," )\n"," decoded_text = tokenizer.batch_decode(pred, skip_special_tokens=True)\n"," decoded_text = decoded_text[0].replace(\"<[!newline]>\", \"\\n\")\n"," return (decoded_text[len(input_prompt):])"],"id":"1919cf1f-482e-4185-9d06-e3cea1918416"},{"cell_type":"code","execution_count":35,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"eaac1f6f-c823-4488-8edb-2f931ddf0daa","outputId":"c632e94d-faad-4244-b32d-139ace8783f8","executionInfo":{"status":"ok","timestamp":1702306195075,"user_tz":-540,"elapsed":552708,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py:1473: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. Please use and modify the model generation configuration (see https://huggingface.co/docs/transformers/generation_strategies#default-text-generation-configuration )\n"," warnings.warn(\n"]}],"source":["eval_dic = {i:wrapper_generate(model=base_model, input_prompt=prompt_template.format(System=default_system_msg, User=evaluation_queries[i]))for i, query in enumerate(evaluation_queries)}"],"id":"eaac1f6f-c823-4488-8edb-2f931ddf0daa"},{"cell_type":"code","execution_count":36,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"fefd04ba-2ed8-4f84-bdd0-86d52b3f39f6","outputId":"0d52da0b-d64c-4d60-a624-81d094fbbb13","executionInfo":{"status":"ok","timestamp":1702306195075,"user_tz":-540,"elapsed":18,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["- 분석 결과 0: 음식명:삼선짬뽕, 옵션:곱배기, 수량:하나\n","- 분석 결과 1: 음식명:사천 탕수육, 옵션:중짜, 수량:한그릇\n"]}],"source":["print(eval_dic[0])"],"id":"fefd04ba-2ed8-4f84-bdd0-86d52b3f39f6"},{"cell_type":"markdown","metadata":{"id":"3f471e3a-723b-4df5-aa72-46f571f6bab6"},"source":["# 미세튜닝된 모델 로딩 후 테스트"],"id":"3f471e3a-723b-4df5-aa72-46f571f6bab6"},{"cell_type":"code","execution_count":37,"metadata":{"id":"a43bdd07-7555-42b2-9888-a614afec892f","executionInfo":{"status":"ok","timestamp":1702306199550,"user_tz":-540,"elapsed":368,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[],"source":["bnb_config = BitsAndBytesConfig(\n"," load_in_4bit=True,\n"," bnb_4bit_quant_type=\"nf4\",\n"," 
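# 4-bit NF4 quantization, with computations performed in bfloat16 (same settings as used for training)\n"," 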
bnb_4bit_compute_dtype=torch.bfloat16,\n",")"],"id":"a43bdd07-7555-42b2-9888-a614afec892f"},{"cell_type":"code","execution_count":39,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":705},"id":"39db2ee4-23c8-471f-89b2-bca34964bf81","outputId":"d00d2dc2-cd2f-480c-85a2-33cf265314b2","executionInfo":{"status":"error","timestamp":1702306279779,"user_tz":-540,"elapsed":15084,"user":{"displayName":"조수연","userId":"03810862007552836948"}}},"outputs":[{"output_type":"error","ename":"ValueError","evalue":"ignored","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m base_model = AutoModelForCausalLM.from_pretrained(\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mscript_args\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel_name\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mquantization_config\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mbnb_config\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mdevice_map\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"auto\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;31m# {\"\": Accelerator().local_process_index},\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mtrust_remote_code\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py\u001b[0m in \u001b[0;36mfrom_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 559\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 560\u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mregister\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__class__\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmodel_class\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mexist_ok\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 561\u001b[0;31m return model_class.from_pretrained(\n\u001b[0m\u001b[1;32m 562\u001b[0m \u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mmodel_args\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mhub_kwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 563\u001b[0m )\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py\u001b[0m in \u001b[0;36mfrom_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 3418\u001b[0m }\n\u001b[1;32m 3419\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"cpu\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdevice_map_without_lm_head\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m 
\u001b[0;32mor\u001b[0m \u001b[0;34m\"disk\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdevice_map_without_lm_head\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3420\u001b[0;31m raise ValueError(\n\u001b[0m\u001b[1;32m 3421\u001b[0m \"\"\"\n\u001b[1;32m 3422\u001b[0m \u001b[0mSome\u001b[0m \u001b[0mmodules\u001b[0m \u001b[0mare\u001b[0m \u001b[0mdispatched\u001b[0m \u001b[0mon\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mCPU\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mdisk\u001b[0m\u001b[0;34m.\u001b[0m \u001b[0mMake\u001b[0m \u001b[0msure\u001b[0m \u001b[0myou\u001b[0m \u001b[0mhave\u001b[0m \u001b[0menough\u001b[0m \u001b[0mGPU\u001b[0m \u001b[0mRAM\u001b[0m \u001b[0mto\u001b[0m \u001b[0mfit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mValueError\u001b[0m: \n Some modules are dispatched on the CPU or the disk. Make sure you have enough GPU RAM to fit\n the quantized model. If you want to dispatch the model on the CPU or the disk while keeping\n these modules in 32-bit, you need to set `load_in_8bit_fp32_cpu_offload=True` and pass a custom\n `device_map` to `from_pretrained`. Check\n https://huggingface.co/docs/transformers/main/en/main_classes/quantization#offload-between-cpu-and-gpu\n for more details.\n "]}],"source":["base_model = AutoModelForCausalLM.from_pretrained(\n"," script_args.model_name,\n"," quantization_config=bnb_config,\n"," device_map=\"auto\", # {\"\": Accelerator().local_process_index},\n"," trust_remote_code=True,\n"," use_auth_token=True,\n"," cache_dir=script_args.cache_dir,\n",")\n","base_model.config.use_cache = False"],"id":"39db2ee4-23c8-471f-89b2-bca34964bf81"},{"cell_type":"code","execution_count":null,"metadata":{"id":"b0b75ca4-730d-4bde-88bb-a86462a76d52"},"outputs":[],"source":["tokenizer = AutoTokenizer.from_pretrained(\n"," script_args.model_name,\n"," trust_remote_code=True,\n"," cache_dir=script_args.cache_dir,\n",")\n","\n","if getattr(tokenizer, \"pad_token\", None) is None:\n"," tokenizer.pad_token = tokenizer.eos_token\n","tokenizer.padding_side = \"right\" # Fix weird overflow issue with fp16 training\n","\n","tokenizer.add_special_tokens(dict(bos_token=''))\n","\n","trained_model.config.pad_token_id = tokenizer.pad_token_id\n","trained_model.config.bos_token_id = tokenizer.bos_token_id"],"id":"b0b75ca4-730d-4bde-88bb-a86462a76d52"},{"cell_type":"markdown","metadata":{"id":"X1tRCa4EiYXp"},"source":["추론 과정에서는 GPU 메모리를 약 5.5 GB 활용"],"id":"X1tRCa4EiYXp"},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"background_save":true,"base_uri":"https://localhost:8080/"},"id":"e374555b-9f8a-4617-8ea7-c1e6ee1b2999","outputId":"526d2827-6422-4399-d7ed-107b822b2bb2"},"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py:1473: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. 
Please use and modify the model generation configuration (see https://huggingface.co/docs/transformers/generation_strategies#default-text-generation-configuration )\n"," warnings.warn(\n"]},{"output_type":"stream","name":"stdout","text":["- 분석 결과 0: 음식명:삼선짬뽕, 옵션:곱배기, 수량:하나<[!newline]>- 분석 결과 1: 음식명:사천 탕수육, 옵션:중짜, 수량:한그릇\n","- 분석 결과 0: 음식명:아이스아메리카노,옵션:톨사이즈,수량:한잔<[!newline]>- 분석 결과 1: 음식명:딸기스무디,수량:한잔<[!newline]>- 분석 결과 2: 음식명:콜드브루라떼,수량:하나\n","- 분석 결과 0: 음식명:참이슬,수량:한병<[!newline]>- 분석 결과 1: 음식명:코카콜라,옵션:1.5리터,수량:한병<[!newline]>- 분석 결과 2: 음식명:테슬라,수량:한병\n","- 분석 결과 0: 음식명:꼬막무침, 수량:1인분<[!newline]>- 분석 결과 1: 음식명:닭도리탕, 옵션:중자<[!newline]>- 분석 결과 2: 음식명:소주, 수량:한병\n","- 분석 결과 0: 음식명:김치찌개, 수량:3인분<[!newline]>- 분석 결과 1: 음식명:계란말이\n","- 분석 결과 0: 음식명:불고기버거세트, 수량:1개<[!newline]>- 분석 결과 1: 음식명:감자튀김, 수량:추가\n","- 분석 결과 0: "]}],"source":["eval_dic = {i:wrapper_generate(model=trained_model, do_stream=True, input_prompt=prompt_template.format(System=default_system_msg, User=evaluation_queries[i]))for i, query in enumerate(evaluation_queries)}"],"id":"e374555b-9f8a-4617-8ea7-c1e6ee1b2999"},{"cell_type":"code","execution_count":null,"metadata":{"id":"5d055bb0-5e5f-4221-a634-45d903c0f3b5"},"outputs":[],"source":["print(eval_dic[0])"],"id":"5d055bb0-5e5f-4221-a634-45d903c0f3b5"}],"metadata":{"accelerator":"GPU","colab":{"provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.10.13"},"widgets":{"application/vnd.jupyter.widget-state+json":{"dbe8b80107f646fca9ce17fc6898688e":{"model_module":"@jupyter-widgets/controls","model_name":"VBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"VBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"VBoxView","box_style":"","children":["IPY_MODEL_38d4d232d70d49dd8c3ab620e6cfb96c","IPY_MODEL_7dcd8bfea49a447390fd3d693ce473f8","IPY_MODEL_a827efea829546b7b7e5e42a465849e4","IPY_MODEL_fee5d6bf794f4cb7962ef9985fbf4348"],"layout":"IPY_MODEL_e520cbc12c7f45809976dfbfcf56dd64"}},"25bab324b2b9446bad5f3a73eed40e68":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_cacc47dd52114b3caa6a0a420f748793","placeholder":"​","style":"IPY_MODEL_435d3880497f437fbe82c5c5aea4723b","value":"
Copy a token from your Hugging Face tokens page and paste it below. Immediately click login after copying your token or it might be stored in plain text in this notebook file.
"}},"1e5df26c96974f9e80ec411cc2efb005":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_f2c6a7c598a2446d980e5b099f8b0504","placeholder":"​","style":"IPY_MODEL_380d699b391e443594c77e0618acc1e6","value":""}},"726bbc9eda2647089f64254e9afc18a6":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_81c738cb1572429fad029c865af5864e","style":"IPY_MODEL_1dbd9abdfd9f441a9a2a92797469029f","value":true}},"730a80d2060d4c0d9ddd2e17f2da0045":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_bdff58ba27c74f89acc6ce2fa028b322","style":"IPY_MODEL_a8d2283aa6d44f1ab1549f4311e88e2d","tooltip":""}},"cd2ea8d1f93c436c8045979227f28f39":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ff6ee54fece6482fa4908c5bd6f35331","placeholder":"​","style":"IPY_MODEL_4552475fe488474e98941eb5bc34fe1e","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks.
"}},"e520cbc12c7f45809976dfbfcf56dd64":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"cacc47dd52114b3caa6a0a420f748793":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"435d3880497f437fbe82c5c5aea4723b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"f2c6a7c598a2446d980e5b099f8b0504":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"380d699b391e4435
94c77e0618acc1e6":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"81c738cb1572429fad029c865af5864e":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"1dbd9abdfd9f441a9a2a92797469029f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"bdff58ba27c74f89acc6ce2fa028b322":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a8d2283aa6d44f1ab1549f4311e88e2d":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"ff6ee54fece6482fa4908c5bd6f35331":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_vi
ew_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"4552475fe488474e98941eb5bc34fe1e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"349de155fbbb411b98558636e5b363e5":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_29721702addc4325b2d6578e51ad6212","placeholder":"​","style":"IPY_MODEL_ff3d0f971a534f23928c1c9b133ade05","value":"Connecting..."}},"29721702addc4325b2d6578e51ad6212":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ff3d0f971a534f23928c1c9b133ade05":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"38d4d232d70d49dd8c3ab620e6cfb96c":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_versi
on":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_bb9ba62e3cd74e5d965fd6d7cbfffcdb","placeholder":"​","style":"IPY_MODEL_6d01340c7ea248da9b089906ddb0743f","value":"Token is valid (permission: write)."}},"7dcd8bfea49a447390fd3d693ce473f8":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_520fd7520fe4457f88e1e7bdcbff3e99","placeholder":"​","style":"IPY_MODEL_66775e202d174977937a2bb33552e08d","value":"Your token has been saved in your configured git credential helpers (store)."}},"a827efea829546b7b7e5e42a465849e4":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ab2576b47a964778a4fb23a0177c2372","placeholder":"​","style":"IPY_MODEL_a99d5e99af0748a289fa755b80c2ceaf","value":"Your token has been saved to /root/.cache/huggingface/token"}},"fee5d6bf794f4cb7962ef9985fbf4348":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_129d75c4582a42b98245c5a79ea22525","placeholder":"​","style":"IPY_MODEL_92fdf3c90389449595e1d7b3605f6953","value":"Login 
successful"}},"bb9ba62e3cd74e5d965fd6d7cbfffcdb":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"6d01340c7ea248da9b089906ddb0743f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"520fd7520fe4457f88e1e7bdcbff3e99":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"66775e202d174977937a2bb33552e08d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ab2576b47a964778a4fb23a0177c2372":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":n
ull,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"a99d5e99af0748a289fa755b80c2ceaf":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"129d75c4582a42b98245c5a79ea22525":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"92fdf3c90389449595e1d7b3605f6953":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b2e8914a604a4cd7a8160a247b46897e":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_80b1d408c82c4a16b237c9ca6ff853a9","IPY_MODEL_9ae5d008fbdb49e793eeca063f8a9b79","IPY_MODEL_e0f4a69b292d4821b24b1e0f8c85d994"],"layout":"IPY_MODEL_ac74b0890fdb4386a50184258f6efea6"}},"80b1d408c82c4a16b237c9ca6ff853a9":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_329a5858a60f4140b693ad2d40f2666c","placeholder":"​","style":"IPY_MODEL_ed062bc006874d5a975c048bf1b49111","value":"Loading checkpoint shards: 
100%"}},"9ae5d008fbdb49e793eeca063f8a9b79":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_0405581206a04b8f9e462b4a97a9b396","max":2,"min":0,"orientation":"horizontal","style":"IPY_MODEL_d39c0747d6da4f9095fb300b7ecdee14","value":2}},"e0f4a69b292d4821b24b1e0f8c85d994":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_35a047ec6fdd44df851354380808b081","placeholder":"​","style":"IPY_MODEL_51396a17ef894a3dbddbcc21f59e6fe9","value":" 2/2 [01:19<00:00, 38.18s/it]"}},"ac74b0890fdb4386a50184258f6efea6":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"329a5858a60f4140b693ad2d40f2666c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"ed062bc006874d5a975c048bf1b49111":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"D
escriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"0405581206a04b8f9e462b4a97a9b396":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d39c0747d6da4f9095fb300b7ecdee14":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"35a047ec6fdd44df851354380808b081":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"51396a17ef894a3dbddbcc21f59e6fe9":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}}},"nbformat":4,"nbformat_minor":5}