KrithikV committed
Commit
dbc2bad
1 Parent(s): b5d6de4

HuggingFaceUser/adapter-medmobile

README.md CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [microsoft/Phi-3-mini-4k-instruct](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) on an unknown dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.6528
+ - Loss: 0.8445
 
 ## Model description
 
@@ -52,19 +52,97 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:----:|:---------------:|
- | 0.7804 | 0.2203 | 100 | 0.6979 |
- | 0.6811 | 0.4405 | 200 | 0.6706 |
- | 0.6681 | 0.6608 | 300 | 0.6644 |
- | 0.6622 | 0.8811 | 400 | 0.6613 |
- | 0.6602 | 1.1013 | 500 | 0.6592 |
- | 0.6581 | 1.3216 | 600 | 0.6576 |
- | 0.6564 | 1.5419 | 700 | 0.6563 |
- | 0.6557 | 1.7621 | 800 | 0.6553 |
- | 0.6541 | 1.9824 | 900 | 0.6545 |
- | 0.6531 | 2.2026 | 1000 | 0.6540 |
- | 0.6506 | 2.4229 | 1100 | 0.6534 |
- | 0.651 | 2.6432 | 1200 | 0.6530 |
- | 0.6512 | 2.8634 | 1300 | 0.6528 |
+ | 1.0374 | 0.0329 | 100 | 1.0194 |
+ | 0.9715 | 0.0658 | 200 | 0.9314 |
+ | 0.9106 | 0.0987 | 300 | 0.8969 |
+ | 0.888 | 0.1316 | 400 | 0.8869 |
+ | 0.8902 | 0.1645 | 500 | 0.8813 |
+ | 0.8826 | 0.1974 | 600 | 0.8777 |
+ | 0.8763 | 0.2303 | 700 | 0.8745 |
+ | 0.8728 | 0.2632 | 800 | 0.8723 |
+ | 0.8707 | 0.2961 | 900 | 0.8701 |
+ | 0.8702 | 0.3289 | 1000 | 0.8684 |
+ | 0.8631 | 0.3618 | 1100 | 0.8664 |
+ | 0.8623 | 0.3947 | 1200 | 0.8647 |
+ | 0.8655 | 0.4276 | 1300 | 0.8624 |
+ | 0.863 | 0.4605 | 1400 | 0.8602 |
+ | 0.858 | 0.4934 | 1500 | 0.8586 |
+ | 0.859 | 0.5263 | 1600 | 0.8578 |
+ | 0.8527 | 0.5592 | 1700 | 0.8569 |
+ | 0.8587 | 0.5921 | 1800 | 0.8563 |
+ | 0.8551 | 0.625 | 1900 | 0.8557 |
+ | 0.8548 | 0.6579 | 2000 | 0.8550 |
+ | 0.8515 | 0.6908 | 2100 | 0.8546 |
+ | 0.8531 | 0.7237 | 2200 | 0.8542 |
+ | 0.8567 | 0.7566 | 2300 | 0.8535 |
+ | 0.8589 | 0.7895 | 2400 | 0.8532 |
+ | 0.8547 | 0.8224 | 2500 | 0.8529 |
+ | 0.8537 | 0.8553 | 2600 | 0.8525 |
+ | 0.85 | 0.8882 | 2700 | 0.8521 |
+ | 0.8518 | 0.9211 | 2800 | 0.8519 |
+ | 0.8456 | 0.9539 | 2900 | 0.8515 |
+ | 0.8585 | 0.9868 | 3000 | 0.8512 |
+ | 0.849 | 1.0197 | 3100 | 0.8509 |
+ | 0.8549 | 1.0526 | 3200 | 0.8507 |
+ | 0.8502 | 1.0855 | 3300 | 0.8504 |
+ | 0.8504 | 1.1184 | 3400 | 0.8502 |
+ | 0.8488 | 1.1513 | 3500 | 0.8500 |
+ | 0.8504 | 1.1842 | 3600 | 0.8497 |
+ | 0.8465 | 1.2171 | 3700 | 0.8495 |
+ | 0.8471 | 1.25 | 3800 | 0.8494 |
+ | 0.8467 | 1.2829 | 3900 | 0.8491 |
+ | 0.8439 | 1.3158 | 4000 | 0.8489 |
+ | 0.8467 | 1.3487 | 4100 | 0.8487 |
+ | 0.8461 | 1.3816 | 4200 | 0.8485 |
+ | 0.8525 | 1.4145 | 4300 | 0.8483 |
+ | 0.8426 | 1.4474 | 4400 | 0.8481 |
+ | 0.8479 | 1.4803 | 4500 | 0.8480 |
+ | 0.853 | 1.5132 | 4600 | 0.8478 |
+ | 0.8432 | 1.5461 | 4700 | 0.8477 |
+ | 0.8416 | 1.5789 | 4800 | 0.8475 |
+ | 0.8527 | 1.6118 | 4900 | 0.8474 |
+ | 0.849 | 1.6447 | 5000 | 0.8472 |
+ | 0.8446 | 1.6776 | 5100 | 0.8471 |
+ | 0.8427 | 1.7105 | 5200 | 0.8469 |
+ | 0.8464 | 1.7434 | 5300 | 0.8468 |
+ | 0.8444 | 1.7763 | 5400 | 0.8466 |
+ | 0.8479 | 1.8092 | 5500 | 0.8465 |
+ | 0.8452 | 1.8421 | 5600 | 0.8465 |
+ | 0.8387 | 1.875 | 5700 | 0.8466 |
+ | 0.845 | 1.9079 | 5800 | 0.8463 |
+ | 0.8402 | 1.9408 | 5900 | 0.8461 |
+ | 0.8459 | 1.9737 | 6000 | 0.8460 |
+ | 0.8431 | 2.0066 | 6100 | 0.8460 |
+ | 0.8395 | 2.0395 | 6200 | 0.8459 |
+ | 0.8395 | 2.0724 | 6300 | 0.8458 |
+ | 0.8457 | 2.1053 | 6400 | 0.8457 |
+ | 0.8438 | 2.1382 | 6500 | 0.8457 |
+ | 0.8411 | 2.1711 | 6600 | 0.8456 |
+ | 0.8386 | 2.2039 | 6700 | 0.8456 |
+ | 0.8393 | 2.2368 | 6800 | 0.8454 |
+ | 0.8406 | 2.2697 | 6900 | 0.8454 |
+ | 0.8386 | 2.3026 | 7000 | 0.8453 |
+ | 0.8456 | 2.3355 | 7100 | 0.8453 |
+ | 0.8424 | 2.3684 | 7200 | 0.8452 |
+ | 0.8437 | 2.4013 | 7300 | 0.8451 |
+ | 0.8426 | 2.4342 | 7400 | 0.8451 |
+ | 0.8393 | 2.4671 | 7500 | 0.8450 |
+ | 0.8398 | 2.5 | 7600 | 0.8450 |
+ | 0.8434 | 2.5329 | 7700 | 0.8449 |
+ | 0.8456 | 2.5658 | 7800 | 0.8449 |
+ | 0.8393 | 2.5987 | 7900 | 0.8449 |
+ | 0.8401 | 2.6316 | 8000 | 0.8448 |
+ | 0.838 | 2.6645 | 8100 | 0.8448 |
+ | 0.8432 | 2.6974 | 8200 | 0.8447 |
+ | 0.8471 | 2.7303 | 8300 | 0.8447 |
+ | 0.8435 | 2.7632 | 8400 | 0.8446 |
+ | 0.8441 | 2.7961 | 8500 | 0.8446 |
+ | 0.8399 | 2.8289 | 8600 | 0.8446 |
+ | 0.8391 | 2.8618 | 8700 | 0.8446 |
+ | 0.8432 | 2.8947 | 8800 | 0.8446 |
+ | 0.8459 | 2.9276 | 8900 | 0.8446 |
+ | 0.8446 | 2.9605 | 9000 | 0.8445 |
+ | 0.8412 | 2.9934 | 9100 | 0.8445 |
 
 
 ### Framework versions
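
For context, the README's base-model link plus the adapter files changed in this commit are everything needed to run the model. Below is a minimal loading sketch that is not part of the commit: it assumes the adapter lives under the repo id shown in the commit title (`HuggingFaceUser/adapter-medmobile`, which may be a placeholder) and that a recent `transformers`/`peft` stack is installed; the prompt is purely illustrative.

```python
# Minimal loading sketch (not part of this commit). The adapter repo id is
# taken from the commit title and may be a placeholder for the real repo name.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_ID = "microsoft/Phi-3-mini-4k-instruct"      # base model named in the README
ADAPTER_ID = "HuggingFaceUser/adapter-medmobile"  # assumed adapter repo id

tokenizer = AutoTokenizer.from_pretrained(BASE_ID)
base = AutoModelForCausalLM.from_pretrained(BASE_ID)  # older transformers may need trust_remote_code=True
model = PeftModel.from_pretrained(base, ADAPTER_ID)   # loads adapter_config.json + adapter_model.safetensors

prompt = "What are common symptoms of anemia?"        # illustrative prompt only
inputs = tokenizer(prompt, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```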
adapter_config.json CHANGED
@@ -21,12 +21,12 @@
  "revision": null,
  "target_modules": [
    "v_proj",
-   "down_proj",
-   "k_proj",
    "up_proj",
-   "o_proj",
    "gate_proj",
-   "q_proj"
+   "down_proj",
+   "q_proj",
+   "o_proj",
+   "k_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:85220d193374a84ae4a8d1b086d0ab8217ddf0dcc51e1ea4af458a92031e63a1
+ oid sha256:03269857638dc656da278ff44685ba779bbd1f762db3c85d3e55ea8325dbe2f4
 size 35668592
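
`adapter_model.safetensors` is stored through Git LFS, so the diff only swaps the pointer's sha256 oid while the payload size stays at 35,668,592 bytes. A small sketch for checking a locally downloaded copy against the new oid (the local filename is assumed):

```python
# Sketch: verify a downloaded weight file against the sha256 oid in its LFS pointer.
import hashlib

EXPECTED_OID = "03269857638dc656da278ff44685ba779bbd1f762db3c85d3e55ea8325dbe2f4"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of("adapter_model.safetensors") == EXPECTED_OID  # assumed local path
```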
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
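
Setting `"truncation": null` means the fast tokenizer no longer truncates to 512 tokens by default, as the previous config did (LongestFirst strategy, right-side truncation, stride 0). A sketch of how the old behaviour could be restored at load time, assuming the tokenizer ships in the same adapter repo:

```python
# Sketch only: restore the previous 512-token truncation on the fast tokenizer.
# The repo id is taken from the commit title and may be a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("HuggingFaceUser/adapter-medmobile")

# Option 1: configure the backend tokenizer the way the old tokenizer.json did.
tok.backend_tokenizer.enable_truncation(
    max_length=512, stride=0, strategy="longest_first", direction="right"
)

# Option 2: request truncation per call instead of baking it into the config.
ids = tok("a very long clinical note ...", truncation=True, max_length=512)
```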
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:1e77e9a2758d546ac53f6a040c3b0e6240a576d85c2041e6548bf14bf06256cb
+ oid sha256:8878f600d4eaa80da8759065d02110b078211820bd1ce796ab6d601701cd3a70
 size 5432
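
`training_args.bin` is the `TrainingArguments` object the `Trainer` pickles alongside the model, so the new oid just reflects a change in the recorded arguments. A quick inspection sketch, assuming the file follows that convention (on recent PyTorch releases `weights_only=False` is required because the file is not a plain tensor checkpoint):

```python
# Sketch: inspect the pickled TrainingArguments saved next to the adapter.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```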