deeksonparlma committed · Commit de3ad5a · 1 Parent(s): 0fd739c
reshape
Browse files
- app.py +5 -3
- retrain.ipynb +66 -4
app.py
CHANGED
@@ -35,9 +35,11 @@ def classify_text(inp):
     # reshape the input to 2D
     # convert the input to a numpy array
     # return model.predict( np.array(inp).reshape(1, -1) )
-
-
-
+    # dtype='numeric' is not compatible with arrays of bytes/strings. Convert your data to numeric values explicitly instead.
+
+    # Convert inp to numeric values explicitly instead
+    inp = np.array(inp).reshape(1, -1)
+    return model.predict(inp)
 
     # # encode the input text
     # encoded_input = tokenizer(text, return_tensors='pt')
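The pattern the added lines implement is scikit-learn's requirement that predict() receive a 2D numeric array. A minimal, self-contained sketch of that reshape-before-predict step, using toy data and a stand-in LogisticRegression rather than the app's real model:

import numpy as np
from sklearn.linear_model import LogisticRegression

# Toy training data: 4 samples with 2 numeric features each (stand-ins for the app's features).
X = np.array([[0.0, 1.0], [1.0, 0.0], [0.9, 0.1], [0.1, 0.9]])
y = np.array([0, 1, 1, 0])
model = LogisticRegression().fit(X, y)

def classify(inp):
    # predict() expects shape (n_samples, n_features), so a single sample is
    # converted to numeric values and reshaped to (1, -1) first.
    inp = np.array(inp, dtype=float).reshape(1, -1)
    return model.predict(inp)

print(classify([0.2, 0.8]))  # e.g. array([0])

Note that if inp is raw text, the reshape alone will not clear the "arrays of bytes/strings" error quoted in the comment; the text first has to go through whatever vectorizer or encoder produced the model's training features.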
retrain.ipynb
CHANGED
@@ -2,12 +2,66 @@
The first code cell is rewritten. Removed:
-    "execution_count":
-    "outputs": [
-    "
-    "\n"
The cell now records execution_count 2 together with the outputs captured during that run and its new source:
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Widget Javascript not detected. It may not be installed or enabled properly. Reconnecting the current kernel may help.\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "15b5be9109db4967b5495c8b1110fbc6"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "ename": "OSError",
     "evalue": "Can't load the configuration of 'rabiaqayyum/autotrain-mental-health-analysis-752423172'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'rabiaqayyum/autotrain-mental-health-analysis-752423172' is the correct path to a directory containing a config.json file",
     "output_type": "error",
     "traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/configuration_utils.py\u001b[0m in \u001b[0;36m_get_config_dict\u001b[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001b[0m\n\u001b[1;32m 619\u001b[0m \u001b[0;31m# Load from local folder or from cache or download from model Hub and cache\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 620\u001b[0;31m resolved_config_file = cached_file(\n\u001b[0m\u001b[1;32m 621\u001b[0m \u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/utils/hub.py\u001b[0m in \u001b[0;36mcached_file\u001b[0;34m(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, subfolder, user_agent, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash)\u001b[0m\n\u001b[1;32m 408\u001b[0m \u001b[0;31m# Load from URL or cache if already cached\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 409\u001b[0;31m resolved_file = hf_hub_download(\n\u001b[0m\u001b[1;32m 410\u001b[0m \u001b[0mpath_or_repo_id\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py\u001b[0m in \u001b[0;36m_inner_fn\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 124\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 125\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py\u001b[0m in \u001b[0;36mhf_hub_download\u001b[0;34m(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, user_agent, force_download, force_filename, proxies, etag_timeout, resume_download, token, local_files_only, legacy_cache_layout)\u001b[0m\n\u001b[1;32m 1282\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1283\u001b[0;31m http_get(\n\u001b[0m\u001b[1;32m 1284\u001b[0m \u001b[0murl_to_download\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py\u001b[0m in \u001b[0;36mhttp_get\u001b[0;34m(url, temp_file, proxies, resume_size, headers, timeout, max_retries)\u001b[0m\n\u001b[1;32m 521\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 522\u001b[0;31m progress = tqdm(\n\u001b[0m\u001b[1;32m 523\u001b[0m \u001b[0munit\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"B\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/huggingface_hub/utils/tqdm.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"disable\"\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 124\u001b[0;31m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 125\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/tqdm/notebook.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdisp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdisplay\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 250\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolour\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcolour\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 251\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/tqdm/notebook.py\u001b[0m in \u001b[0;36mcolour\u001b[0;34m(self, bar_color)\u001b[0m\n\u001b[1;32m 208\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'container'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 209\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcontainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchildren\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstyle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbar_color\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbar_color\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 210\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mAttributeError\u001b[0m: 'FloatProgress' object has no attribute 'style'",
"\nDuring handling of the above exception, another exception occurred:\n",
"\u001b[0;31mOSError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m/tmp/ipykernel_23856/169314155.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtransformers\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mAutoModelForSequenceClassification\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mAutoTokenizer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mAutoModelForSequenceClassification\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfrom_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"rabiaqayyum/autotrain-mental-health-analysis-752423172\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0muse_auth_token\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mtokenizer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mAutoTokenizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfrom_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"rabiaqayyum/autotrain-mental-health-analysis-752423172\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0muse_auth_token\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py\u001b[0m in \u001b[0;36mfrom_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 432\u001b[0m \u001b[0mhub_kwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpop\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mname\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mhub_kwargs_names\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mname\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 433\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mPretrainedConfig\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 434\u001b[0;31m config, kwargs = AutoConfig.from_pretrained(\n\u001b[0m\u001b[1;32m 435\u001b[0m \u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 436\u001b[0m \u001b[0mreturn_unused_kwargs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py\u001b[0m in \u001b[0;36mfrom_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001b[0m\n\u001b[1;32m 850\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"name_or_path\"\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 851\u001b[0m \u001b[0mtrust_remote_code\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpop\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"trust_remote_code\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 852\u001b[0;31m \u001b[0mconfig_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0munused_kwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mPretrainedConfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_config_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 853\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"auto_map\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mconfig_dict\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;34m\"AutoConfig\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mconfig_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"auto_map\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 854\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mtrust_remote_code\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/configuration_utils.py\u001b[0m in \u001b[0;36mget_config_dict\u001b[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001b[0m\n\u001b[1;32m 563\u001b[0m \u001b[0moriginal_kwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdeepcopy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 564\u001b[0m \u001b[0;31m# Get config dict associated with the base config file\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 565\u001b[0;31m \u001b[0mconfig_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_config_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 566\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"_commit_hash\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mconfig_dict\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 567\u001b[0m \u001b[0moriginal_kwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"_commit_hash\"\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconfig_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"_commit_hash\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/.local/lib/python3.10/site-packages/transformers/configuration_utils.py\u001b[0m in \u001b[0;36m_get_config_dict\u001b[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001b[0m\n\u001b[1;32m 639\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 640\u001b[0m \u001b[0;31m# For any other exception, we throw a generic error.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 641\u001b[0;31m raise EnvironmentError(\n\u001b[0m\u001b[1;32m 642\u001b[0m \u001b[0;34mf\"Can't load the configuration of '{pretrained_model_name_or_path}'. If you were trying to load it\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 643\u001b[0m \u001b[0;34m\" from 'https://huggingface.co/models', make sure you don't have a local directory with the same\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mOSError\u001b[0m: Can't load the configuration of 'rabiaqayyum/autotrain-mental-health-analysis-752423172'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'rabiaqayyum/autotrain-mental-health-analysis-752423172' is the correct path to a directory containing a config.json file"
     ]
    }
   ],
   "source": [
    "\n",
    "from transformers import AutoModelForSequenceClassification, AutoTokenizer\n",
    "\n",
    "model = AutoModelForSequenceClassification.from_pretrained(\"rabiaqayyum/autotrain-mental-health-analysis-752423172\", use_auth_token=True)\n",
    "\n",
    "tokenizer = AutoTokenizer.from_pretrained(\"rabiaqayyum/autotrain-mental-health-analysis-752423172\", use_auth_token=True)\n",
    "\n",
    "inputs = tokenizer(\"I love AutoTrain\", return_tensors=\"pt\")\n",
    "\n",
    "outputs = model(**inputs)\n",
    "\n",
    "# show data from model\n",
    "outputs"
   ]
  }
 ],
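The traceback above bottoms out in the notebook progress bar (AttributeError: 'FloatProgress' object has no attribute 'style'), which then surfaces from from_pretrained as the OSError. A hedged sketch of one way to retry the cell, assuming the ipywidgets/tqdm progress widget is what broke the download; disabling huggingface_hub's progress bars is a real switch, but whether it fixes this particular environment is an assumption:

import os
# huggingface_hub reads this env var and skips its tqdm/ipywidgets progress bars.
os.environ["HF_HUB_DISABLE_PROGRESS_BARS"] = "1"

from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "rabiaqayyum/autotrain-mental-health-analysis-752423172"
# use_auth_token=True reuses the token from `huggingface-cli login`, as in the cell above.
model = AutoModelForSequenceClassification.from_pretrained(repo_id, use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id, use_auth_token=True)

inputs = tokenizer("I love AutoTrain", return_tensors="pt")
outputs = model(**inputs)
print(outputs.logits)

Pinning mutually compatible ipywidgets and tqdm versions in the environment would be the other obvious route.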
@@ -18,7 +72,15 @@
   "name": "python3"
  },
  "language_info": {
+  "codemirror_mode": {
+   "name": "ipython",
+   "version": 3
+  },
+  "file_extension": ".py",
+  "mimetype": "text/x-python",
   "name": "python",
+  "nbconvert_exporter": "python",
+  "pygments_lexer": "ipython3",
   "version": "3.10.7"
  },
  "orig_nbformat": 4,
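The second hunk only adds the language_info fields that Jupyter fills in when a notebook is saved after execution (codemirror_mode, file_extension, mimetype, nbconvert_exporter, pygments_lexer). A small sketch, assuming the file sits in the working directory as retrain.ipynb, for checking that such metadata still validates against the notebook schema after edits:

import nbformat

# Read the notebook as nbformat v4 and validate it; validate() raises
# nbformat.ValidationError if the JSON no longer matches the schema.
nb = nbformat.read("retrain.ipynb", as_version=4)
nbformat.validate(nb)
print(nb.metadata["language_info"])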