dansbecker committed on
Commit
b879b12
1 Parent(s): 46c14a5

A standard update

.ipynb_checkpoints/hn_hiring_posts-checkpoint.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 4,
6
  "id": "056aa255-fda1-4cde-be24-459f6ad2c8b9",
7
  "metadata": {},
8
  "outputs": [
@@ -17,12 +17,13 @@
17
  ],
18
  "source": [
19
  "import pandas as pd\n",
20
- "from bs4 import BeautifulSoup\n",
21
  "\n",
22
- "df = pd.read_csv('./bq-results-20211206-133858-irtkgx60el7i.csv').drop(['ParentUrl', 'ParentAuthor', 'ParentTime', 'ParentScore'], axis=1)\n",
 
 
23
  "df.text = df.text.str.replace('<p>', '\\n')\n",
24
  "strings_to_remove = ['rel=\"nofollow\"', '<pre>', '</pre>', '<i>', '</i>', '<code>', '</code>', '&gt;']\n",
25
- "email_regex = '[a-zA-Z0-9._-]{0,30}@[a-zA-Z0-9._-]{0,20}\\.[a-zA-Z0-9_-]{2,3}'\n",
26
  "munged_url_regex = 'http(s)?:\\&\\#.*?\\<\\/a>'\n",
27
  "\n",
28
  "for string in strings_to_remove:\n",
@@ -44,113 +45,28 @@
44
  },
45
  {
46
  "cell_type": "code",
47
- "execution_count": 24,
48
- "id": "1ee19b74-2085-40d8-b9d2-51b526f4d438",
49
  "metadata": {},
50
  "outputs": [],
51
  "source": [
52
  "import datasets\n",
53
  "from huggingface_hub import create_repo\n",
54
- "from huggingface_hub import Repository"
55
- ]
56
- },
57
- {
58
- "cell_type": "code",
59
- "execution_count": 43,
60
- "id": "57ca7c96-d94c-4e65-8113-da7729558247",
61
- "metadata": {},
62
- "outputs": [
63
- {
64
- "name": "stderr",
65
- "output_type": "stream",
66
- "text": [
67
- "/Users/dan.becker/Desktop/hackernews_hiring_dataset is already a clone of https://huggingface.co/datasets/dansbecker/hackernews_hiring_posts. Make sure you pull the latest changes with `repo.git_pull()`.\n"
68
- ]
69
- },
70
- {
71
- "ename": "OSError",
72
- "evalue": "fatal: pathspec 'hackernews_hiring_dataset/data/*' did not match any files\n",
73
- "output_type": "error",
74
- "traceback": [
75
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
76
- "\u001b[0;31mCalledProcessError\u001b[0m Traceback (most recent call last)",
77
- "\u001b[0;32m/usr/local/anaconda3/lib/python3.8/site-packages/huggingface_hub/repository.py\u001b[0m in \u001b[0;36mgit_add\u001b[0;34m(self, pattern, auto_lfs_track)\u001b[0m\n\u001b[1;32m 908\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 909\u001b[0;31m result = subprocess.run(\n\u001b[0m\u001b[1;32m 910\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"git\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"add\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"-v\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpattern\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
78
- "\u001b[0;32m/usr/local/anaconda3/lib/python3.8/subprocess.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(input, capture_output, timeout, check, *popenargs, **kwargs)\u001b[0m\n\u001b[1;32m 515\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcheck\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mretcode\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 516\u001b[0;31m raise CalledProcessError(retcode, process.args,\n\u001b[0m\u001b[1;32m 517\u001b[0m output=stdout, stderr=stderr)\n",
79
- "\u001b[0;31mCalledProcessError\u001b[0m: Command '['git', 'add', '-v', 'hackernews_hiring_dataset/data/*']' returned non-zero exit status 128.",
80
- "\nDuring handling of the above exception, another exception occurred:\n",
81
- "\u001b[0;31mOSError\u001b[0m Traceback (most recent call last)",
82
- "\u001b[0;32m<ipython-input-43-bd751bbd86f5>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mrepo_url\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'https://huggingface.co/datasets/dansbecker/hackernews_hiring_posts'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mrepo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mRepository\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocal_dir\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"hackernews_hiring_dataset\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclone_from\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrepo_url\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 9\u001b[0;31m \u001b[0mrepo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgit_add\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata_path\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;34m'/*'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 10\u001b[0m \u001b[0mrepo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgit_commit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Push data from notebook\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
83
- "\u001b[0;32m/usr/local/anaconda3/lib/python3.8/site-packages/huggingface_hub/repository.py\u001b[0m in \u001b[0;36mgit_add\u001b[0;34m(self, pattern, auto_lfs_track)\u001b[0m\n\u001b[1;32m 917\u001b[0m \u001b[0mlogger\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"Adding to index:\\n{result.stdout}\\n\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 918\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0msubprocess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mCalledProcessError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mexc\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 919\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mEnvironmentError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstderr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 920\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 921\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mgit_commit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcommit_message\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mstr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"commit files to HF hub\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
84
- "\u001b[0;31mOSError\u001b[0m: fatal: pathspec 'hackernews_hiring_dataset/data/*' did not match any files\n"
85
- ]
86
- }
87
- ],
88
- "source": [
89
- "all_datasets = datasets.dataset_dict.DatasetDict({'hiring': datasets.Dataset.from_pandas(hiring_df),\n",
90
- " 'wants_to_be_hired': datasets.Dataset.from_pandas(wants_to_be_hired_df),\n",
91
- " 'freelancer': datasets.Dataset.from_pandas(freelancer_df)})\n",
92
  "data_path = './data'\n",
93
  "all_datasets.save_to_disk(data_path)\n",
94
  "\n",
95
  "repo_url = 'https://huggingface.co/datasets/dansbecker/hackernews_hiring_posts'\n",
96
  "repo = Repository(local_dir=\".\", clone_from=repo_url)\n",
97
- "repo.git_pull()\n",
98
  "repo.git_add(data_path)\n",
99
- "repo.git_commit(\"Push data from notebook\")"
100
- ]
101
- },
102
- {
103
- "cell_type": "code",
104
- "execution_count": 36,
105
- "id": "2e7fbc2c-550e-4266-b7f2-63287953fdc7",
106
- "metadata": {},
107
- "outputs": [
108
- {
109
- "data": {
110
- "text/plain": [
111
- "\u001b[0;31mSignature:\u001b[0m \u001b[0mrepo\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgit_commit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcommit_message\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mstr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'commit files to HF hub'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
112
- "\u001b[0;31mDocstring:\u001b[0m git commit\n",
113
- "\u001b[0;31mFile:\u001b[0m /usr/local/anaconda3/lib/python3.8/site-packages/huggingface_hub/repository.py\n",
114
- "\u001b[0;31mType:\u001b[0m method\n"
115
- ]
116
- },
117
- "metadata": {},
118
- "output_type": "display_data"
119
- }
120
- ],
121
- "source": [
122
- "repo.git_commit?"
123
  ]
124
- },
125
- {
126
- "cell_type": "code",
127
- "execution_count": 23,
128
- "id": "1421d124-a4ae-401e-9753-27a82e9f35d4",
129
- "metadata": {},
130
- "outputs": [
131
- {
132
- "ename": "TypeError",
133
- "evalue": "Expected a pyarrow.Table or a datasets.table.Table object, but got {'hiring': text \\\n0 Kindling -- New York City\\nLead Application De... \n1 Angular UI Developers: Help Improve the State ... \n2 Cir.cl is a funded startup founded by former O... \n3 Backplane - Palo Alto, CA\\n== About Us ==\\nWe ... \n5 We are building supercomputers and bespoke sof... \n... ... \n111574 LOCATION: Mountain View, CA |Full-time |ONSITE... \n111575 Eaze | Senior Software Engineer | San Francisc... \n111576 Auckland, New Zealand | REMOTE | VISA\\n---\\n# ... \n111577 Elementum | Mountain View | Onsite\\nI am the H... \n111578 SeatGeek — New York, NY — Full Time — We're a ... \n\n CommentTime CommentAuthor \\\n0 2014-08-01 16:15:39 UTC toestues \n1 2014-08-01 16:15:52 UTC AngularJobs \n2 2014-05-01 20:50:30 UTC cld276 \n3 2014-05-01 20:50:39 UTC chengyinliu \n5 2013-04-01 19:24:46 UTC kaib \n... ... ... \n111574 2017-07-03 23:46:02 UTC katrinadurant \n111575 2017-07-03 21:05:44 UTC spade \n111576 2017-07-03 21:05:39 UTC neftaly \n111577 2017-07-04 03:16:13 UTC sv13 \n111578 2017-07-04 13:05:00 UTC jack7890 \n\n ParentTitle \n0 Ask HN: Who is hiring? (August 2014) \n1 Ask HN: Who is hiring? (August 2014) \n2 Ask HN: Who is hiring? (May 2014) \n3 Ask HN: Who is hiring? (May 2014) \n5 Ask HN: Who is hiring? (April 2013) \n... ... \n111574 Ask HN: Who is hiring? (July 2017) \n111575 Ask HN: Who is hiring? (July 2017) \n111576 Ask HN: Who is hiring? (July 2017) \n111577 Ask HN: Who is hiring? (July 2017) \n111578 Ask HN: Who is hiring? (July 2017) \n\n[75974 rows x 4 columns], 'wants_to_be_hired': text \\\n7 Location: FL\\n Remote:Yes, Part time preferred... \n8 Willing to relocate: No\\nLocation: London, UK\\... \n10 Location: Ukraine\\nRemote: Yes\\nWilling to rel... \n11 Location: London\\nRemote: No\\nWilling to reloc... \n23 Location: India\\nRemote: Yes\\nTechnologies: Py... \n... ... \n111535 Location: Montreal, Quebec (Canada)\\nRemote: N... \n111542 Location: Seattle, WA\\nRemote: yes/exclusively... \n111543 Location: India\\nRemote: Yes\\nWilling to reloc... \n111562 Hi, I am Sagar Ghai, a recent Computer Science... \n111568 Location: Baltimore, Delaware\\nRemote: Preferr... \n\n CommentTime CommentAuthor \\\n7 2019-12-03 14:53:37 UTC kanagac \n8 2021-09-01 16:29:21 UTC electronstudio \n10 2014-11-01 21:46:49 UTC andrey_utkin \n11 2014-09-02 21:26:34 UTC mikemases \n23 2021-06-02 02:40:37 UTC kgritesh \n... ... ... \n111535 2017-07-05 18:12:05 UTC ajgaba \n111542 2017-07-03 16:33:10 UTC fuzzy-logic \n111543 2017-07-04 16:17:41 UTC kshk123 \n111562 2017-07-04 08:34:09 UTC sagarghai \n111568 2017-07-05 21:33:02 UTC wernercd \n\n ParentTitle \n7 Ask HN: Who wants to be hired? (December 2019) \n8 Ask HN: Who wants to be hired? (September 2021) \n10 Ask HN: Who wants to be hired? (November 2014) \n11 Ask HN: Who wants to be hired? (September 2014) \n23 Ask HN: Who wants to be hired? (June 2021) \n... ... \n111535 Ask HN: Who wants to be hired? (July 2017) \n111542 Ask HN: Who wants to be hired? (July 2017) \n111543 Ask HN: Who wants to be hired? (July 2017) \n111562 Ask HN: Who wants to be hired? (July 2017) \n111568 Ask HN: Who wants to be hired? (July 2017) \n\n[20262 rows x 4 columns], 'freelancer': text \\\n4 SEEKING WORK\\nLocation: Victoria, B.C.\\nRemote... \n6 SEEKING WORK, Nottingham UK, Remote\\nLAMP (Per... \n12 SEEKING WORK | GMT+8 (Perth, Australia) | Remo... \n15 SEEKING WORK | REMOTE SR Software Architect / ... \n16 SEEKING WORK\\nZoë Davidson // Front End Engine... \n... ... 
\n111518 SEEKING WORK - Toronto + REMOTE (GMT-5).\\nFull... \n111523 SEEKING WORK - Remote, Berlin\\nContent Marketi... \n111538 SEEKING WORK - Northern Utah (SLC to Provo) or... \n111547 SEEKING WORK - US, Remote\\n- Available mid-Aug... \n111572 SEEKING WORK - Remote - Brisbane, Australia\\nB... \n\n CommentTime CommentAuthor \\\n4 2015-04-01 16:39:38 UTC spitfire \n6 2012-02-01 14:05:14 UTC mike-cardwell \n12 2021-11-01 15:09:57 UTC bengtan \n15 2021-11-01 15:40:58 UTC jhoelzel \n16 2021-11-01 15:41:14 UTC zdavidson4 \n... ... ... \n111518 2017-07-03 16:03:57 UTC d10p \n111523 2017-07-04 07:37:22 UTC rwieruch \n111538 2017-07-08 06:42:31 UTC VaedaStrike \n111547 2017-07-14 19:17:40 UTC jeffmk \n111572 2017-07-04 04:11:32 UTC pirhoteknik \n\n ParentTitle \n4 Ask HN: Freelancer? Seeking freelancer? (April... \n6 Ask HN: Freelancer? Seeking freelancer? (Febr... \n12 Ask HN: Freelancer? Seeking freelancer? (Novem... \n15 Ask HN: Freelancer? Seeking freelancer? (Novem... \n16 Ask HN: Freelancer? Seeking freelancer? (Novem... \n... ... \n111518 Ask HN: Freelancer? Seeking freelancer? (July ... \n111523 Ask HN: Freelancer? Seeking freelancer? (July ... \n111538 Ask HN: Freelancer? Seeking freelancer? (July ... \n111547 Ask HN: Freelancer? Seeking freelancer? (July ... \n111572 Ask HN: Freelancer? Seeking freelancer? (July ... \n\n[15266 rows x 4 columns]}.",
134
- "output_type": "error",
135
- "traceback": [
136
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
137
- "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
138
- "\u001b[0;32m<ipython-input-23-d01447c6056e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m datasets.Dataset({'hiring': hiring_df,\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0;34m'wants_to_be_hired'\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mwants_to_be_hired_df\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m 'freelancer': freelancer_df})\n",
139
- "\u001b[0;32m/usr/local/anaconda3/lib/python3.8/site-packages/datasets/arrow_dataset.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, arrow_table, info, split, indices_table, fingerprint)\u001b[0m\n\u001b[1;32m 580\u001b[0m \u001b[0mIndexableMixin\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 581\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 582\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_data\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTable\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_check_table\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marrow_table\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 583\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_indices\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mOptional\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mTable\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_check_table\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindices_table\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mindices_table\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 584\u001b[0m \u001b[0mmaybe_register_dataset_for_temp_dir_deletion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
140
- "\u001b[0;32m/usr/local/anaconda3/lib/python3.8/site-packages/datasets/arrow_dataset.py\u001b[0m in \u001b[0;36m_check_table\u001b[0;34m(table)\u001b[0m\n\u001b[1;32m 556\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mtable\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 557\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 558\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"Expected a pyarrow.Table or a datasets.table.Table object, but got {table}.\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 559\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 560\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
141
- "\u001b[0;31mTypeError\u001b[0m: Expected a pyarrow.Table or a datasets.table.Table object, but got {'hiring': text \\\n0 Kindling -- New York City\\nLead Application De... \n1 Angular UI Developers: Help Improve the State ... \n2 Cir.cl is a funded startup founded by former O... \n3 Backplane - Palo Alto, CA\\n== About Us ==\\nWe ... \n5 We are building supercomputers and bespoke sof... \n... ... \n111574 LOCATION: Mountain View, CA |Full-time |ONSITE... \n111575 Eaze | Senior Software Engineer | San Francisc... \n111576 Auckland, New Zealand | REMOTE | VISA\\n---\\n# ... \n111577 Elementum | Mountain View | Onsite\\nI am the H... \n111578 SeatGeek — New York, NY — Full Time — We're a ... \n\n CommentTime CommentAuthor \\\n0 2014-08-01 16:15:39 UTC toestues \n1 2014-08-01 16:15:52 UTC AngularJobs \n2 2014-05-01 20:50:30 UTC cld276 \n3 2014-05-01 20:50:39 UTC chengyinliu \n5 2013-04-01 19:24:46 UTC kaib \n... ... ... \n111574 2017-07-03 23:46:02 UTC katrinadurant \n111575 2017-07-03 21:05:44 UTC spade \n111576 2017-07-03 21:05:39 UTC neftaly \n111577 2017-07-04 03:16:13 UTC sv13 \n111578 2017-07-04 13:05:00 UTC jack7890 \n\n ParentTitle \n0 Ask HN: Who is hiring? (August 2014) \n1 Ask HN: Who is hiring? (August 2014) \n2 Ask HN: Who is hiring? (May 2014) \n3 Ask HN: Who is hiring? (May 2014) \n5 Ask HN: Who is hiring? (April 2013) \n... ... \n111574 Ask HN: Who is hiring? (July 2017) \n111575 Ask HN: Who is hiring? (July 2017) \n111576 Ask HN: Who is hiring? (July 2017) \n111577 Ask HN: Who is hiring? (July 2017) \n111578 Ask HN: Who is hiring? (July 2017) \n\n[75974 rows x 4 columns], 'wants_to_be_hired': text \\\n7 Location: FL\\n Remote:Yes, Part time preferred... \n8 Willing to relocate: No\\nLocation: London, UK\\... \n10 Location: Ukraine\\nRemote: Yes\\nWilling to rel... \n11 Location: London\\nRemote: No\\nWilling to reloc... \n23 Location: India\\nRemote: Yes\\nTechnologies: Py... \n... ... \n111535 Location: Montreal, Quebec (Canada)\\nRemote: N... \n111542 Location: Seattle, WA\\nRemote: yes/exclusively... \n111543 Location: India\\nRemote: Yes\\nWilling to reloc... \n111562 Hi, I am Sagar Ghai, a recent Computer Science... \n111568 Location: Baltimore, Delaware\\nRemote: Preferr... \n\n CommentTime CommentAuthor \\\n7 2019-12-03 14:53:37 UTC kanagac \n8 2021-09-01 16:29:21 UTC electronstudio \n10 2014-11-01 21:46:49 UTC andrey_utkin \n11 2014-09-02 21:26:34 UTC mikemases \n23 2021-06-02 02:40:37 UTC kgritesh \n... ... ... \n111535 2017-07-05 18:12:05 UTC ajgaba \n111542 2017-07-03 16:33:10 UTC fuzzy-logic \n111543 2017-07-04 16:17:41 UTC kshk123 \n111562 2017-07-04 08:34:09 UTC sagarghai \n111568 2017-07-05 21:33:02 UTC wernercd \n\n ParentTitle \n7 Ask HN: Who wants to be hired? (December 2019) \n8 Ask HN: Who wants to be hired? (September 2021) \n10 Ask HN: Who wants to be hired? (November 2014) \n11 Ask HN: Who wants to be hired? (September 2014) \n23 Ask HN: Who wants to be hired? (June 2021) \n... ... \n111535 Ask HN: Who wants to be hired? (July 2017) \n111542 Ask HN: Who wants to be hired? (July 2017) \n111543 Ask HN: Who wants to be hired? (July 2017) \n111562 Ask HN: Who wants to be hired? (July 2017) \n111568 Ask HN: Who wants to be hired? (July 2017) \n\n[20262 rows x 4 columns], 'freelancer': text \\\n4 SEEKING WORK\\nLocation: Victoria, B.C.\\nRemote... \n6 SEEKING WORK, Nottingham UK, Remote\\nLAMP (Per... \n12 SEEKING WORK | GMT+8 (Perth, Australia) | Remo... \n15 SEEKING WORK | REMOTE SR Software Architect / ... 
\n16 SEEKING WORK\\nZoë Davidson // Front End Engine... \n... ... \n111518 SEEKING WORK - Toronto + REMOTE (GMT-5).\\nFull... \n111523 SEEKING WORK - Remote, Berlin\\nContent Marketi... \n111538 SEEKING WORK - Northern Utah (SLC to Provo) or... \n111547 SEEKING WORK - US, Remote\\n- Available mid-Aug... \n111572 SEEKING WORK - Remote - Brisbane, Australia\\nB... \n\n CommentTime CommentAuthor \\\n4 2015-04-01 16:39:38 UTC spitfire \n6 2012-02-01 14:05:14 UTC mike-cardwell \n12 2021-11-01 15:09:57 UTC bengtan \n15 2021-11-01 15:40:58 UTC jhoelzel \n16 2021-11-01 15:41:14 UTC zdavidson4 \n... ... ... \n111518 2017-07-03 16:03:57 UTC d10p \n111523 2017-07-04 07:37:22 UTC rwieruch \n111538 2017-07-08 06:42:31 UTC VaedaStrike \n111547 2017-07-14 19:17:40 UTC jeffmk \n111572 2017-07-04 04:11:32 UTC pirhoteknik \n\n ParentTitle \n4 Ask HN: Freelancer? Seeking freelancer? (April... \n6 Ask HN: Freelancer? Seeking freelancer? (Febr... \n12 Ask HN: Freelancer? Seeking freelancer? (Novem... \n15 Ask HN: Freelancer? Seeking freelancer? (Novem... \n16 Ask HN: Freelancer? Seeking freelancer? (Novem... \n... ... \n111518 Ask HN: Freelancer? Seeking freelancer? (July ... \n111523 Ask HN: Freelancer? Seeking freelancer? (July ... \n111538 Ask HN: Freelancer? Seeking freelancer? (July ... \n111547 Ask HN: Freelancer? Seeking freelancer? (July ... \n111572 Ask HN: Freelancer? Seeking freelancer? (July ... \n\n[15266 rows x 4 columns]}."
142
- ]
143
- }
144
- ],
145
- "source": []
146
- },
147
- {
148
- "cell_type": "code",
149
- "execution_count": null,
150
- "id": "eea7788b-8919-4fb9-bec4-9a349ee34728",
151
- "metadata": {},
152
- "outputs": [],
153
- "source": []
154
  }
155
  ],
156
  "metadata": {
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 1,
6
  "id": "056aa255-fda1-4cde-be24-459f6ad2c8b9",
7
  "metadata": {},
8
  "outputs": [
 
17
  ],
18
  "source": [
19
  "import pandas as pd\n",
 
20
  "\n",
21
+ "df = pd.read_csv('raw/bq-results-20211206-133858-irtkgx60el7i.csv').drop(['ParentUrl', 'ParentAuthor', 'ParentTime', 'ParentScore'], axis=1)\n",
22
+ "\n",
23
+ "# There's a mix of HTML, sanitized links and raw Unicode that we'd like to clean up\n",
24
  "df.text = df.text.str.replace('<p>', '\\n')\n",
25
  "strings_to_remove = ['rel=\"nofollow\"', '<pre>', '</pre>', '<i>', '</i>', '<code>', '</code>', '&gt;']\n",
26
+ "email_regex = '[a-zA-Z0-9._-]{0,30}@[a-zA-Z0-9._-]{0,20}\\.[a-zA-Z0-9_-]{2,3}' # for redacting emails\n",
27
  "munged_url_regex = 'http(s)?:\\&\\#.*?\\<\\/a>'\n",
28
  "\n",
29
  "for string in strings_to_remove:\n",
 
45
  },
46
  {
47
  "cell_type": "code",
48
+ "execution_count": 8,
49
+ "id": "57ca7c96-d94c-4e65-8113-da7729558247",
50
  "metadata": {},
51
  "outputs": [],
52
  "source": [
53
  "import datasets\n",
54
  "from huggingface_hub import create_repo\n",
55
+ "from huggingface_hub import Repository\n",
56
+ "\n",
57
+ "all_datasets = datasets.dataset_dict.DatasetDict({'hiring': datasets.Dataset.from_pandas(hiring_df).remove_columns('__index_level_0__'),\n",
58
+ " 'wants_to_be_hired': datasets.Dataset.from_pandas(wants_to_be_hired_df).remove_columns('__index_level_0__'),\n",
59
+ " 'freelancer': datasets.Dataset.from_pandas(freelancer_df).remove_columns('__index_level_0__')})\n",
60
  "data_path = './data'\n",
61
  "all_datasets.save_to_disk(data_path)\n",
62
  "\n",
63
  "repo_url = 'https://huggingface.co/datasets/dansbecker/hackernews_hiring_posts'\n",
64
  "repo = Repository(local_dir=\".\", clone_from=repo_url)\n",
 
65
  "repo.git_add(data_path)\n",
66
+ "repo.git_add('*.ipynb')\n",
67
+ "repo.git_add('README.md')\n",
68
+ "repo.git_commit(\"A standard update\")"
69
  ]
70
  }
71
  ],
72
  "metadata": {
README.md ADDED
@@ -0,0 +1,17 @@
1
+ This dataset contains postings and comments from the following recurring threads on [Hacker News](http://news.ycombinator.com/):
2
+
3
+ 1. Ask HN: Who is hiring?
4
+ 2. Ask HN: Who wants to be hired?
5
+ 3. Freelancer? Seeking freelancer?
6
+
7
+ These post types are stored in datasets called `hiring`, `wants_to_be_hired`, and `freelancer`, respectively.
8
+
9
+ Each type of posting has recurred on a regular basis for several years. The `CommentTime` field records when each comment/listing was added, and the `ParentTitle` field also gives the date of the parent thread in its text (e.g. `Ask HN: Who is hiring? (March 2021)`).
10
+
11
+ This dataset is not programmatically reproducible from source because it was uploaded as an experiment with HF datasets. The raw data was created by querying the public table `bigquery-public-data.hacker_news.full` in Google BigQuery.
12
+
13
+ Email addresses have been redacted from the dataset.
14
+
15
+ If this dataset is interesting/useful, I (Dan Becker) will look into improving reproducibility and other general clean-up.
16
+
17
+ This dataset may be useful for finding trends in tech and tech job listings.
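For consumers of the repository described above, a minimal loading sketch, assuming the repo has been cloned locally with git-lfs so that `./data` contains the `DatasetDict` written by `save_to_disk` in the notebook (the column names come from the notebook output earlier in this commit):

```python
from datasets import load_from_disk

# ./data is the directory produced by all_datasets.save_to_disk('./data') in the notebook.
all_datasets = load_from_disk('./data')

hiring = all_datasets['hiring']     # "Ask HN: Who is hiring?" comments
print(hiring.column_names)          # ['text', 'CommentTime', 'CommentAuthor', 'ParentTitle']
print(hiring[0]['ParentTitle'])     # e.g. 'Ask HN: Who is hiring? (August 2014)'
```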
data/freelancer/dataset.arrow CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9f7a7fed71d052074a7edf4e7d52d862f349c21fe5a360ad126d530c17c2349f
3
- size 11288376
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6c98a8f743575b496b0c37547fdabdb311f7fb932a51a41ed125846d71de6c7b
3
+ size 11166008
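The `dataset.arrow` files are stored through git-lfs, so the diff only records a new pointer (sha256 oid plus byte size). A small, hypothetical helper for checking that a locally materialized file matches the pointer above:

```python
import hashlib
import os

def matches_lfs_pointer(path, expected_oid, expected_size):
    """Return True if the file's sha256 and size match a git-lfs pointer."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

# Values taken from the updated pointer in this diff.
print(matches_lfs_pointer('data/freelancer/dataset.arrow',
                          '6c98a8f743575b496b0c37547fdabdb311f7fb932a51a41ed125846d71de6c7b',
                          11166008))
```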
data/freelancer/dataset_info.json CHANGED
@@ -26,11 +26,6 @@
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
- },
30
- "__index_level_0__": {
31
- "dtype": "int64",
32
- "id": null,
33
- "_type": "Value"
34
  }
35
  },
36
  "homepage": "",
 
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
  }
30
  },
31
  "homepage": "",
data/freelancer/state.json CHANGED
@@ -4,7 +4,7 @@
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
- "_fingerprint": "04052ca22482b184",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
 
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
+ "_fingerprint": "4f8f711b9136404c",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
data/hiring/dataset.arrow CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:96ac076bb640d943257e8fa165c629e6215a2e15013c0e5dcbc6e2ccdfa93c6e
3
- size 82584288
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8827da89590c888c1e2d167c04f4fb527ed342c9cc23c61bbfa6646cfcf6f2f
3
+ size 81975968
data/hiring/dataset_info.json CHANGED
@@ -26,11 +26,6 @@
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
- },
30
- "__index_level_0__": {
31
- "dtype": "int64",
32
- "id": null,
33
- "_type": "Value"
34
  }
35
  },
36
  "homepage": "",
 
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
  }
30
  },
31
  "homepage": "",
data/hiring/state.json CHANGED
@@ -4,7 +4,7 @@
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
- "_fingerprint": "ebeb4665e2db8db2",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
 
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
+ "_fingerprint": "e5166a077fc0f8e1",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
data/wants_to_be_hired/dataset.arrow CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:23846fba245539287b9264e7f0f23ffbd32f23dc7be4ea6a258e7dc2be4d3ff2
3
- size 11535536
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc638e6f45b651e6e166298e20f7c68b2fb6cdb5aac357a728778dffa9114ac1
3
+ size 11373152
data/wants_to_be_hired/dataset_info.json CHANGED
@@ -26,11 +26,6 @@
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
- },
30
- "__index_level_0__": {
31
- "dtype": "int64",
32
- "id": null,
33
- "_type": "Value"
34
  }
35
  },
36
  "homepage": "",
 
26
  "dtype": "string",
27
  "id": null,
28
  "_type": "Value"
29
  }
30
  },
31
  "homepage": "",
data/wants_to_be_hired/state.json CHANGED
@@ -4,7 +4,7 @@
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
- "_fingerprint": "2a5d337776498fbe",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
 
4
  "filename": "dataset.arrow"
5
  }
6
  ],
7
+ "_fingerprint": "f8da56cf67a872ff",
8
  "_format_columns": null,
9
  "_format_kwargs": {},
10
  "_format_type": null,
hn_hiring_posts.ipynb CHANGED
@@ -19,9 +19,11 @@
19
  "import pandas as pd\n",
20
  "\n",
21
  "df = pd.read_csv('raw/bq-results-20211206-133858-irtkgx60el7i.csv').drop(['ParentUrl', 'ParentAuthor', 'ParentTime', 'ParentScore'], axis=1)\n",
 
 
22
  "df.text = df.text.str.replace('<p>', '\\n')\n",
23
  "strings_to_remove = ['rel=\"nofollow\"', '<pre>', '</pre>', '<i>', '</i>', '<code>', '</code>', '&gt;']\n",
24
- "email_regex = '[a-zA-Z0-9._-]{0,30}@[a-zA-Z0-9._-]{0,20}\\.[a-zA-Z0-9_-]{2,3}'\n",
25
  "munged_url_regex = 'http(s)?:\\&\\#.*?\\<\\/a>'\n",
26
  "\n",
27
  "for string in strings_to_remove:\n",
@@ -43,7 +45,7 @@
43
  },
44
  {
45
  "cell_type": "code",
46
- "execution_count": null,
47
  "id": "57ca7c96-d94c-4e65-8113-da7729558247",
48
  "metadata": {},
49
  "outputs": [],
@@ -52,9 +54,9 @@
52
  "from huggingface_hub import create_repo\n",
53
  "from huggingface_hub import Repository\n",
54
  "\n",
55
- "all_datasets = datasets.dataset_dict.DatasetDict({'hiring': datasets.Dataset.from_pandas(hiring_df),\n",
56
- " 'wants_to_be_hired': datasets.Dataset.from_pandas(wants_to_be_hired_df),\n",
57
- " 'freelancer': datasets.Dataset.from_pandas(freelancer_df)})\n",
58
  "data_path = './data'\n",
59
  "all_datasets.save_to_disk(data_path)\n",
60
  "\n",
@@ -62,6 +64,7 @@
62
  "repo = Repository(local_dir=\".\", clone_from=repo_url)\n",
63
  "repo.git_add(data_path)\n",
64
  "repo.git_add('*.ipynb')\n",
 
65
  "repo.git_commit(\"A standard update\")"
66
  ]
67
  }
 
19
  "import pandas as pd\n",
20
  "\n",
21
  "df = pd.read_csv('raw/bq-results-20211206-133858-irtkgx60el7i.csv').drop(['ParentUrl', 'ParentAuthor', 'ParentTime', 'ParentScore'], axis=1)\n",
22
+ "\n",
23
+ "# There's a mix of HTML, sanitized links and raw Unicode that we'd like to clean up\n",
24
  "df.text = df.text.str.replace('<p>', '\\n')\n",
25
  "strings_to_remove = ['rel=\"nofollow\"', '<pre>', '</pre>', '<i>', '</i>', '<code>', '</code>', '&gt;']\n",
26
+ "email_regex = '[a-zA-Z0-9._-]{0,30}@[a-zA-Z0-9._-]{0,20}\\.[a-zA-Z0-9_-]{2,3}' # for redacting emails\n",
27
  "munged_url_regex = 'http(s)?:\\&\\#.*?\\<\\/a>'\n",
28
  "\n",
29
  "for string in strings_to_remove:\n",
 
45
  },
46
  {
47
  "cell_type": "code",
48
+ "execution_count": 8,
49
  "id": "57ca7c96-d94c-4e65-8113-da7729558247",
50
  "metadata": {},
51
  "outputs": [],
 
54
  "from huggingface_hub import create_repo\n",
55
  "from huggingface_hub import Repository\n",
56
  "\n",
57
+ "all_datasets = datasets.dataset_dict.DatasetDict({'hiring': datasets.Dataset.from_pandas(hiring_df).remove_columns('__index_level_0__'),\n",
58
+ " 'wants_to_be_hired': datasets.Dataset.from_pandas(wants_to_be_hired_df).remove_columns('__index_level_0__'),\n",
59
+ " 'freelancer': datasets.Dataset.from_pandas(freelancer_df).remove_columns('__index_level_0__')})\n",
60
  "data_path = './data'\n",
61
  "all_datasets.save_to_disk(data_path)\n",
62
  "\n",
 
64
  "repo = Repository(local_dir=\".\", clone_from=repo_url)\n",
65
  "repo.git_add(data_path)\n",
66
  "repo.git_add('*.ipynb')\n",
67
+ "repo.git_add('README.md')\n",
68
  "repo.git_commit(\"A standard update\")"
69
  ]
70
  }
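The committed cell stops at `git_commit`; a one-line sketch of the remaining step, assuming the `repo` object created above and that git credentials / a Hugging Face token are already configured:

```python
# Push the new commit, including the LFS-tracked arrow files, to the Hub.
repo.git_push()
```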