recruiter-assistant/scripts/process-data.py
"""
# download parquet file from here: https://huggingface.co/datasets/Sachinkelenjaguri/Resume_dataset
# asked chatgpt to write me a script to convert and deduplicate
"""
import pandas as pd
# Step 1: Read the parquet file
df = pd.read_parquet('/Users/vincent/Downloads/csv-train.parquet')
# List the unique classes found in the 'Category' column (informational only)
if 'Category' in df.columns:
    unique_classes = df['Category'].unique()
    print("Unique classes in 'Category' column:")
    for cls in unique_classes:
        print(cls)
else:
    print("'Category' column does not exist in the data.")
# Step 2: Check if 'Resume' column exists
if 'Resume' in df.columns:
    # Drop duplicate resumes, then keep only the 'Resume' column
    print(df.shape)
    df = df.drop_duplicates(subset=['Resume'])
    print(df.shape)
    df = df[['Resume']]
    # Replace newlines with spaces in each cell of the 'Resume' column
    df['Resume'] = df['Resume'].replace('\n', ' ', regex=True)
else:
    print("'Resume' column does not exist in the data.")
# Step 3: Write the filtered dataframe back to a CSV file (no index, no header row)
df.to_csv('/Users/vincent/Downloads/output.csv', index=False, header=False)
print("Completed successfully")