Dataset: openaccess
Modalities: Image, Text
Formats: parquet
Languages: English
Libraries: Datasets, Dask
License:

openaccess / clean.py
Brett Renfer: Fixes for broken rows; full dataset now (commit 5d110fb)
import csv
import gzip


def load_and_process_csv(input_file_path, output_file_path):
    # Read the gzipped CSV. Rows are terminated by '\r\n'; treating only
    # '\r\n' as the line break keeps any bare '\n' or '\r' that appears
    # inside a field on the same logical row.
    with gzip.open(input_file_path, 'rt', newline='\r\n', encoding='utf-8') as infile:
        reader = csv.reader(infile)
        rows = list(reader)

    # Replace stray newlines inside individual fields with spaces so that
    # every record occupies exactly one line in the output file.
    processed_rows = []
    for row in rows:
        processed_row = []
        for field in row:
            field = field.replace('\n', ' ')
            field = field.replace('\r', ' ')
            processed_row.append(field)
        processed_rows.append(processed_row)

    # Write the cleaned rows back out as a gzipped CSV.
    with gzip.open(output_file_path, 'wt', newline='', encoding='utf-8') as outfile:
        writer = csv.writer(outfile)
        writer.writerows(processed_rows)


# Example usage
input_file_path = 'MetObjects.csv.gz'
output_file_path = 'metadata.csv.gz'
load_and_process_csv(input_file_path, output_file_path)
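After the script runs, a quick way to confirm the cleanup is to re-read metadata.csv.gz with the same csv and gzip modules. A minimal sketch, assuming the output file name from the example usage above; the column and row counts printed will be whatever the dataset actually contains:

import csv
import gzip

# Sanity check on the cleaned file: every record should now occupy a
# single physical line, so csv.reader yields one row per line.
with gzip.open('metadata.csv.gz', 'rt', newline='', encoding='utf-8') as f:
    reader = csv.reader(f)
    header = next(reader)
    row_count = sum(1 for _ in reader)

print(f'{len(header)} columns, {row_count} data rows')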