useparquet
useparquet.py
ADDED
@@ -0,0 +1,135 @@
import pyarrow.parquet as pq
import os
import json

# Function to check row count in a Parquet file
def check_row_count(parquet_file_path):
    try:
        parquet_file = pq.ParquetFile(parquet_file_path)
        num_rows = parquet_file.metadata.num_rows
        print(f"The Parquet file '{parquet_file_path}' contains {num_rows} rows.")
        return num_rows
    except Exception as e:
        print(f"Error reading Parquet file: {e}")

# Function to retrieve a specific row efficiently from a Parquet file
def get_specific_row(parquet_file_path, row_index):
    try:
        # Open the Parquet file
        parquet_file = pq.ParquetFile(parquet_file_path)

        # Initialize a variable to track the current row position
        current_row = 0

        # Iterate over each row group in the Parquet file
        for row_group in range(parquet_file.num_row_groups):
            # Read the current row group into a table
            table = parquet_file.read_row_group(row_group)

            # Check if the desired row is within the current row group
            if current_row <= row_index < current_row + table.num_rows:
                # Calculate the index of the row within the current table
                row_in_table = row_index - current_row

                # Retrieve the row as a dictionary
                row = {col: table.column(col)[row_in_table].as_py() for col in table.column_names}

                return row

            # Update the current row position
            current_row += table.num_rows

        # If the row index is out of bounds, raise an error
        raise IndexError(f"Row index {row_index} is out of bounds.")

    except Exception as e:
        print(f"Error retrieving row: {e}")
        return None

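# Note: get_specific_row decodes every row group up to (and including) the one
# that holds row_index. A variant that first consults row-group metadata and
# only decodes the single matching group is sketched below; the function name
# is illustrative and not part of the original script.
def get_specific_row_via_metadata(parquet_file_path, row_index):
    parquet_file = pq.ParquetFile(parquet_file_path)
    first_row = 0
    for group in range(parquet_file.num_row_groups):
        # Row counts come from metadata, so no data is decoded for skipped groups
        group_rows = parquet_file.metadata.row_group(group).num_rows
        if first_row <= row_index < first_row + group_rows:
            # Decode only the matching group, slice out the target row as a dict
            table = parquet_file.read_row_group(group)
            return table.slice(row_index - first_row, 1).to_pylist()[0]
        first_row += group_rows
    raise IndexError(f"Row index {row_index} is out of bounds.")
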
# Function to save image content back to JPG
def save_image_from_row(row, output_image_path):
    try:
        image_content = row['image_content']  # Image content as binary
        with open(output_image_path, 'wb') as img_file:
            img_file.write(image_content)
        print(f"Image saved successfully at {output_image_path}")
    except Exception as e:
        print(f"Error saving image: {e}")

# Function to save JSON content to a file
def save_json_from_row(row, output_json_path):
    try:
        json_content = row['json_content']  # JSON content as string
        json_data = json.loads(json_content)  # Parse the string into a JSON object
        with open(output_json_path, 'w', encoding='utf-8') as json_file:
            json.dump(json_data, json_file, indent=4, ensure_ascii=False)
        print(f"JSON saved successfully at {output_json_path}")
    except Exception as e:
        print(f"Error saving JSON: {e}")

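# Note: the two savers above assume the dataset stores a binary 'image_content'
# column and a string 'json_content' column; that is an assumption about the
# Parquet schema rather than something this script checks. A small sketch of an
# up-front verification (helper name is illustrative):
def verify_expected_columns(parquet_file_path, expected=('image_content', 'json_content')):
    # Read only the Arrow schema and report any missing expected columns
    schema = pq.ParquetFile(parquet_file_path).schema_arrow
    missing = [name for name in expected if name not in schema.names]
    if missing:
        print(f"Warning: expected column(s) missing from Parquet schema: {missing}")
    return not missing
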
# Function to save all images and JSONs into a specific folder
def save_all_images_and_jsons(parquet_file_path, output_folder):
    try:
        # Create the output folder if it doesn't exist
        os.makedirs(output_folder, exist_ok=True)

        # Open the Parquet file and check the number of rows
        parquet_file = pq.ParquetFile(parquet_file_path)
        num_rows = parquet_file.metadata.num_rows

        # Iterate through all rows and save images and JSONs
        for row_index in range(num_rows):
            row = get_specific_row(parquet_file_path, row_index)
            if row is not None:
                # Define the file paths for each image and JSON
                output_image_path = os.path.join(output_folder, f"image_{row_index}.jpg")
                output_json_path = os.path.join(output_folder, f"data_{row_index}.json")

                # Save the image and JSON content
                save_image_from_row(row, output_image_path)
                save_json_from_row(row, output_json_path)

    except Exception as e:
        print(f"Error saving all images and JSONs: {e}")

+
# Function to save N images and JSONs into a specific folder
|
96 |
+
def save_n_images_and_jsons(parquet_file_path, output_folder, num_to_save):
|
97 |
+
try:
|
98 |
+
# Create the output folder if it doesn't exist
|
99 |
+
os.makedirs(output_folder, exist_ok=True)
|
100 |
+
|
101 |
+
# Open the Parquet file and check the number of rows
|
102 |
+
parquet_file = pq.ParquetFile(parquet_file_path)
|
103 |
+
num_rows = parquet_file.metadata.num_rows
|
104 |
+
|
105 |
+
# Limit the number of files to save to the available rows
|
106 |
+
limit = min(num_to_save, num_rows)
|
107 |
+
|
108 |
+
# Iterate through the first N rows and save images and JSONs
|
109 |
+
for row_index in range(limit):
|
110 |
+
row = get_specific_row(parquet_file_path, row_index)
|
111 |
+
if row is not None:
|
112 |
+
# Define the file paths for each image and JSON
|
113 |
+
output_image_path = os.path.join(output_folder, f"image_{row_index}.jpg")
|
114 |
+
output_json_path = os.path.join(output_folder, f"data_{row_index}.json")
|
115 |
+
|
116 |
+
# Save the image and JSON content
|
117 |
+
save_image_from_row(row, output_image_path)
|
118 |
+
save_json_from_row(row, output_json_path)
|
119 |
+
|
120 |
+
except Exception as e:
|
121 |
+
print(f"Error saving N images and JSONs: {e}")
|
122 |
+
|
# Example usage:
parquet_file_path = 'path/to/MobileViews_xxx-xxx.parquet'
output_folder_all = 'path/to/all_extracted_images_jsons'
output_folder_n = 'path/to/n_extracted_images_jsons'

# Check how many rows the file contains
check_row_count(parquet_file_path)

# Step 1: Save all images and JSONs, or only a certain number
save_n_images_and_jsons(parquet_file_path, output_folder_n, num_to_save=100)
# save_all_images_and_jsons(parquet_file_path, output_folder_all)
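# Optional: the sketches above can be used the same way, e.g. (paths are the
# same placeholders as above):
# verify_expected_columns(parquet_file_path)
# save_all_images_and_jsons_streaming(parquet_file_path, output_folder_all)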