thompsonmj committed
Commit 3ed547e
1 Parent(s): 1f3d066

Reduce memory usage and improve storage efficiency during dataset processing

Files changed (1)
  1. download.py +16 -4
download.py CHANGED
@@ -106,18 +106,30 @@ def concatenate_files(animal):
         with open(f"{save_dir}/dataset/image/{animal}.zip", 'wb') as f_out:
             for f_name in part_files:
                 with open(f_name, 'rb') as f_in:
-                    f_out.write(f_in.read())
+                    # Read and write in chunks
+                    CHUNK_SIZE = 8*1024*1024 # 8MB
+                    for chunk in iter(lambda: f_in.read(CHUNK_SIZE), b""):
+                        f_out.write(chunk)
+                # Delete part files as they are concatenated
+                os.remove(f_name)
         print(f"Archive for {animal} concatenated.")
     else:
         print(f"No part files found for {animal}.")
 
 with concurrent.futures.ThreadPoolExecutor() as executor:
     executor.map(concatenate_files, animals)
+
+def compute_md5(file_path):
+    hasher = md5()
+    with open(file_path, 'rb') as f:
+        CHUNK_SIZE = 8*1024*1024 # 8MB
+        for chunk in iter(lambda: f.read(CHUNK_SIZE), b""):
+            hasher.update(chunk)
+    return hasher.hexdigest()
 
 def verify_and_extract(animal):
     print(f"Confirming data integrity for {animal}.zip ...")
-    with open(f"{save_dir}/dataset/image/{animal}.zip", 'rb') as f:
-        zip_md5 = md5(f.read()).hexdigest()
+    zip_md5 = compute_md5(f"{save_dir}/dataset/image/{animal}.zip")
 
     with open(f"{save_dir}/dataset/image/{animal}_md5.txt", 'r') as file:
         expected_md5 = file.read().strip().split()[0]
@@ -139,4 +151,4 @@ def verify_and_extract(animal):
 with concurrent.futures.ThreadPoolExecutor() as executor:
     executor.map(verify_and_extract, animals)
 
-print("Download and setup complete.")
+print("Download script finished.")