"""Example submission script: it installs the bundled offline packages, runs `predict`
from handcrafted_solution over the hoho dataset, and writes the predicted wireframes to
submission.parquet."""

'''---compulsory---'''
import hoho; hoho.setup()  # initialise hoho before any other hoho call in this script
import io
import os
import time
from typing import Dict

import numpy as np
import pandas as pd
import webdataset as wds
from PIL import Image as PImage
from tqdm import tqdm
from transformers import AutoTokenizer

from hoho import proc, Sample
from hoho.read_write_colmap import read_cameras_binary, read_images_binary, read_points3D_binary

def convert_entry_to_human_readable(entry):
    """Decode a raw webdataset entry: COLMAP binaries are parsed into their structures and
    image byte strings are opened as PIL images; keys in `already_good` pass through as-is."""
    out = {}
    already_good = ['__key__', 'wf_vertices', 'wf_edges', 'edge_semantics', 'mesh_vertices',
                    'mesh_faces', 'face_semantics', 'K', 'R', 't']
    for k, v in entry.items():
        if k in already_good:
            out[k] = v
            continue
        if k == 'points3d':
            out[k] = read_points3D_binary(fid=io.BytesIO(v))
        if k == 'cameras':
            out[k] = read_cameras_binary(fid=io.BytesIO(v))
        if k == 'images':
            out[k] = read_images_binary(fid=io.BytesIO(v))
        if k in ['ade20k', 'gestalt']:
            out[k] = [PImage.open(io.BytesIO(x)).convert('RGB') for x in v]
        if k == 'depthcm':
            out[k] = [PImage.open(io.BytesIO(x)) for x in v]
    return out

'''---end of compulsory---'''

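# A minimal usage sketch (not called anywhere in this script; the function name is only
# illustrative): it shows how a single raw dataset entry could be decoded with
# convert_entry_to_human_readable. The split/dataset_type arguments mirror the call in the
# __main__ block below.
def _example_decode_one_entry():
    dataset = hoho.get_dataset(decode=None, split='all', dataset_type='webdataset')
    entry = next(iter(dataset))                    # raw entry with byte-encoded fields
    human = convert_entry_to_human_readable(entry)
    print(sorted(human.keys()))                    # e.g. '__key__', 'cameras', 'images', ...
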
import subprocess
import sys
from pathlib import Path

# Work around mkl-service threading-layer conflicts in spawned worker processes.
os.environ['MKL_SERVICE_FORCE_INTEL'] = '1'

def download_packages(packages, folder='packages/torch'):
    """
    Downloads packages as .whl files into the specified folder using pip.

    Parameters:
        packages (list): List of packages to download with versions.
        folder (str): The folder where the .whl files will be saved.
    """
    Path(folder).mkdir(parents=True, exist_ok=True)
    try:
        subprocess.check_call([sys.executable, "-m", "pip", "download",
                               "--platform", "manylinux1_x86_64",
                               "--python-version", "38",
                               "--only-binary=:all:",
                               "-d", folder] + packages)
        print(f"Packages downloaded successfully into {folder}")
    except subprocess.CalledProcessError as e:
        print(f"Failed to download packages. Error: {e}")

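# Illustrative only (never called here): pre-populating the wheel cache on a machine with
# network access before packaging the submission. The version pins below are taken from the
# wheel filenames used in setup_environment(); anything beyond that would be an assumption.
# download_packages(['torchvision==0.14.1', 'torchaudio==0.13.1'], folder='packages/torch')
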
def install_package_from_local_file(package_name, folder='packages'):
    """
    Installs a package from local .whl files using pip, without touching the network.

    pip is pointed at `folder/package_name` via --find-links, so the wheels for each
    package are expected to live in a sub-directory named after it.

    Parameters:
        package_name (str): The name of the package to install.
        folder (str): The folder containing the per-package wheel sub-directories.
    """
    try:
        pth = str(Path(folder) / package_name)
        subprocess.check_call([sys.executable, "-m", "pip", "install",
                               "--no-index",
                               "--find-links", pth,
                               package_name])
        print(f"Package installed successfully from {pth}")
    except subprocess.CalledProcessError as e:
        print(f"Failed to install package from {pth}. Error: {e}")

def setup_environment():
    """Install the vendored dependencies from the local wheel cache and build pc_util."""
    # Install everything in --no-index mode; the wheels are expected to be shipped
    # alongside this script under packages/.
    install_package_from_local_file('torch', folder='packages')
    install_package_from_local_file('packages/torch/torchvision-0.14.1-cp38-cp38-manylinux1_x86_64.whl', folder='packages/torch')
    install_package_from_local_file('packages/torch/torchaudio-0.13.1-cp38-cp38-manylinux1_x86_64.whl', folder='packages/torch')
    install_package_from_local_file('scikit-learn', folder='packages')
    install_package_from_local_file('open3d', folder='packages')
    install_package_from_local_file('easydict', folder='packages')

    # Build and install the local pc_util extension if its sources are present, restoring
    # the working directory afterwards so that later relative paths still resolve.
    pc_util_path = os.path.join(os.getcwd(), 'pc_util')
    if os.path.isdir(pc_util_path):
        original_cwd = os.getcwd()
        os.chdir(pc_util_path)
        subprocess.check_call([sys.executable, "setup.py", "install"])
        os.chdir(original_cwd)
    else:
        print(f"Directory {pc_util_path} does not exist")

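# Expected local package layout, as implied by the install calls in setup_environment()
# above (an inferred sketch, not an authoritative listing):
#
#   packages/
#     torch/           torch, torchvision-0.14.1 and torchaudio-0.13.1 manylinux wheels
#     scikit-learn/    scikit-learn wheels and their dependencies
#     open3d/          open3d wheels
#     easydict/        easydict wheels
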
def save_submission(submission, path):
    """
    Saves the submission to a specified path as a parquet file.

    Parameters:
        submission (list of dict): The submission rows to save; each row carries
            '__key__', 'wf_vertices' and 'wf_edges'.
        path (str or Path): The path to save the submission to.
    """
    sub = pd.DataFrame(submission, columns=["__key__", "wf_vertices", "wf_edges"])
    sub.to_parquet(path)
    print(f"Submission saved to {path}")

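# A small sanity-check sketch (an assumption for local use, not part of the required
# pipeline): it reads a saved submission back with pandas and confirms the expected columns.
def _check_submission_file(path):
    df = pd.read_parquet(path)
    assert list(df.columns) == ["__key__", "wf_vertices", "wf_edges"]
    print(f"{len(df)} rows in {path}")
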
if __name__ == "__main__":
    setup_environment()

    # Import the solution only after the environment is set up, since it may depend on
    # the packages installed above.
    from handcrafted_solution import predict

    print("------------ Loading dataset ------------")
    params = hoho.get_params()
    dataset = hoho.get_dataset(decode=None, split='all', dataset_type='webdataset')

    print('------------ Now you can do your solution ---------------')
    solution = []
    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=8) as pool:
        # Submit one prediction job per sample, then collect the results in order.
        results = []
        for i, sample in enumerate(tqdm(dataset)):
            results.append(pool.submit(predict, sample, visualize=False))

        for i, result in enumerate(tqdm(results)):
            key, pred_vertices, pred_edges = result.result()
            solution.append({
                '__key__': key,
                'wf_vertices': pred_vertices.tolist(),
                'wf_edges': pred_edges,
            })
            if i % 100 == 0:
                print(f"Processed {i} samples")

    print('------------ Saving results ---------------')
    save_submission(solution, Path(params['output_path']) / "submission.parquet")
    print("------------ Done ------------")