Neural Terrain Generation
Dataset columns (from the dataset viewer preview):

Column | Type | Details
---|---|---
heightmap | image | 360 × 360 px
latitude | string | 115 classes (e.g. `s07`, `n47`)
longitude | string | 315 classes (e.g. `e023`, `w062`)
This is a dataset of 360x360 Earth heightmaps generated from SRTM 1 Arc-Second Global. Each heightmap is labelled with its latitude and longitude. There are 573,995 samples.
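A minimal loading sketch using the `datasets` library; the repository id below is a placeholder for the actual dataset id:

```python
# Minimal sketch: load the dataset with the `datasets` library.
# "<user>/world-heightmaps-360px" is a placeholder repository id.
from datasets import load_dataset

ds = load_dataset("<user>/world-heightmaps-360px", split="train")
sample = ds[0]
sample["heightmap"]                       # PIL image, 360x360
sample["latitude"], sample["longitude"]   # e.g. "n47", "w070"
```

The remainder of this card outlines how the dataset was built.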
```python
import os

import matplotlib.pyplot as plt
import rasterio

input_directory = '...'
output_directory = '...'

# Convert each SRTM GeoTIFF tile to a grayscale PNG heightmap.
for file_name in os.listdir(input_directory):
    with rasterio.open(os.path.join(input_directory, file_name)) as image:
        elevation = image.read(1)  # the first band holds the elevation data
    plt.imsave(os.path.join(output_directory, file_name[:-4] + '.png'), elevation, cmap='gray')
```
```python
import os

from split_image import split_image

input_directory = '...'
output_directory = '...'

# Split each full-tile PNG into a 10 x 10 grid of smaller heightmaps.
for file_name in os.listdir(input_directory):
    split_image(os.path.join(input_directory, file_name), 10, 10,
                should_square=True, should_cleanup=False, output_dir=output_directory)
```
Hand-pick a set of corrupted and uncorrupted heightmaps, then train a discriminator to automatically filter the whole dataset.
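A minimal sketch of such a discriminator, assuming PyTorch and torchvision are available and that the hand-picked examples live in a hypothetical `filter_train/` folder with `clean/` and `corrupted/` sub-directories (all names are assumptions, not part of the dataset):

```python
# Minimal sketch (not the original filtering code): a small CNN discriminator
# trained on hand-labelled clean vs. corrupted heightmaps.
# The folder layout 'filter_train/clean' and 'filter_train/corrupted' is assumed.
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import datasets, transforms

transform = transforms.Compose([
    transforms.Grayscale(num_output_channels=1),
    transforms.Resize((360, 360)),
    transforms.ToTensor(),
])

# ImageFolder assigns one class per sub-directory (clean = 0, corrupted = 1 or vice versa).
train_set = datasets.ImageFolder('filter_train', transform=transform)
loader = DataLoader(train_set, batch_size=32, shuffle=True)

model = nn.Sequential(
    nn.Conv2d(1, 16, 3, stride=2, padding=1), nn.ReLU(),
    nn.Conv2d(16, 32, 3, stride=2, padding=1), nn.ReLU(),
    nn.Conv2d(32, 64, 3, stride=2, padding=1), nn.ReLU(),
    nn.AdaptiveAvgPool2d(1), nn.Flatten(),
    nn.Linear(64, 1),  # single logit: corrupted vs. clean
)

optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
loss_fn = nn.BCEWithLogitsLoss()

for epoch in range(10):
    for images, labels in loader:
        logits = model(images).squeeze(1)
        loss = loss_fn(logits, labels.float())
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

# The trained model can then score every heightmap and discard those
# predicted to be corrupted.
```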
Compile the images into Parquet files.
```python
import io
import json
import os

import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
from PIL import Image

samples_per_file = 6_000
root_dir = 'data/datasets/world-heightmaps-360px-png'

# Shuffle the metadata so each Parquet shard holds a random mix of locations.
df = pd.read_csv(os.path.join(root_dir, 'metadata.csv'))
df = df.sample(frac=1).reset_index(drop=True)

def save_table(image_data, table_number):
    print(f'Entries in table {table_number}: {len(image_data)}')
    # Arrow schema matching the Hugging Face Image and string Value features;
    # the embedded metadata lets the Hub viewer decode the image column.
    schema = pa.schema(
        fields=[
            ('heightmap', pa.struct([('bytes', pa.binary()), ('path', pa.string())])),
            ('latitude', pa.string()),
            ('longitude', pa.string())
        ],
        metadata={
            b'huggingface': json.dumps({
                'info': {
                    'features': {
                        'heightmap': {'_type': 'Image'},
                        'latitude': {'_type': 'Value', 'dtype': 'string'},
                        'longitude': {'_type': 'Value', 'dtype': 'string'}
                    }
                }
            }).encode('utf-8')
        }
    )
    table = pa.Table.from_pylist(image_data, schema=schema)
    pq.write_table(table, f'data/world-heightmaps-360px-parquet/{str(table_number).zfill(4)}.parquet')

image_data = []
samples_in_current_file = 0
current_file_number = 0

for i, row in df.iterrows():
    # Flush a shard once it reaches samples_per_file rows.
    if samples_in_current_file >= samples_per_file:
        save_table(image_data, current_file_number)
        image_data = []
        samples_in_current_file = 0
        current_file_number += 1
    samples_in_current_file += 1

    # Re-encode the image as PNG bytes so it can be stored inline in the table.
    image_path = row['file_name']
    with Image.open(os.path.join(root_dir, image_path)) as image:
        image_bytes = io.BytesIO()
        image.save(image_bytes, format='PNG')

    image_data.append({
        'heightmap': {
            'bytes': image_bytes.getvalue(),
            'path': image_path
        },
        'latitude': str(row['latitude']),
        'longitude': str(row['longitude'])
    })

# Write the final, partially filled shard.
save_table(image_data, current_file_number)
```
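A written shard can be read back to confirm the schema and decode a sample heightmap; a minimal sketch, assuming the output path used above:

```python
# Sanity check: read one shard back and decode the first heightmap.
import io

import pyarrow.parquet as pq
from PIL import Image

table = pq.read_table('data/world-heightmaps-360px-parquet/0000.parquet')
print(table.schema)    # heightmap: struct<bytes, path>, latitude, longitude
print(table.num_rows)  # up to 6,000 rows per shard

first = table.slice(0, 1).to_pylist()[0]
print(Image.open(io.BytesIO(first['heightmap']['bytes'])).size)  # (360, 360)
```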