app
- app.py +163 -0
- models.py +78 -0
- models/default_settings.json +17 -0
- networks.py +169 -0
- point_clouds/chair.obj +2048 -0
- point_clouds/default_settings.json +28 -0
- point_clouds/goat.obj +0 -0
- render_util.py +135 -0
- requirements.txt +9 -0
- util.py +121 -0
app.py
ADDED
@@ -0,0 +1,163 @@
import streamlit as st
import util
import torch
import render_util
import math
from pathlib import Path
from models import PosADANet
import json
import plotly.graph_objects as go
import gdown


point_color = "rgb(30, 20, 160)"
FILE_PC_KEY = 'File'
DEFAULT_COLOR = '#E1E1E1'


@st.cache
def load_model(path: str, num_controls: int, url: str):
    """
    Load the pretrained model from disk, or download it from Google Drive first.
    :param path: path to save/load the pretrained model
    :param num_controls: length of the style/control vector the model requires (6 for regular, 8 for metallic-roughness)
    :param url: Google Drive URL to download the model from if it is not already on disk
    :return: the pretrained model
    """
    if not Path(path).exists():
        with st.spinner('Downloading Model'):
            gdown.download(url, path, quiet=False)

    model = PosADANet(1, 4, num_controls, padding='zeros', bilinear=True).to(device)
    model.load_state_dict(torch.load(path, map_location=device))
    model.eval()

    return model


def load_dict_data(path: str):
    """
    Load a JSON file.
    :param path: path to the JSON file
    :return: dict with the JSON data
    """
    with open(path, 'r') as file:
        data = json.load(file)

    return data


def to_rgb(hex_color: str):
    """
    Convert a color from hex format to RGB.
    :param hex_color: color hex string
    :return: list of three numbers for the RGB channels, each between 0 and 1
    """
    h = hex_color.lstrip('#')
    return [float(int(h[i:i + 2], 16)) / 255 for i in (0, 2, 4)]


st.title('Z2P - Demo')

device = torch.device(torch.cuda.current_device() if torch.cuda.is_available() else torch.device('cpu'))

st.subheader('Settings')

# Load model and point-cloud metadata for the predefined demo point clouds and pretrained models
model_data = load_dict_data('models/default_settings.json')
pc_data = load_dict_data('point_clouds/default_settings.json')

col1_head, col2_head = st.columns(2)
model_key = col2_head.radio(
    'Choose Model',
    model_data.keys())

pc_key = col2_head.radio(
    'Choose Point Cloud',
    pc_data.keys())

uploaded_file = col2_head.file_uploader('Upload Your Own Point Cloud (.xyz, .obj)')

if pc_key == FILE_PC_KEY:
    # Use the point cloud uploaded by the user
    if uploaded_file is not None:
        txt = uploaded_file.getvalue().decode("utf-8")
        pc = util.xyz2tensor(txt, append_normals=True)
    else:
        st.warning('Please upload a .xyz or .obj file')
        st.stop()
else:
    # Load a demo point cloud
    pc = util.read_xyz_file(pc_data[pc_key]['path'])

st.header('Input')
col1, col2 = st.columns(2)

# Parameters for the point cloud's spatial transformations
col2.subheader("Point Cloud Transformations")
scale = col2.slider('Scale', min_value=0.0, max_value=5.0, value=pc_data[pc_key]['scale'])
rx = col2.slider('X-Rotation', min_value=-math.pi, max_value=math.pi, value=pc_data[pc_key]['rx'])
ry = col2.slider('Y-Rotation', min_value=-math.pi, max_value=math.pi, value=pc_data[pc_key]['ry'])
rz = col2.slider('Z-Rotation', min_value=-math.pi, max_value=math.pi, value=pc_data[pc_key]['rz'])
dy = col2.slider('Height', min_value=0, max_value=500, value=pc_data[pc_key]['dy'])

col1.subheader("Input Z-Buffer")

# Apply transformations
pc = render_util.rotate_pc(pc, rx, ry, rz)
trace1 = [go.Scatter3d(x=pc[:, 0], y=pc[:, 1], z=-pc[:, 2], mode="markers",
                       marker=dict(
                           symbol="circle",
                           size=1,
                           color=point_color))]
fig = go.Figure(trace1, layout=go.Layout())
col1_head.plotly_chart(fig, use_container_width=True)

# Project and render the point z-buffer
zbuffer = render_util.draw_pc(pc, radius=model_data[model_key]['point_radius'], dy=dy, scale=scale)

# Show the input z-buffer visualization in Streamlit
col1.image(zbuffer / zbuffer.max(), use_column_width=True)

zbuffer: torch.Tensor = torch.from_numpy(zbuffer).float().to(device)

st.header('Result')

len_style = model_data[model_key]['len_style']
# Load the pretrained model
model = load_model(model_data[model_key]['path'], len_style, model_data[model_key]['url'])
col1, col2 = st.columns(2)
col2.subheader('Visualization Controls')
zbuffer = zbuffer.unsqueeze(-1).permute(2, 0, 1)
zbuffer: torch.Tensor = zbuffer.float().to(device).unsqueeze(0)

style = torch.zeros(len_style, dtype=zbuffer.dtype, device=device)

# Pick the color and light-direction visualization parameters
hex_color = col2.color_picker('Pick A Color', DEFAULT_COLOR)
style[0], style[1], style[2] = to_rgb(hex_color)
style[:3] = style[:3].clip(0.0, 0.9)

# Light direction
style[3] = col2.slider('Light Radius', min_value=-1.0, max_value=1.0, value=0.0)  # delta_r
style[4] = col2.slider('Light Phi', min_value=-math.pi / 4, max_value=math.pi / 4, value=0.0)  # delta_phi
style[5] = col2.slider('Light Theta', min_value=-math.pi / 4, max_value=math.pi / 4, value=0.0)  # delta_theta

# Extra controls for the metallic-roughness model
if len_style == 8:
    style[6] = col2.slider('Metallic', min_value=0.0, max_value=1.0, value=0.5)
    style[7] = col2.slider('Roughness', min_value=0.0, max_value=1.0, value=0.5)

style = style.unsqueeze(0)

# Generate the image with the pretrained model
with torch.no_grad():
    generated = model(zbuffer.float(), style)

# Embed a white background behind the object using the alpha map,
# as well as the color used as input in the bottom-right corner
generated = util.embed_color(generated.detach(), style[:, :3], box_size=50)
rendered = generated[0].permute(1, 2, 0).cpu().numpy()

# Show the image in Streamlit
col1.image(rendered.clip(0, 1), use_column_width=True)
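Note: as a minimal sketch of the full 8-slot style vector the app assembles for the Metal-Roughness model, the snippet below lays out each slot with placeholder values chosen inside the app's slider ranges; it is not part of the commit, and to_rgb is copied from app.py above.

import math
import torch

# Hypothetical sketch of the style vector app.py builds when len_style == 8.
def to_rgb(hex_color: str):
    h = hex_color.lstrip('#')
    return [float(int(h[i:i + 2], 16)) / 255 for i in (0, 2, 4)]

style = torch.zeros(8)
style[0], style[1], style[2] = to_rgb('#E1E1E1')  # base color (the app clips these to <= 0.9)
style[3] = 0.0                                    # light radius delta, range [-1, 1]
style[4] = math.pi / 8                            # light phi delta, range [-pi/4, pi/4]
style[5] = 0.0                                    # light theta delta, range [-pi/4, pi/4]
style[6] = 0.5                                    # metallic, range [0, 1]
style[7] = 0.5                                    # roughness, range [0, 1]
style = style.unsqueeze(0)                        # (1, 8): PosADANet expects a batch dimension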
models.py
ADDED
@@ -0,0 +1,78 @@
from networks import *


class PosADANet(nn.Module):
    def encode(self, shp):
        device = self.omega.device
        B, _, H, W = shp
        row = torch.arange(H).to(device) / H
        enc_row1 = torch.sin(self.omega[None, :] * row[:, None])
        enc_row2 = torch.cos(self.omega[None, :] * row[:, None])
        rows = torch.cat([enc_row1.unsqueeze(1).repeat((1, W, 1)), enc_row2.unsqueeze(1).repeat((1, W, 1))], dim=-1)

        col = torch.arange(W).to(device) / W
        enc_col1 = torch.sin(self.omega[None, :] * col[:, None])
        enc_col2 = torch.cos(self.omega[None, :] * col[:, None])
        cols = torch.cat([enc_col1.unsqueeze(0).repeat((H, 1, 1)), enc_col2.unsqueeze(0).repeat((H, 1, 1))], dim=-1)

        encoding = torch.cat([rows, cols], dim=-1)
        encoding = encoding.permute(2, 0, 1).unsqueeze(0).repeat((B, 1, 1, 1))
        return encoding

    def get_encoding(self, x):
        shp1 = x.shape
        # Reuse the cached positional encoding if it matches the input's batch size and spatial size
        cached = (self.positional_encoding is not None
                  and self.positional_encoding.shape[0] == shp1[0]
                  and self.positional_encoding.shape[2:] == shp1[2:])
        if cached:
            return self.positional_encoding
        self.positional_encoding = self.encode(x.shape)
        return self.positional_encoding

    def __init__(self, input_channels, output_channels, n_style, bilinear=True, padding='zeros', full_ada=True,
                 nfreq=20, magnitude=10):
        super(PosADANet, self).__init__()
        factor = 2 if bilinear else 1
        self.omega = nn.Parameter(torch.rand(nfreq) * magnitude)
        self.omega.requires_grad = False
        self.positional_encoding = None
        self.full_ada = full_ada

        self.style_encoder = FullyConnected(n_style, W_SIZE, layers=6)
        self.padding = padding
        self.input_channels = input_channels + nfreq * 4
        self.n_classes = output_channels
        self.bilinear = bilinear
        self.channels = [512 // factor, 256 // factor, 128 // factor]
        self.inc = DoubleConv(self.input_channels, 64)
        self.down1 = Down(64, 128, padding=padding, ada=self.full_ada)
        self.down2 = Down(128, 256, padding=padding, ada=self.full_ada)
        self.down3 = Down(256, 512, padding=padding, ada=self.full_ada)
        self.down4 = Down(512, 1024 // factor, padding=padding, ada=self.full_ada)
        self.up1 = Up(1024, 512 // factor, bilinear, ada=True, padding=padding)
        self.up2 = Up(512, 256 // factor, bilinear, ada=True, padding=padding)
        self.up3 = Up(256, 128 // factor, bilinear, ada=True, padding=padding)
        self.up4 = Up(128, 64, bilinear, padding=padding, ada=True)
        self.outc = OutConv(64, output_channels, padding=padding)

    def forward(self, x, style):
        w = self.style_encoder(style)
        encoding = self.get_encoding(x)
        x = torch.cat([x, encoding], dim=1)

        x1 = self.inc(x)
        if self.full_ada:
            x2 = self.down1(x1, w=w)
            x3 = self.down2(x2, w=w)
            x4 = self.down3(x3, w=w)
            x5 = self.down4(x4, w=w)
        else:
            x2 = self.down1(x1)
            x3 = self.down2(x2)
            x4 = self.down3(x3)
            x5 = self.down4(x4)
        x = self.up1(x5, x4, w=w)
        x = self.up2(x, x3, w=w)
        x = self.up3(x, x2, w=w)
        x = self.up4(x, x1, w=w)
        logits = self.outc(x)
        return logits
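Note: a minimal smoke test of the network, assuming the same constructor arguments app.py uses for the "Regular" model (1 input channel, 4 output channels, a 6-value style vector); the input sizes are arbitrary and the snippet is not part of the commit.

import torch
from models import PosADANet

# Hypothetical smoke test: forward a dummy 256x256 z-buffer through the generator.
device = torch.device('cpu')
model = PosADANet(1, 4, 6, padding='zeros', bilinear=True).to(device).eval()

zbuffer = torch.rand(1, 1, 256, 256, device=device)       # (B, C, H, W) z-buffer
style = torch.tensor([[0.8, 0.8, 0.8, 0.0, 0.0, 0.0]])    # RGB + light (radius, phi, theta) deltas

with torch.no_grad():
    out = model(zbuffer, style)
print(out.shape)  # torch.Size([1, 4, 256, 256]) -- RGB plus an alpha channel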
models/default_settings.json
ADDED
@@ -0,0 +1,17 @@
{
    "Regular": {
        "path": "models/model.pt",
        "point_radius": 3,
        "len_style": 6,
        "file_id": "1NqMotLa3kxtmYni8P4kYc4T9rFNsrQlv",
        "url": "https://drive.google.com/u/0/uc?id=1NqMotLa3kxtmYni8P4kYc4T9rFNsrQlv&export=download"
    },

    "Metal-Roughness": {
        "path": "models/mr.pt",
        "point_radius": 3,
        "len_style": 8,
        "file_id": "1A70qTfZSshKewF2udl_yxwwH9Y29Wb_f",
        "url": "https://drive.google.com/u/0/uc?id=1A70qTfZSshKewF2udl_yxwwH9Y29Wb_f&export=download"
    }
}
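Note: app.py reads the path, point_radius, len_style and url fields from each entry above; a small hypothetical sanity check (not part of the commit) that verifies they are present:

import json

# Hypothetical check: every model entry must expose the keys app.py reads.
with open('models/default_settings.json') as f:
    model_data = json.load(f)

for name, cfg in model_data.items():
    assert {'path', 'point_radius', 'len_style', 'url'} <= set(cfg)
    print(name, '-> style vector length', cfg['len_style'])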
networks.py
ADDED
@@ -0,0 +1,169 @@
import torch
import torch.nn.functional as F
from torch import nn

W_SIZE = 512


def calc_mean_std(feat, eps=1e-5):
    # eps is a small value added to the variance to avoid divide-by-zero.
    size = feat.size()
    assert (len(size) == 4)
    N, C = size[:2]
    feat_var = feat.view(N, C, -1).var(dim=2) + eps
    feat_std = feat_var.sqrt().view(N, C, 1, 1)
    feat_mean = feat.view(N, C, -1).mean(dim=2).view(N, C, 1, 1)
    return feat_mean, feat_std


def adain(content_feat, style_feat):
    assert (content_feat.size()[:2] == style_feat[0].size()[:2]) and (content_feat.size()[:2] == style_feat[1].size()[:2])
    size = content_feat.size()
    style_mean, style_std = style_feat
    style_mean, style_std = style_mean.unsqueeze(-1).unsqueeze(-1), style_std.unsqueeze(-1).unsqueeze(-1)
    content_mean, content_std = calc_mean_std(content_feat)

    normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand(size)
    return normalized_feat * style_std.expand(size) + style_mean.expand(size)


class FullyConnected(nn.Module):
    def __init__(self, input_channels: int, output_channels: int, layers=3):
        super(FullyConnected, self).__init__()
        self.channels = torch.linspace(input_channels, output_channels, layers + 1).long()
        self.layers = nn.Sequential(
            *[nn.Linear(self.channels[i].item(), self.channels[i + 1].item()) for i in range(len(self.channels) - 1)]
        )

    def forward(self, x):
        return self.layers(x)


class Affine(nn.Module):
    def __init__(self, input_channels: int, output_channels):
        super(Affine, self).__init__()
        self.lin = nn.Linear(input_channels, output_channels)
        bias = torch.zeros(output_channels)
        nn.init.normal_(bias, 0, 1)
        self.bias = nn.Parameter(bias)

    def forward(self, x):
        return self.lin(x) + self.bias


class DoubleConv(nn.Module):
    """(convolution => [norm] => ReLU) * 2, normalized with either AdaIN or InstanceNorm"""

    def __init__(self, in_channels, out_channels, mid_channels=None, ada=False, padding='zeros'):
        super().__init__()
        if not mid_channels:
            mid_channels = out_channels
        self.ada = ada
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_channels, mid_channels, kernel_size=3, padding=1, padding_mode=padding)
        if ada:
            self.a1_mean = Affine(W_SIZE, mid_channels)
            self.a1_std = Affine(W_SIZE, mid_channels)
        else:
            self.norm1 = nn.InstanceNorm2d(mid_channels, affine=True)
        self.conv2 = nn.Conv2d(mid_channels, out_channels, kernel_size=3, padding=1, padding_mode=padding)

        if ada:
            self.a2_mean = Affine(W_SIZE, out_channels)
            self.a2_std = Affine(W_SIZE, out_channels)
        else:
            self.norm2 = nn.InstanceNorm2d(out_channels, affine=True)

    def forward(self, x, w=None):
        if self.ada:
            assert w is not None

        x = self.conv1(x)

        if self.ada:
            x = adain(x, (self.a1_mean(w), self.a1_std(w)))
        else:
            x = self.norm1(x)
        x = self.relu(x)

        x = self.conv2(x)
        if self.ada:
            x = adain(x, (self.a2_mean(w), self.a2_std(w)))
        else:
            x = self.norm2(x)
        x = self.relu(x)

        return x


class DiluteConv(nn.Module):
    """A single dilated convolution => InstanceNorm => ReLU"""

    def __init__(self, in_channels, out_channels, dilation, padding='zeros'):
        super().__init__()
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3,
                               padding=1 + dilation, dilation=dilation, padding_mode=padding)
        self.norm1 = nn.InstanceNorm2d(out_channels, affine=True)

    def forward(self, x, y=None):
        if y is not None:
            x = torch.cat([x, y], dim=1)

        x = self.conv1(x)
        x = self.norm1(x)
        x = self.relu(x)
        return x


class Down(nn.Module):
    """Downscaling with maxpool then double conv"""

    def __init__(self, in_channels, out_channels, ada=False, padding='zeros'):
        super().__init__()
        self.max_pool = nn.MaxPool2d(2)
        self.double_conv = DoubleConv(in_channels, out_channels, ada=ada, padding=padding)

    def forward(self, x, w=None):
        x = self.max_pool(x)
        return self.double_conv(x, w)


class Up(nn.Module):
    """Upscaling then double conv"""

    def __init__(self, in_channels, out_channels, bilinear=True, ada=False, padding='zeros'):
        super().__init__()

        # if bilinear, use the normal convolutions to reduce the number of channels
        if bilinear:
            self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)
            self.conv = DoubleConv(in_channels, out_channels, in_channels // 2, ada=ada)
        else:
            self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2)
            self.conv = DoubleConv(in_channels, out_channels, ada=ada)

    def forward(self, x1, x2, w=None):
        x1 = self.up(x1)
        # input is CHW
        diffY = x2.size()[2] - x1.size()[2]
        diffX = x2.size()[3] - x1.size()[3]

        x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2,
                        diffY // 2, diffY - diffY // 2])
        # if you have padding issues, see
        # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a
        # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd
        x = torch.cat([x2, x1], dim=1)
        return self.conv(x, w)


class OutConv(nn.Module):
    def __init__(self, in_channels, out_channels, padding='zeros'):
        super(OutConv, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, padding_mode=padding)

    def forward(self, x):
        return self.conv(x)
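Note: a minimal sketch (not part of the commit) of how the AdaIN path is driven: a style code produced by the FullyConnected mapping network modulates the per-channel mean and std inside an ada-enabled DoubleConv block; the shapes and inputs here are arbitrary.

import torch
from networks import DoubleConv, FullyConnected, W_SIZE

# Hypothetical demo of the AdaIN conditioning path.
block = DoubleConv(in_channels=3, out_channels=16, ada=True)   # AdaIN instead of InstanceNorm
mapper = FullyConnected(6, W_SIZE, layers=6)                   # 6-value style -> W_SIZE-dim code

x = torch.rand(2, 3, 64, 64)     # feature map (B, C, H, W)
w = mapper(torch.rand(2, 6))     # per-sample style code, shape (2, W_SIZE)
y = block(x, w=w)
print(y.shape)                   # torch.Size([2, 16, 64, 64])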
point_clouds/chair.obj
ADDED
@@ -0,0 +1,2048 @@
v 0.234929 0.337874 0.270455
v -0.108035 0.987172 0.261880
v -0.131206 -0.112986 -0.373481
v -0.154145 0.264240 0.263408
[... 2048 vertex records of the form "v x y z" in total; the rest of the point-cloud listing is omitted from this view ...]
|
809 |
+
v 0.289721 -0.026386 -0.356785
|
810 |
+
v -0.006627 0.685996 0.186091
|
811 |
+
v -0.288413 -0.266787 0.267094
|
812 |
+
v -0.365675 0.675700 0.181364
|
813 |
+
v 0.305747 -0.508025 0.278725
|
814 |
+
v -0.335810 0.397597 0.271943
|
815 |
+
v 0.387336 -0.256160 0.193313
|
816 |
+
v -0.218630 -0.103593 -0.111114
|
817 |
+
v -0.295069 -0.438159 -0.404843
|
818 |
+
v 0.357985 -0.625895 -0.395898
|
819 |
+
v 0.206395 0.976531 0.234042
|
820 |
+
v 0.200977 -0.018870 -0.425419
|
821 |
+
v -0.391550 0.166808 0.178004
|
822 |
+
v -0.352402 -0.711671 0.177153
|
823 |
+
v -0.273333 -0.633704 -0.464073
|
824 |
+
v -0.385496 -0.587730 0.220033
|
825 |
+
v 0.238316 -0.492748 -0.443712
|
826 |
+
v -0.134253 -0.476257 -0.480325
|
827 |
+
v 0.283782 -0.461164 0.163165
|
828 |
+
v -0.033587 0.372631 0.251613
|
829 |
+
v -0.295696 -0.306100 0.158122
|
830 |
+
v 0.377625 -0.706771 0.144838
|
831 |
+
v 0.065711 -0.668046 0.269277
|
832 |
+
v 0.310851 0.594389 0.169701
|
833 |
+
v -0.310384 -0.096298 -0.204179
|
834 |
+
v 0.165624 0.608609 0.181896
|
835 |
+
v -0.069131 0.485825 0.268357
|
836 |
+
v -0.318077 -0.029134 0.028636
|
837 |
+
v 0.389348 -0.695945 0.143850
|
838 |
+
v -0.018062 0.113589 0.261892
|
839 |
+
v 0.214600 -0.016553 -0.459963
|
840 |
+
v -0.052372 0.028539 0.262307
|
841 |
+
v -0.014243 0.041671 0.274764
|
842 |
+
v 0.022932 -0.026070 0.076177
|
843 |
+
v 0.075535 -0.114745 0.269747
|
844 |
+
v -0.131142 -0.334151 -0.473532
|
845 |
+
v 0.184512 -0.660642 0.202682
|
846 |
+
v -0.096558 -0.188781 -0.393466
|
847 |
+
v 0.310329 -0.690960 0.154710
|
848 |
+
v 0.360169 -0.621315 0.236246
|
849 |
+
v 0.352906 -0.297936 -0.387815
|
850 |
+
v 0.385920 -0.694058 0.156938
|
851 |
+
v -0.009933 -0.370804 -0.398581
|
852 |
+
v 0.218368 0.303236 0.176900
|
853 |
+
v 0.245793 -0.105779 -0.246145
|
854 |
+
v 0.394717 -0.127228 0.129353
|
855 |
+
v 0.226597 0.436646 0.245463
|
856 |
+
v -0.096383 0.656454 0.261855
|
857 |
+
v 0.184912 0.640899 0.201584
|
858 |
+
v -0.072920 0.203815 0.179385
|
859 |
+
v 0.389511 -0.655900 0.118836
|
860 |
+
v 0.160781 -0.544774 -0.464353
|
861 |
+
v 0.247826 0.046073 0.162508
|
862 |
+
v -0.211113 -0.231972 -0.473847
|
863 |
+
v 0.130196 -0.034519 -0.020602
|
864 |
+
v 0.336444 -0.006033 0.266257
|
865 |
+
v -0.323519 -0.025148 -0.143527
|
866 |
+
v -0.391061 -0.215041 0.197289
|
867 |
+
v -0.218873 -0.128012 0.229197
|
868 |
+
v 0.099048 0.510627 0.268961
|
869 |
+
v -0.148536 0.331206 0.181922
|
870 |
+
v 0.036348 -0.010846 0.133989
|
871 |
+
v -0.048516 -0.376152 -0.470998
|
872 |
+
v -0.330522 0.563542 0.260927
|
873 |
+
v 0.291374 -0.269176 0.273638
|
874 |
+
v 0.171697 -0.022676 -0.339538
|
875 |
+
v -0.256901 -0.329001 -0.376135
|
876 |
+
v -0.286617 -0.079966 -0.087451
|
877 |
+
v 0.296927 -0.301952 -0.370408
|
878 |
+
v 0.392803 0.323026 0.202613
|
879 |
+
v 0.388276 -0.117070 -0.009884
|
880 |
+
v -0.380825 -0.445706 0.178690
|
881 |
+
v 0.291321 0.213559 0.271041
|
882 |
+
v -0.305533 0.500088 0.172173
|
883 |
+
v 0.317677 -0.525115 -0.358534
|
884 |
+
v 0.102855 -0.498566 -0.401182
|
885 |
+
v 0.188381 -0.712923 -0.378088
|
886 |
+
v -0.349228 -0.486553 0.156116
|
887 |
+
v -0.354273 -0.121038 0.076390
|
888 |
+
v -0.125540 0.403744 0.265011
|
889 |
+
v -0.380220 -0.456939 0.253413
|
890 |
+
v 0.179006 0.083527 0.263377
|
891 |
+
v -0.236194 0.666144 0.184584
|
892 |
+
v 0.019126 -0.039988 0.111258
|
893 |
+
v -0.042911 -0.584424 -0.375218
|
894 |
+
v -0.185316 0.622559 0.251478
|
895 |
+
v -0.395797 0.039688 0.255004
|
896 |
+
v -0.041647 0.533712 0.173394
|
897 |
+
v 0.335965 -0.509542 0.275968
|
898 |
+
v 0.201711 0.328091 0.269918
|
899 |
+
v -0.403772 -0.060886 0.191871
|
900 |
+
v -0.387134 0.048115 0.169290
|
901 |
+
v -0.057228 -0.123895 0.261880
|
902 |
+
v 0.303171 -0.009832 0.162424
|
903 |
+
v 0.092011 0.412522 0.262909
|
904 |
+
v 0.269578 0.516735 0.257393
|
905 |
+
v -0.198830 -0.027670 -0.425901
|
906 |
+
v 0.377051 -0.372648 0.173852
|
907 |
+
v 0.180299 -0.218163 -0.414409
|
908 |
+
v 0.058351 0.016386 0.263677
|
909 |
+
v -0.211300 0.038234 0.171704
|
910 |
+
v 0.120246 -0.091184 -0.047813
|
911 |
+
v 0.378182 -0.386341 -0.434263
|
912 |
+
v -0.222118 -0.110423 0.078483
|
913 |
+
v 0.177231 0.504936 0.270452
|
914 |
+
v -0.060291 -0.028251 -0.052111
|
915 |
+
v 0.400591 0.301518 0.238194
|
916 |
+
v 0.144087 0.663100 0.268445
|
917 |
+
v 0.239578 0.047714 0.166236
|
918 |
+
v 0.100602 -0.678260 -0.463944
|
919 |
+
v -0.050845 0.007342 0.253296
|
920 |
+
v -0.301829 -0.026677 -0.254052
|
921 |
+
v -0.126033 0.377869 0.269345
|
922 |
+
v -0.238763 0.523934 0.269343
|
923 |
+
v 0.049669 -0.021070 -0.435418
|
924 |
+
v -0.226463 0.804929 0.197584
|
925 |
+
v 0.391541 0.682016 0.206576
|
926 |
+
v -0.396277 -0.674961 0.124735
|
927 |
+
v 0.214587 -0.001888 0.262461
|
928 |
+
v 0.292975 -0.426217 0.212572
|
929 |
+
v 0.116877 -0.607238 -0.467239
|
930 |
+
v -0.132787 -0.093043 0.104990
|
931 |
+
v -0.308855 -0.104533 -0.312284
|
932 |
+
v -0.265501 -0.117645 0.118891
|
933 |
+
v 0.048125 -0.037763 -0.025740
|
934 |
+
v 0.008467 -0.026929 -0.345044
|
935 |
+
v -0.124837 0.306952 0.208691
|
936 |
+
v 0.366964 -0.681751 -0.383796
|
937 |
+
v -0.374244 -0.617813 -0.406860
|
938 |
+
v 0.352116 0.813476 0.201876
|
939 |
+
v 0.271354 -0.643020 -0.473041
|
940 |
+
v 0.364318 0.823360 0.188103
|
941 |
+
v -0.282651 -0.018293 -0.363468
|
942 |
+
v 0.048496 0.312911 0.181195
|
943 |
+
v 0.240330 -0.203150 -0.469092
|
944 |
+
v 0.341979 0.658676 0.186204
|
945 |
+
v -0.309327 -0.506315 -0.368434
|
946 |
+
v -0.293184 -0.102758 -0.044467
|
947 |
+
v 0.028181 -0.679131 -0.360488
|
948 |
+
v 0.256996 -0.583209 -0.374400
|
949 |
+
v -0.206154 -0.497336 -0.458638
|
950 |
+
v 0.313856 0.457963 0.172677
|
951 |
+
v -0.354082 -0.183999 0.094512
|
952 |
+
v -0.277633 -0.355008 -0.471906
|
953 |
+
v -0.270731 -0.030454 -0.329189
|
954 |
+
v 0.325646 -0.516136 -0.371380
|
955 |
+
v -0.238977 0.456953 0.259029
|
956 |
+
v -0.143142 -0.034343 0.097344
|
957 |
+
v 0.025561 -0.456989 -0.484188
|
958 |
+
v -0.277707 -0.694835 0.232055
|
959 |
+
v -0.165989 0.343199 0.255205
|
960 |
+
v -0.081881 -0.031341 -0.035244
|
961 |
+
v -0.386074 0.697174 0.195450
|
962 |
+
v 0.199296 -0.397397 -0.384646
|
963 |
+
v 0.010706 -0.027930 -0.122718
|
964 |
+
v -0.367782 0.695370 0.178146
|
965 |
+
v -0.100379 0.316521 0.178642
|
966 |
+
v -0.103254 0.917551 0.238973
|
967 |
+
v -0.106203 -0.398002 -0.470666
|
968 |
+
v 0.180758 -0.250227 -0.408139
|
969 |
+
v 0.218924 -0.020897 -0.247762
|
970 |
+
v 0.247105 0.631299 0.268897
|
971 |
+
v 0.333051 -0.176824 0.121436
|
972 |
+
v 0.204189 -0.023729 -0.440700
|
973 |
+
v -0.026050 -0.107164 -0.104147
|
974 |
+
v 0.336489 0.860660 0.208767
|
975 |
+
v 0.105290 -0.536819 -0.480096
|
976 |
+
v 0.210021 -0.185065 -0.388557
|
977 |
+
v 0.132252 -0.351349 -0.467375
|
978 |
+
v 0.194356 -0.024981 -0.331044
|
979 |
+
v -0.158226 -0.022853 -0.360644
|
980 |
+
v 0.261695 -0.638326 -0.474481
|
981 |
+
v -0.190784 0.148163 0.264365
|
982 |
+
v -0.311544 -0.271842 0.219595
|
983 |
+
v -0.144977 0.210398 0.189768
|
984 |
+
v -0.176683 -0.103439 -0.402099
|
985 |
+
v 0.407258 -0.103908 0.254438
|
986 |
+
v 0.399017 0.561496 0.218461
|
987 |
+
v 0.048970 -0.217896 -0.476033
|
988 |
+
v 0.192743 -0.699030 -0.427479
|
989 |
+
v 0.307995 -0.656159 -0.326923
|
990 |
+
v 0.188799 0.702718 0.185409
|
991 |
+
v 0.271174 -0.699106 -0.306598
|
992 |
+
v 0.145606 -0.100657 -0.029464
|
993 |
+
v 0.150967 0.845418 0.210602
|
994 |
+
v -0.055711 -0.519637 -0.480048
|
995 |
+
v -0.047043 0.010968 0.265179
|
996 |
+
v 0.379760 -0.197811 0.163468
|
997 |
+
v 0.022703 0.391000 0.182444
|
998 |
+
v 0.174130 0.005356 0.182064
|
999 |
+
v -0.322922 -0.024121 -0.287693
|
1000 |
+
v -0.289251 0.105762 0.176688
|
1001 |
+
v -0.361415 0.460219 0.232971
|
1002 |
+
v -0.113009 -0.365594 -0.474563
|
1003 |
+
v -0.297582 -0.537172 0.163294
|
1004 |
+
v -0.262592 0.484244 0.269768
|
1005 |
+
v 0.267110 0.263087 0.260092
|
1006 |
+
v 0.375514 0.910416 0.217508
|
1007 |
+
v 0.279737 -0.714696 0.200342
|
1008 |
+
v 0.388323 -0.232937 -0.434391
|
1009 |
+
v 0.165373 -0.090903 -0.024119
|
1010 |
+
v 0.352593 0.302331 0.181057
|
1011 |
+
v 0.118780 -0.033067 -0.105974
|
1012 |
+
v -0.126873 -0.171653 -0.374576
|
1013 |
+
v -0.254984 0.485152 0.176822
|
1014 |
+
v -0.389634 -0.486616 0.215568
|
1015 |
+
v -0.338189 -0.356663 -0.457257
|
1016 |
+
v 0.319257 -0.633683 0.171419
|
1017 |
+
v -0.183830 0.875326 0.275382
|
1018 |
+
v 0.240252 0.148507 0.175925
|
1019 |
+
v -0.056811 -0.203496 -0.378187
|
1020 |
+
v -0.253574 -0.116127 0.213360
|
1021 |
+
v 0.372086 -0.157483 -0.400582
|
1022 |
+
v -0.396802 0.742097 0.223186
|
1023 |
+
v 0.003294 0.014195 0.176300
|
1024 |
+
v -0.061402 0.679865 0.188561
|
1025 |
+
v -0.116934 -0.192198 -0.463384
|
1026 |
+
v 0.070547 -0.563669 -0.473988
|
1027 |
+
v -0.390422 -0.037597 0.121828
|
1028 |
+
v 0.094991 -0.105167 -0.396133
|
1029 |
+
v 0.056234 -0.374437 -0.396037
|
1030 |
+
v -0.130389 0.387880 0.259227
|
1031 |
+
v -0.170287 0.930153 0.260215
|
1032 |
+
v 0.279877 -0.032262 -0.059313
|
1033 |
+
v 0.276083 -0.460219 0.259079
|
1034 |
+
v -0.288727 -0.693072 0.175977
|
1035 |
+
v 0.279526 0.695888 0.265116
|
1036 |
+
v 0.346096 0.075281 0.271246
|
1037 |
+
v -0.225656 -0.601377 -0.352627
|
1038 |
+
v 0.217839 -0.651178 -0.339215
|
1039 |
+
v -0.360332 -0.590637 -0.407344
|
1040 |
+
v 0.181755 -0.036455 0.065550
|
1041 |
+
v 0.397828 -0.658416 0.249765
|
1042 |
+
v -0.099015 0.079590 0.223014
|
1043 |
+
v 0.283692 -0.548181 0.162477
|
1044 |
+
v 0.038882 0.141529 0.264624
|
1045 |
+
v -0.207119 0.056986 0.262253
|
1046 |
+
v -0.263523 -0.054082 0.189820
|
1047 |
+
v 0.248574 0.825944 0.200700
|
1048 |
+
v -0.338868 -0.318431 0.170447
|
1049 |
+
v -0.024343 -0.100427 -0.276954
|
1050 |
+
v 0.243065 0.021874 0.170352
|
1051 |
+
v -0.369886 -0.166498 -0.382317
|
1052 |
+
v 0.043523 -0.465843 -0.448775
|
1053 |
+
v -0.006083 0.248416 0.186434
|
1054 |
+
v 0.324143 0.675579 0.259742
|
1055 |
+
v 0.397087 0.220836 0.170690
|
1056 |
+
v -0.051763 0.027410 0.173092
|
1057 |
+
v 0.219371 0.642115 0.187181
|
1058 |
+
v 0.041419 0.467422 0.270693
|
1059 |
+
v -0.316528 0.913115 0.267252
|
1060 |
+
v -0.397635 0.707876 0.258294
|
1061 |
+
v -0.094318 0.071100 0.270597
|
1062 |
+
v 0.338938 -0.234738 -0.381478
|
1063 |
+
v 0.379305 -0.100378 -0.087034
|
1064 |
+
v 0.311697 -0.704517 0.131143
|
1065 |
+
v 0.125480 -0.003870 0.113190
|
1066 |
+
v -0.300942 -0.694209 -0.381679
|
1067 |
+
v 0.150587 0.652780 0.192126
|
1068 |
+
v 0.035303 -0.188313 -0.468273
|
1069 |
+
v -0.139286 -0.326071 -0.412568
|
1070 |
+
v 0.312962 0.800517 0.222700
|
1071 |
+
v 0.099489 -0.133170 0.208449
|
1072 |
+
v 0.031261 -0.093371 -0.317582
|
1073 |
+
v 0.136126 -0.095505 0.269544
|
1074 |
+
v -0.130246 0.406595 0.264435
|
1075 |
+
v 0.155298 -0.098898 -0.405837
|
1076 |
+
v 0.193240 -0.099919 -0.147222
|
1077 |
+
v -0.025309 0.131180 0.178966
|
1078 |
+
v -0.148705 -0.696542 -0.450956
|
1079 |
+
v 0.148837 0.496474 0.176378
|
1080 |
+
v 0.314085 -0.112314 -0.372548
|
1081 |
+
v 0.231031 -0.085289 -0.339317
|
1082 |
+
v 0.039418 0.929912 0.226607
|
1083 |
+
v 0.392567 0.604285 0.173881
|
1084 |
+
v -0.237314 -0.284226 -0.429205
|
1085 |
+
v 0.185597 0.616321 0.183493
|
1086 |
+
v -0.390971 -0.135642 0.213441
|
1087 |
+
v -0.137612 0.758169 0.269831
|
1088 |
+
v -0.237896 0.787271 0.271380
|
1089 |
+
v -0.202751 0.058219 0.246385
|
1090 |
+
v -0.117833 -0.023849 -0.433042
|
1091 |
+
v -0.332287 0.239419 0.227828
|
1092 |
+
v 0.060823 -0.115107 0.267761
|
1093 |
+
v -0.122486 0.211584 0.178050
|
1094 |
+
v -0.096265 -0.030553 -0.005920
|
1095 |
+
v -0.387226 -0.036023 -0.429027
|
1096 |
+
v 0.277745 0.627785 0.232723
|
1097 |
+
v 0.294338 -0.598479 0.258653
|
1098 |
+
v -0.362256 -0.136192 0.180831
|
1099 |
+
v 0.392966 -0.045686 -0.323039
|
1100 |
+
v 0.212424 -0.624560 -0.387233
|
1101 |
+
v -0.184887 -0.101565 -0.110332
|
1102 |
+
v 0.205395 -0.100112 -0.147822
|
1103 |
+
v 0.318742 -0.709378 0.220712
|
1104 |
+
v 0.063134 0.518292 0.182086
|
1105 |
+
v -0.376997 -0.039687 0.272221
|
1106 |
+
v 0.309708 0.101585 0.266973
|
1107 |
+
v 0.305734 -0.092088 -0.358605
|
1108 |
+
v 0.069975 -0.322863 -0.408623
|
1109 |
+
v -0.193559 -0.538137 -0.479929
|
1110 |
+
v 0.360075 -0.104852 -0.254171
|
1111 |
+
v 0.011371 0.217455 0.175918
|
1112 |
+
v 0.153317 -0.080125 0.268171
|
1113 |
+
v -0.373676 -0.350189 -0.454623
|
1114 |
+
v -0.174652 -0.202759 -0.397139
|
1115 |
+
v 0.018783 -0.025036 -0.304161
|
1116 |
+
v -0.373166 -0.406618 0.218298
|
1117 |
+
v 0.157575 0.042775 0.186212
|
1118 |
+
v -0.036930 -0.021715 -0.396510
|
1119 |
+
v -0.325223 -0.111126 -0.209617
|
1120 |
+
v -0.379548 0.523983 0.210440
|
1121 |
+
v -0.100076 0.311759 0.268413
|
1122 |
+
v -0.057848 -0.469444 -0.468924
|
1123 |
+
v 0.211740 -0.044345 0.274901
|
1124 |
+
v -0.312041 0.297597 0.170582
|
1125 |
+
v -0.142326 -0.102395 -0.095542
|
1126 |
+
v 0.351228 0.603268 0.261022
|
1127 |
+
v -0.204319 -0.138202 0.190374
|
1128 |
+
v 0.349213 -0.117459 -0.346797
|
1129 |
+
v -0.245743 -0.508825 -0.402812
|
1130 |
+
v -0.106459 0.390242 0.270872
|
1131 |
+
v 0.089014 0.542314 0.238949
|
1132 |
+
v 0.326984 -0.444747 -0.366036
|
1133 |
+
v -0.272207 -0.007279 0.200990
|
1134 |
+
v 0.076794 0.034087 0.172421
|
1135 |
+
v -0.379764 -0.150727 -0.146954
|
1136 |
+
v 0.069646 -0.404032 -0.392546
|
1137 |
+
v -0.145301 -0.027455 -0.342669
|
1138 |
+
v -0.386921 -0.084182 -0.304009
|
1139 |
+
v 0.232033 -0.552375 -0.473662
|
1140 |
+
v 0.003966 0.832451 0.237911
|
1141 |
+
v 0.320421 -0.578328 -0.372768
|
1142 |
+
v 0.343155 -0.385264 -0.372548
|
1143 |
+
v -0.307405 -0.621760 0.275481
|
1144 |
+
v -0.204937 -0.675300 -0.355241
|
1145 |
+
v 0.397679 -0.053626 0.072048
|
1146 |
+
v 0.066495 -0.710135 -0.426566
|
1147 |
+
v 0.300992 -0.543433 -0.465642
|
1148 |
+
v -0.315608 -0.118958 0.077382
|
1149 |
+
v 0.325295 -0.131621 0.267829
|
1150 |
+
v 0.087008 -0.188756 -0.461766
|
1151 |
+
v 0.147816 -0.026116 -0.200181
|
1152 |
+
v -0.044943 0.878215 0.239508
|
1153 |
+
v -0.194347 0.882222 0.209302
|
1154 |
+
v 0.200475 -0.313296 -0.463265
|
1155 |
+
v -0.023754 -0.031125 0.019949
|
1156 |
+
v 0.369458 -0.393945 -0.452909
|
1157 |
+
v -0.179741 -0.660692 -0.471493
|
1158 |
+
v 0.012988 0.863878 0.221173
|
1159 |
+
v -0.209931 -0.112624 0.074832
|
1160 |
+
v 0.142801 -0.591627 -0.385912
|
1161 |
+
v -0.087604 -0.105954 0.083945
|
1162 |
+
v 0.262771 -0.118605 -0.394570
|
1163 |
+
v 0.101484 0.950886 0.231428
|
1164 |
+
v 0.033862 0.378062 0.184263
|
1165 |
+
v -0.353327 -0.572108 -0.401100
|
1166 |
+
v 0.256239 -0.159746 -0.368857
|
1167 |
+
v 0.037264 -0.100591 -0.061201
|
1168 |
+
v 0.319167 -0.101570 0.161929
|
1169 |
+
v -0.186673 -0.020967 -0.301732
|
1170 |
+
v -0.189997 -0.188745 -0.473834
|
1171 |
+
v 0.030230 0.844920 0.208287
|
1172 |
+
v 0.236845 -0.460237 -0.481485
|
1173 |
+
v -0.404878 -0.006861 0.240403
|
1174 |
+
v 0.218133 0.226628 0.177491
|
1175 |
+
v 0.149046 -0.109144 0.126792
|
1176 |
+
v -0.320739 -0.703627 -0.389406
|
1177 |
+
v 0.197525 -0.035273 0.198840
|
1178 |
+
v -0.311097 -0.097472 0.260119
|
1179 |
+
v -0.312812 -0.100609 -0.039803
|
1180 |
+
v -0.408610 0.377526 0.244435
|
1181 |
+
v -0.031060 0.466823 0.186186
|
1182 |
+
v -0.390008 0.751259 0.186706
|
1183 |
+
v 0.122923 -0.089487 -0.003743
|
1184 |
+
v 0.130509 -0.019561 -0.411840
|
1185 |
+
v 0.167894 -0.582190 -0.381337
|
1186 |
+
v 0.072468 0.524627 0.186340
|
1187 |
+
v -0.382141 -0.677867 0.157708
|
1188 |
+
v -0.170541 0.306717 0.208899
|
1189 |
+
v -0.036905 0.694018 0.268550
|
1190 |
+
v 0.327946 0.282839 0.173267
|
1191 |
+
v 0.109371 0.765561 0.258162
|
1192 |
+
v -0.275053 -0.562090 0.259613
|
1193 |
+
v -0.350962 0.273131 0.166017
|
1194 |
+
v -0.155792 -0.150162 0.233218
|
1195 |
+
v 0.003775 -0.107839 -0.039634
|
1196 |
+
v -0.021853 0.850465 0.202423
|
1197 |
+
v -0.345715 0.136248 0.271026
|
1198 |
+
v 0.301076 0.303805 0.175223
|
1199 |
+
v 0.340225 0.403318 0.166956
|
1200 |
+
v -0.262787 0.262600 0.179115
|
1201 |
+
v 0.396195 -0.022795 0.181860
|
1202 |
+
v -0.246167 -0.094114 0.195791
|
1203 |
+
v -0.209146 -0.608003 0.274713
|
1204 |
+
v -0.064379 -0.207735 -0.472225
|
1205 |
+
v 0.239255 -0.365176 -0.471956
|
1206 |
+
v 0.367139 -0.275847 -0.381002
|
1207 |
+
v -0.118741 0.897300 0.227817
|
1208 |
+
v -0.234702 0.208909 0.172562
|
1209 |
+
v 0.215253 -0.582413 -0.486133
|
1210 |
+
v 0.118123 -0.591322 -0.475033
|
1211 |
+
v 0.077553 0.902263 0.269249
|
1212 |
+
v 0.111947 0.190330 0.180546
|
1213 |
+
v -0.170739 0.464017 0.248995
|
1214 |
+
v 0.084823 -0.319775 -0.407387
|
1215 |
+
v -0.373472 -0.626664 -0.398835
|
1216 |
+
v 0.360996 -0.424446 -0.357072
|
1217 |
+
v -0.046254 0.514116 0.263902
|
1218 |
+
v -0.245664 -0.117241 -0.319082
|
1219 |
+
v -0.207238 -0.528149 -0.366121
|
1220 |
+
v 0.398976 0.159538 0.221933
|
1221 |
+
v -0.098068 0.855900 0.233786
|
1222 |
+
v 0.370766 -0.120671 -0.402762
|
1223 |
+
v 0.148745 -0.689392 0.267432
|
1224 |
+
v -0.078214 0.023763 0.269126
|
1225 |
+
v 0.334800 0.498502 0.169424
|
1226 |
+
v -0.184855 0.340008 0.267827
|
1227 |
+
v 0.200433 -0.107115 -0.250881
|
1228 |
+
v -0.177879 -0.101530 -0.099812
|
1229 |
+
v 0.309811 -0.111742 0.015442
|
1230 |
+
v -0.226863 -0.030065 0.160991
|
1231 |
+
v -0.380669 -0.125261 -0.215289
|
1232 |
+
v -0.028540 0.806970 0.203878
|
1233 |
+
v -0.368774 0.142596 0.266371
|
1234 |
+
v 0.264850 -0.114930 -0.098401
|
1235 |
+
v -0.208062 -0.503150 -0.425146
|
1236 |
+
v 0.038601 -0.100008 0.167070
|
1237 |
+
v -0.400918 0.224692 0.247835
|
1238 |
+
v -0.366116 -0.584675 -0.415554
|
1239 |
+
v 0.145512 0.959217 0.251887
|
1240 |
+
v 0.164307 -0.697150 -0.408538
|
1241 |
+
v 0.124915 -0.531339 -0.392486
|
1242 |
+
v -0.079776 0.067415 0.178534
|
1243 |
+
v -0.371208 -0.229010 0.268077
|
1244 |
+
v 0.389408 0.570328 0.195443
|
1245 |
+
v 0.300048 0.319506 0.275283
|
1246 |
+
v 0.226954 -0.600732 -0.470251
|
1247 |
+
v 0.200667 0.219633 0.177370
|
1248 |
+
v 0.203118 0.970353 0.265441
|
1249 |
+
v 0.145349 0.338432 0.205251
|
1250 |
+
v 0.377065 -0.472794 0.276403
|
1251 |
+
v -0.106166 -0.678320 0.260178
|
1252 |
+
v 0.200046 0.436550 0.207932
|
1253 |
+
v -0.197771 -0.116668 0.180251
|
1254 |
+
v -0.382168 0.636289 0.165276
|
1255 |
+
v 0.152761 -0.106193 -0.307046
|
1256 |
+
v 0.356707 -0.705109 -0.340862
|
1257 |
+
v 0.085039 -0.181674 -0.394349
|
1258 |
+
v 0.111594 -0.098028 -0.089464
|
1259 |
+
v -0.163994 0.651379 0.181054
|
1260 |
+
v -0.046300 -0.555523 -0.400346
|
1261 |
+
v 0.101543 0.311326 0.267908
|
1262 |
+
v 0.041011 -0.172246 -0.401196
|
1263 |
+
v -0.198339 -0.094309 -0.016357
|
1264 |
+
v -0.267262 -0.086833 -0.149803
|
1265 |
+
v 0.219149 -0.088243 0.024955
|
1266 |
+
v 0.043213 0.282055 0.259011
|
1267 |
+
v 0.174691 0.170195 0.247373
|
1268 |
+
v 0.322912 -0.043820 -0.463607
|
1269 |
+
v -0.075135 -0.090228 -0.380027
|
1270 |
+
v -0.185586 -0.153114 0.267286
|
1271 |
+
v 0.027860 0.080454 0.241666
|
1272 |
+
v -0.307492 0.001630 0.199133
|
1273 |
+
v 0.006535 -0.712792 0.217731
|
1274 |
+
v -0.393501 0.095136 0.197169
|
1275 |
+
v -0.124950 -0.204731 -0.386338
|
1276 |
+
v -0.153865 0.605505 0.196282
|
1277 |
+
v -0.128038 0.291410 0.265266
|
1278 |
+
v 0.124507 -0.040501 0.193320
|
1279 |
+
v -0.337094 -0.106380 0.229182
|
1280 |
+
v -0.320215 -0.104979 -0.073221
|
1281 |
+
v -0.355603 0.352266 0.159277
|
1282 |
+
v 0.177902 -0.142944 -0.366093
|
1283 |
+
v 0.374985 -0.027514 -0.248936
|
1284 |
+
v -0.305114 -0.156054 0.170774
|
1285 |
+
v 0.063352 -0.500549 -0.397697
|
1286 |
+
v -0.186931 -0.111708 -0.152308
|
1287 |
+
v -0.271806 0.476094 0.180733
|
1288 |
+
v -0.253503 -0.038453 0.156504
|
1289 |
+
v -0.294149 -0.317438 -0.381011
|
1290 |
+
v -0.324217 -0.680048 -0.389310
|
1291 |
+
v 0.305540 -0.142951 -0.299341
|
1292 |
+
v -0.385349 -0.064178 -0.056827
|
1293 |
+
v 0.297923 0.233539 0.268966
|
1294 |
+
v -0.163928 -0.005644 0.253767
|
1295 |
+
v -0.369594 -0.511835 0.254934
|
1296 |
+
v -0.195178 0.822759 0.221787
|
1297 |
+
v 0.400152 -0.022783 0.264807
|
1298 |
+
v -0.029277 -0.259896 -0.459378
|
1299 |
+
v 0.092842 0.714435 0.189929
|
1300 |
+
v -0.079111 -0.098282 -0.047044
|
1301 |
+
v -0.222557 -0.101458 -0.288486
|
1302 |
+
v 0.005744 -0.247649 -0.417382
|
1303 |
+
v -0.063282 -0.107890 -0.037815
|
1304 |
+
v -0.026152 0.739929 0.268662
|
1305 |
+
v 0.065056 0.608676 0.178680
|
1306 |
+
v 0.298635 -0.108012 -0.364432
|
1307 |
+
v -0.305218 -0.090177 0.270536
|
1308 |
+
v -0.384436 -0.150001 0.011008
|
1309 |
+
v -0.146608 -0.317985 -0.388906
|
1310 |
+
v -0.248738 -0.091209 -0.030674
|
1311 |
+
v -0.361393 -0.527815 -0.439794
|
1312 |
+
v -0.248440 -0.585734 -0.474413
|
1313 |
+
v 0.230603 0.709625 0.259322
|
1314 |
+
v -0.111898 0.632191 0.201455
|
1315 |
+
v 0.049299 -0.095266 -0.296720
|
1316 |
+
v -0.107332 -0.197696 -0.394748
|
1317 |
+
v -0.345046 -0.651029 -0.438584
|
1318 |
+
v 0.373087 -0.156325 0.137318
|
1319 |
+
v -0.146035 0.538202 0.193849
|
1320 |
+
v 0.382476 0.564362 0.259821
|
1321 |
+
v 0.044353 -0.127650 0.250115
|
1322 |
+
v 0.022327 -0.425045 -0.486131
|
1323 |
+
v 0.176498 0.801129 0.271045
|
1324 |
+
v -0.112213 -0.684503 0.165543
|
1325 |
+
v -0.106789 0.378049 0.256732
|
1326 |
+
v -0.066556 0.297076 0.180257
|
1327 |
+
v -0.359674 0.503077 0.264866
|
1328 |
+
v -0.273608 -0.698891 -0.442951
|
1329 |
+
v -0.272800 -0.574824 -0.455012
|
1330 |
+
v -0.111418 0.268251 0.266485
|
1331 |
+
v -0.332446 -0.185402 0.167368
|
1332 |
+
v -0.299855 -0.319153 -0.475190
|
1333 |
+
v 0.100621 0.194795 0.276517
|
1334 |
+
v -0.221125 0.695374 0.201394
|
1335 |
+
v -0.292613 0.202973 0.260559
|
1336 |
+
v 0.389129 -0.268161 0.234047
|
1337 |
+
v 0.285994 0.226615 0.270340
|
1338 |
+
v -0.009848 0.649115 0.268346
|
1339 |
+
v -0.343240 -0.130616 -0.115936
|
1340 |
+
v -0.199770 -0.028793 -0.229887
|
1341 |
+
v 0.036657 0.238631 0.269562
|
1342 |
+
v -0.334846 -0.101731 -0.131319
|
1343 |
+
v 0.250658 -0.703997 0.239376
|
1344 |
+
v 0.246743 0.954707 0.257905
|
1345 |
+
v 0.314994 0.660412 0.198393
|
1346 |
+
v -0.360365 -0.098498 -0.103828
|
1347 |
+
v -0.208392 -0.089702 0.260494
|
1348 |
+
v -0.189684 -0.677809 -0.382063
|
1349 |
+
v 0.184126 0.502230 0.270423
|
1350 |
+
v -0.317748 -0.630677 0.157431
|
1351 |
+
v 0.329856 -0.128970 -0.023217
|
1352 |
+
v 0.364886 -0.334901 0.159733
|
1353 |
+
v 0.189041 -0.531037 -0.461703
|
1354 |
+
v -0.222650 0.488826 0.175941
|
1355 |
+
v -0.334029 0.694154 0.270698
|
1356 |
+
v -0.367118 -0.427562 -0.472602
|
1357 |
+
v 0.377717 0.046548 0.268570
|
1358 |
+
v 0.347782 -0.633165 0.178804
|
1359 |
+
v 0.292221 0.750497 0.205742
|
1360 |
+
v 0.285967 -0.490515 -0.475021
|
1361 |
+
v -0.091077 -0.641357 0.284386
|
1362 |
+
v -0.363051 -0.701451 -0.419426
|
1363 |
+
v 0.337724 -0.473424 0.267414
|
1364 |
+
v -0.391685 -0.579268 0.262872
|
1365 |
+
v 0.173294 0.365931 0.176621
|
1366 |
+
v 0.084650 -0.024160 -0.325586
|
1367 |
+
v 0.314306 -0.035264 0.188299
|
1368 |
+
v 0.376618 -0.266538 -0.453918
|
1369 |
+
v 0.358758 -0.628391 -0.355174
|
1370 |
+
v -0.175941 -0.131034 -0.376710
|
1371 |
+
v -0.381882 0.196178 0.162395
|
1372 |
+
v 0.387739 -0.268405 -0.371984
|
1373 |
+
v -0.193674 -0.386412 -0.404946
|
1374 |
+
v 0.180656 0.052548 0.174176
|
1375 |
+
v 0.264312 -0.268072 -0.474606
|
1376 |
+
v -0.011995 -0.038104 0.108432
|
1377 |
+
v 0.249264 0.158852 0.182033
|
1378 |
+
v -0.162457 0.194250 0.256959
|
1379 |
+
v 0.279048 -0.107791 -0.358064
|
1380 |
+
v -0.318711 0.378734 0.173385
|
1381 |
+
v -0.253354 -0.103381 -0.247865
|
1382 |
+
v -0.073412 -0.104974 -0.010078
|
1383 |
+
v -0.305603 -0.646425 -0.452031
|
1384 |
+
v -0.214564 -0.657687 0.256860
|
1385 |
+
v -0.331034 -0.394578 -0.464546
|
1386 |
+
v -0.087845 0.348324 0.261977
|
1387 |
+
v -0.214166 -0.693624 -0.359127
|
1388 |
+
v 0.037698 -0.097631 -0.324123
|
1389 |
+
v 0.145921 -0.693078 -0.365662
|
1390 |
+
v 0.273871 -0.425032 0.194225
|
1391 |
+
v 0.184571 0.339966 0.262057
|
1392 |
+
v 0.042214 0.401069 0.177039
|
1393 |
+
v -0.079389 0.457557 0.271110
|
1394 |
+
v -0.105599 -0.037008 0.135208
|
1395 |
+
v -0.290323 0.442164 0.265057
|
1396 |
+
v -0.337792 0.571457 0.267592
|
1397 |
+
v 0.333110 -0.653249 0.131508
|
1398 |
+
v 0.340600 -0.030994 0.084252
|
1399 |
+
v 0.029079 -0.022048 0.149953
|
1400 |
+
v 0.229389 -0.092920 -0.136644
|
1401 |
+
v 0.204251 -0.529777 -0.388017
|
1402 |
+
v -0.034815 0.120355 0.263251
|
1403 |
+
v -0.257684 -0.332495 -0.425182
|
1404 |
+
v 0.358023 -0.688166 0.251469
|
1405 |
+
v 0.065685 0.105506 0.176568
|
1406 |
+
v 0.288275 -0.707343 0.215322
|
1407 |
+
v 0.351976 -0.326327 -0.375213
|
1408 |
+
v -0.093462 0.431001 0.178734
|
1409 |
+
v -0.389462 0.259165 0.199105
|
1410 |
+
v 0.313114 0.644582 0.266509
|
1411 |
+
v 0.005088 0.403525 0.244946
|
1412 |
+
v 0.068931 0.459895 0.273136
|
1413 |
+
v -0.170385 -0.695587 -0.377600
|
1414 |
+
v 0.064564 -0.652942 -0.376327
|
1415 |
+
v -0.227311 -0.697738 -0.438821
|
1416 |
+
v -0.242098 0.056055 0.258862
|
1417 |
+
v -0.185553 0.976929 0.235098
|
1418 |
+
v 0.381733 -0.078856 -0.096698
|
1419 |
+
v 0.220247 0.457055 0.176152
|
1420 |
+
v -0.397867 0.067926 0.215810
|
1421 |
+
v 0.006413 -0.461095 -0.470581
|
1422 |
+
v 0.066191 -0.700792 0.214650
|
1423 |
+
v 0.399928 0.216964 0.206589
|
1424 |
+
v -0.101038 0.936783 0.236748
|
1425 |
+
v -0.097361 -0.409472 -0.401229
|
1426 |
+
v 0.174338 0.416954 0.265063
|
1427 |
+
v 0.294555 0.065627 0.180327
|
1428 |
+
v 0.374445 0.334411 0.164484
|
1429 |
+
v 0.315186 -0.112194 -0.466730
|
1430 |
+
v 0.079432 -0.238228 -0.400478
|
1431 |
+
v -0.215303 -0.035039 0.122701
|
1432 |
+
v 0.298716 0.221240 0.277576
|
1433 |
+
v 0.382597 -0.155758 0.122439
|
1434 |
+
v 0.372856 -0.445907 -0.461655
|
1435 |
+
v -0.355075 -0.148587 0.113844
|
1436 |
+
v -0.115408 -0.248790 -0.465486
|
1437 |
+
v -0.384520 -0.082257 0.196361
|
1438 |
+
v 0.246801 -0.025843 -0.205448
|
1439 |
+
v 0.075518 0.104662 0.176524
|
1440 |
+
v 0.401703 0.591374 0.195727
|
1441 |
+
v -0.200396 0.935143 0.234277
|
1442 |
+
v 0.233435 -0.351847 -0.464542
|
1443 |
+
v 0.078147 0.107793 0.178268
|
1444 |
+
v 0.309991 0.716851 0.209020
|
1445 |
+
v -0.357343 -0.402278 -0.471129
|
1446 |
+
v 0.084611 -0.134970 -0.478399
|
1447 |
+
v -0.148116 0.264769 0.265875
|
1448 |
+
v -0.189985 -0.418747 -0.470260
|
1449 |
+
v -0.299334 -0.015083 -0.267027
|
1450 |
+
v 0.290100 -0.128002 0.159202
|
1451 |
+
v -0.222438 0.745493 0.269773
|
1452 |
+
v -0.357617 -0.279365 -0.450849
|
1453 |
+
v -0.377506 -0.333110 0.274326
|
1454 |
+
v 0.034685 -0.029225 -0.133967
|
1455 |
+
v 0.265734 -0.140657 -0.475538
|
1456 |
+
v -0.176175 0.840432 0.204593
|
1457 |
+
v 0.013052 -0.276711 -0.474087
|
1458 |
+
v -0.096242 0.214176 0.177771
|
1459 |
+
v -0.059756 -0.030174 0.179118
|
1460 |
+
v -0.228362 -0.474262 -0.481357
|
1461 |
+
v 0.057280 0.306242 0.178284
|
1462 |
+
v 0.388096 -0.143887 0.241359
|
1463 |
+
v -0.200518 0.093798 0.191934
|
1464 |
+
v 0.371150 -0.102627 -0.217900
|
1465 |
+
v -0.129380 0.923869 0.250618
|
1466 |
+
v -0.340033 -0.116896 0.203480
|
1467 |
+
v 0.060524 0.064674 0.176931
|
1468 |
+
v -0.029364 0.663041 0.249663
|
1469 |
+
v -0.009569 -0.080221 -0.463569
|
1470 |
+
v 0.235984 -0.026680 0.170402
|
1471 |
+
v 0.070376 -0.027292 -0.176329
|
1472 |
+
v 0.370101 0.777722 0.192757
|
1473 |
+
v 0.242265 -0.659216 0.250703
|
1474 |
+
v 0.114907 -0.091407 -0.127267
|
1475 |
+
v -0.335132 0.259807 0.187431
|
1476 |
+
v -0.118397 -0.041843 0.165898
|
1477 |
+
v -0.407468 -0.042272 0.240854
|
1478 |
+
v 0.249365 -0.488347 -0.482764
|
1479 |
+
v 0.304472 -0.381108 -0.367315
|
1480 |
+
v -0.388295 -0.075965 0.192395
|
1481 |
+
v 0.361099 -0.246729 0.163531
|
1482 |
+
v 0.326837 0.843963 0.197070
|
1483 |
+
v -0.237164 -0.044373 0.169086
|
1484 |
+
v -0.385789 0.171807 0.239117
|
1485 |
+
v 0.322034 0.326702 0.268777
|
1486 |
+
v -0.199208 -0.153797 -0.466465
|
1487 |
+
v 0.347670 -0.145133 -0.206490
|
1488 |
+
v 0.194097 0.528144 0.180859
|
1489 |
+
v -0.188383 -0.029964 -0.229733
|
1490 |
+
v 0.205213 -0.137770 -0.450415
|
1491 |
+
v 0.168349 -0.119700 0.136475
|
1492 |
+
v 0.085171 0.146842 0.267254
|
1493 |
+
v 0.136173 -0.034828 -0.103282
|
1494 |
+
v -0.384745 -0.098991 -0.092287
|
1495 |
+
v 0.109534 0.457125 0.174249
|
1496 |
+
v 0.339941 -0.108971 -0.031248
|
1497 |
+
v -0.365150 -0.159768 0.282613
|
1498 |
+
v 0.253518 0.024282 0.181218
|
1499 |
+
v 0.256949 0.813878 0.220680
|
1500 |
+
v 0.006754 -0.031474 -0.056529
|
1501 |
+
v -0.128342 -0.647289 -0.377072
|
1502 |
+
v 0.333062 -0.117836 -0.464030
|
1503 |
+
v -0.178753 -0.689882 -0.398874
|
1504 |
+
v 0.228465 0.172341 0.176779
|
1505 |
+
v -0.294496 0.718725 0.189904
|
1506 |
+
v -0.308226 -0.707851 0.218665
|
1507 |
+
v -0.238736 0.511573 0.268352
|
1508 |
+
v 0.013468 -0.030063 0.049305
|
1509 |
+
v -0.346864 -0.361910 -0.472147
|
1510 |
+
v 0.264036 -0.530600 -0.373599
|
1511 |
+
v -0.082784 -0.027707 -0.060037
|
1512 |
+
v -0.377263 0.720525 0.248217
|
1513 |
+
v 0.371339 -0.252735 -0.413531
|
1514 |
+
v -0.352152 -0.356268 -0.469038
|
1515 |
+
v 0.181830 -0.168870 -0.390773
|
1516 |
+
v -0.028713 -0.028770 -0.121261
|
1517 |
+
v -0.271538 -0.018339 -0.256762
|
1518 |
+
v -0.144113 -0.026264 0.073131
|
1519 |
+
v 0.185282 -0.105486 -0.230664
|
1520 |
+
v 0.290119 0.229601 0.194000
|
1521 |
+
v 0.316271 -0.078416 -0.028165
|
1522 |
+
v -0.372765 -0.365118 -0.439986
|
1523 |
+
v 0.377973 -0.095641 -0.198159
|
1524 |
+
v 0.159126 -0.290331 -0.473078
|
1525 |
+
v 0.029148 0.243369 0.239303
|
1526 |
+
v -0.394626 -0.102667 0.082331
|
1527 |
+
v -0.327733 -0.136148 0.268366
|
1528 |
+
v 0.136952 -0.409196 -0.405460
|
1529 |
+
v 0.088708 0.008210 0.109464
|
1530 |
+
v -0.307312 -0.092900 0.274999
|
1531 |
+
v -0.321822 -0.702860 0.116937
|
1532 |
+
v -0.087789 0.024321 0.263692
|
1533 |
+
v 0.305230 -0.115726 0.089386
|
1534 |
+
v -0.010001 -0.697425 -0.360188
|
1535 |
+
v -0.206068 0.193490 0.265135
|
1536 |
+
v -0.083435 0.615300 0.271057
|
1537 |
+
v -0.166278 -0.632601 -0.369365
|
1538 |
+
v 0.253399 -0.083054 -0.028493
|
1539 |
+
v -0.237822 -0.158650 -0.463979
|
1540 |
+
v 0.059358 0.697522 0.240999
|
1541 |
+
v -0.300565 -0.541439 -0.471180
|
1542 |
+
v 0.156572 0.563022 0.176964
|
1543 |
+
v -0.090573 -0.125744 0.232748
|
1544 |
+
v -0.380424 -0.437123 0.254110
|
1545 |
+
v -0.400784 0.375793 0.226709
|
1546 |
+
v 0.107592 -0.583534 -0.486197
|
1547 |
+
v -0.377889 -0.377835 0.195470
|
1548 |
+
v -0.210072 -0.661188 -0.380224
|
1549 |
+
v 0.244089 -0.092719 0.067369
|
1550 |
+
v -0.221648 -0.212894 -0.472490
|
1551 |
+
v -0.234123 -0.208196 -0.380091
|
1552 |
+
v 0.186134 -0.095437 -0.472914
|
1553 |
+
v 0.316602 0.531698 0.171468
|
1554 |
+
v -0.378952 -0.468898 -0.441711
|
1555 |
+
v -0.159485 -0.113124 0.149404
|
1556 |
+
v -0.187499 -0.122416 -0.320350
|
1557 |
+
v 0.278123 -0.410550 0.262282
|
1558 |
+
v 0.167947 0.328985 0.178344
|
1559 |
+
v -0.195308 -0.613159 0.171518
|
1560 |
+
v 0.388665 0.782215 0.226876
|
1561 |
+
v 0.376743 -0.549958 0.259250
|
1562 |
+
v 0.139969 -0.025971 -0.352103
|
1563 |
+
v -0.040706 -0.188909 -0.383937
|
1564 |
+
v 0.214704 0.573770 0.176603
|
1565 |
+
v -0.025078 0.063956 0.178946
|
1566 |
+
v -0.275124 -0.710428 0.194999
|
1567 |
+
v -0.160739 -0.030188 -0.094847
|
1568 |
+
v -0.365025 -0.554025 -0.405481
|
1569 |
+
v -0.396661 -0.235184 0.260475
|
1570 |
+
v -0.299929 -0.021273 0.114611
|
1571 |
+
v 0.182606 0.610214 0.182897
|
1572 |
+
v -0.073613 -0.109364 -0.011657
|
1573 |
+
v 0.401262 0.279423 0.225296
|
1574 |
+
v -0.139612 -0.293203 -0.409383
|
1575 |
+
v -0.013553 0.851223 0.210024
|
1576 |
+
v 0.137351 0.308551 0.270287
|
1577 |
+
v -0.292891 -0.281104 -0.469772
|
1578 |
+
v -0.258894 -0.612744 0.221330
|
1579 |
+
v -0.041055 -0.095198 -0.306205
|
1580 |
+
v -0.234359 -0.027759 -0.226734
|
1581 |
+
v 0.228010 -0.052164 0.206914
|
1582 |
+
v -0.171974 -0.029145 -0.271608
|
1583 |
+
v 0.139170 0.602909 0.263401
|
1584 |
+
v 0.188015 -0.104622 -0.113753
|
1585 |
+
v -0.096912 0.293743 0.180842
|
1586 |
+
v -0.067503 -0.271996 -0.469938
|
1587 |
+
v 0.118687 0.547783 0.238663
|
1588 |
+
v 0.005898 -0.491856 -0.490371
|
1589 |
+
v 0.107136 -0.049575 -0.117557
|
1590 |
+
v 0.380475 -0.595736 0.220880
|
1591 |
+
v 0.196665 -0.228738 -0.464706
|
1592 |
+
v -0.370683 -0.392850 0.256979
|
1593 |
+
v -0.245285 -0.111233 -0.131364
|
1594 |
+
v 0.301411 -0.127749 -0.160878
|
1595 |
+
v -0.176852 0.650981 0.183219
|
1596 |
+
v -0.265865 0.200867 0.172245
|
1597 |
+
v 0.260294 0.717324 0.263930
|
1598 |
+
v 0.135542 -0.548297 -0.364954
|
1599 |
+
v -0.197661 -0.704342 0.179764
|
1600 |
+
v -0.189098 0.237784 0.182509
|
1601 |
+
v 0.195225 0.563420 0.266445
|
1602 |
+
v 0.050236 -0.089802 -0.060746
|
1603 |
+
v 0.373338 -0.107371 0.102162
|
1604 |
+
v -0.052284 -0.119724 0.242379
|
1605 |
+
v 0.285797 -0.620227 0.271462
|
1606 |
+
v -0.281781 -0.632219 0.154160
|
1607 |
+
v -0.325051 0.620502 0.173599
|
1608 |
+
v -0.403625 0.538831 0.263448
|
1609 |
+
v -0.332362 -0.229662 0.270711
|
1610 |
+
v -0.327451 0.257275 0.173164
|
1611 |
+
v 0.348571 -0.027319 0.126250
|
1612 |
+
v 0.303534 -0.412155 0.263068
|
1613 |
+
v -0.255058 0.002214 0.177104
|
1614 |
+
v 0.332521 -0.136219 0.272689
|
1615 |
+
v -0.276162 -0.320236 -0.470664
|
1616 |
+
v -0.079073 -0.109477 -0.225098
|
1617 |
+
v -0.276995 -0.514418 -0.472373
|
1618 |
+
v 0.158139 -0.031725 -0.074328
|
1619 |
+
v -0.084028 -0.607118 0.218127
|
1620 |
+
v -0.090468 0.644059 0.191766
|
1621 |
+
v -0.178918 -0.132995 -0.371094
|
1622 |
+
v 0.173089 -0.101520 -0.269274
|
1623 |
+
v -0.138756 -0.100558 -0.289873
|
1624 |
+
v -0.074047 0.667331 0.269501
|
1625 |
+
v 0.277709 0.266681 0.270413
|
1626 |
+
v 0.171071 0.198169 0.184721
|
1627 |
+
v 0.025102 -0.035080 0.068998
|
1628 |
+
v 0.207981 -0.109542 -0.118644
|
1629 |
+
v 0.388655 -0.292020 0.237626
|
1630 |
+
v 0.209896 -0.055368 0.265212
|
1631 |
+
v 0.017026 -0.121188 0.142311
|
1632 |
+
v 0.162693 -0.698428 -0.424964
|
1633 |
+
v -0.080635 0.084685 0.180295
|
1634 |
+
v 0.380373 -0.578997 0.232502
|
1635 |
+
v 0.300462 -0.375186 0.162613
|
1636 |
+
v -0.178047 -0.030541 -0.400570
|
1637 |
+
v -0.182000 -0.043357 -0.053197
|
1638 |
+
v 0.140060 0.902025 0.275271
|
1639 |
+
v 0.300464 0.838497 0.202851
|
1640 |
+
v 0.065287 0.273387 0.266838
|
1641 |
+
v 0.144922 0.515581 0.183632
|
1642 |
+
v -0.290899 -0.029803 -0.004618
|
1643 |
+
v -0.133913 -0.142321 0.275562
|
1644 |
+
v -0.070036 -0.098513 -0.139071
|
1645 |
+
v -0.133255 -0.026653 -0.210535
|
1646 |
+
v 0.013271 -0.035206 -0.476142
|
1647 |
+
v -0.269295 -0.188054 -0.387992
|
1648 |
+
v 0.307259 0.162636 0.172577
|
1649 |
+
v 0.187206 -0.035876 0.099085
|
1650 |
+
v -0.176029 -0.045313 0.156466
|
1651 |
+
v 0.182181 0.521940 0.271086
|
1652 |
+
v -0.048673 0.495734 0.270645
|
1653 |
+
v -0.050807 0.380905 0.264693
|
1654 |
+
v -0.068990 -0.424110 -0.473468
|
1655 |
+
v 0.089874 0.621148 0.267907
|
1656 |
+
v -0.147568 -0.559205 -0.376265
|
1657 |
+
v -0.001750 -0.099355 -0.269556
|
1658 |
+
v 0.057101 0.737501 0.184106
|
1659 |
+
v -0.390315 0.249501 0.238670
|
1660 |
+
v 0.096260 -0.206472 -0.470699
|
1661 |
+
v 0.397599 -0.218083 0.256162
|
1662 |
+
v -0.004609 -0.105585 -0.087599
|
1663 |
+
v -0.166146 -0.402325 -0.469220
|
1664 |
+
v 0.333774 -0.641866 -0.448123
|
1665 |
+
v -0.020922 0.655438 0.270972
|
1666 |
+
v -0.027033 0.208044 0.208085
|
1667 |
+
v -0.137590 0.404988 0.267096
|
1668 |
+
v -0.071536 0.131903 0.178227
|
1669 |
+
v -0.006232 -0.041521 0.139167
|
1670 |
+
v -0.097328 -0.233842 -0.471570
|
1671 |
+
v 0.210386 -0.036000 0.149289
|
1672 |
+
v -0.258673 0.796451 0.266335
|
1673 |
+
v -0.331829 -0.713039 -0.342279
|
1674 |
+
v -0.070470 0.777255 0.268285
|
1675 |
+
v 0.328787 0.750672 0.190542
|
1676 |
+
v 0.305207 -0.103697 -0.247666
|
1677 |
+
v -0.228848 0.607590 0.270616
|
1678 |
+
v -0.338839 -0.155817 -0.469826
|
1679 |
+
v 0.279398 -0.329063 -0.386533
|
1680 |
+
v -0.154972 -0.302857 -0.479374
|
1681 |
+
v -0.141312 0.731598 0.188393
|
1682 |
+
v 0.034385 0.291003 0.181011
|
1683 |
+
v -0.267726 0.563408 0.169850
|
1684 |
+
v -0.060393 0.196500 0.179737
|
1685 |
+
v 0.252987 0.209935 0.179363
|
1686 |
+
v -0.346347 -0.678615 -0.393998
|
1687 |
+
v 0.307488 -0.401875 0.181125
|
1688 |
+
v -0.279358 -0.105520 -0.205641
|
1689 |
+
v 0.119694 -0.037260 0.113792
|
1690 |
+
v 0.006341 -0.039959 0.019865
|
1691 |
+
v 0.059252 -0.109758 0.160215
|
1692 |
+
v 0.305447 -0.103629 0.025356
|
1693 |
+
v 0.166475 0.729070 0.265568
|
1694 |
+
v -0.002750 0.359516 0.269249
|
1695 |
+
v -0.163962 -0.494467 -0.469696
|
1696 |
+
v -0.133146 -0.654786 -0.434344
|
1697 |
+
v 0.296979 -0.177671 0.258958
|
1698 |
+
v 0.309046 0.593783 0.265429
|
1699 |
+
v 0.288621 -0.430683 -0.388567
|
1700 |
+
v 0.012939 0.539663 0.257055
|
1701 |
+
v -0.255038 -0.036042 0.085794
|
1702 |
+
v -0.097263 -0.101353 -0.406241
|
1703 |
+
v -0.381349 0.756844 0.270547
|
1704 |
+
v -0.268159 -0.343516 0.272319
|
1705 |
+
v 0.331896 -0.250644 -0.356871
|
1706 |
+
v -0.139447 -0.158550 -0.471722
|
1707 |
+
v 0.392667 0.487132 0.211106
|
1708 |
+
v -0.336633 -0.531945 0.183020
|
1709 |
+
v 0.147402 -0.287408 -0.479575
|
1710 |
+
v 0.120038 -0.033737 -0.011063
|
1711 |
+
v 0.313666 -0.187466 -0.374678
|
1712 |
+
v -0.293173 -0.125700 -0.463617
|
1713 |
+
v 0.106891 -0.350474 -0.480209
|
1714 |
+
v -0.220371 -0.106916 -0.233731
|
1715 |
+
v 0.052221 -0.139794 0.257031
|
1716 |
+
v -0.230472 0.781861 0.265708
|
1717 |
+
v 0.356745 0.230004 0.166985
|
1718 |
+
v -0.090906 -0.695240 0.276415
|
1719 |
+
v 0.250788 -0.105600 0.279186
|
1720 |
+
v 0.269324 -0.706303 0.178519
|
1721 |
+
v -0.263876 0.319346 0.178222
|
1722 |
+
v 0.170771 -0.084179 0.262890
|
1723 |
+
v -0.084618 0.276230 0.181450
|
1724 |
+
v -0.255588 -0.036255 0.187964
|
1725 |
+
v -0.378462 -0.304088 0.264059
|
1726 |
+
v 0.054408 0.552620 0.256972
|
1727 |
+
v 0.206389 -0.088797 -0.144798
|
1728 |
+
v -0.380517 -0.689275 -0.301295
|
1729 |
+
v 0.345152 -0.014461 -0.431658
|
1730 |
+
v -0.168045 -0.570050 -0.387417
|
1731 |
+
v 0.295179 -0.335260 0.180049
|
1732 |
+
v 0.259366 -0.113750 -0.473348
|
1733 |
+
v -0.127528 0.650641 0.268578
|
1734 |
+
v -0.053611 -0.149442 -0.460766
|
1735 |
+
v 0.086936 0.444291 0.267634
|
1736 |
+
v -0.125601 -0.101723 -0.374866
|
1737 |
+
v -0.353472 -0.129999 -0.357957
|
1738 |
+
v -0.257131 0.517602 0.170967
|
1739 |
+
v -0.198669 0.442481 0.270657
|
1740 |
+
v -0.151359 0.845342 0.218053
|
1741 |
+
v -0.068933 -0.159423 -0.374105
|
1742 |
+
v 0.280615 -0.355761 0.267307
|
1743 |
+
v -0.235576 0.167798 0.176831
|
1744 |
+
v 0.273974 0.432118 0.269936
|
1745 |
+
v 0.132793 -0.119925 0.193846
|
1746 |
+
v -0.355597 -0.092534 -0.254191
|
1747 |
+
v -0.346672 0.257367 0.270050
|
1748 |
+
v -0.227396 -0.143410 -0.378735
|
1749 |
+
v 0.394875 -0.083527 -0.169096
|
1750 |
+
v 0.179583 0.060344 0.266057
|
1751 |
+
v -0.072336 0.777866 0.195319
|
1752 |
+
v -0.219956 -0.088326 -0.157659
|
1753 |
+
v -0.316662 -0.111968 -0.080233
|
1754 |
+
v 0.309293 -0.715461 0.240194
|
1755 |
+
v 0.386218 -0.037049 0.048524
|
1756 |
+
v -0.105661 -0.111840 -0.133184
|
1757 |
+
v -0.318644 -0.578745 0.271976
|
1758 |
+
v 0.028154 0.504364 0.271394
|
1759 |
+
v -0.354371 -0.463212 0.221798
|
1760 |
+
v 0.218900 -0.034149 -0.342280
|
1761 |
+
v -0.280015 -0.379510 0.261825
|
1762 |
+
v 0.050692 -0.058766 -0.460597
|
1763 |
+
v 0.394644 -0.084449 -0.273649
|
1764 |
+
v 0.256107 -0.020113 -0.329406
|
1765 |
+
v 0.374919 -0.462496 -0.356862
|
1766 |
+
v -0.219031 -0.558264 -0.399632
|
1767 |
+
v -0.195594 -0.034714 0.082543
|
1768 |
+
v -0.348950 0.597477 0.269723
|
1769 |
+
v -0.062428 -0.129402 0.214456
|
1770 |
+
v -0.120677 -0.103207 -0.066078
|
1771 |
+
v -0.329655 0.276238 0.191032
|
1772 |
+
v -0.263711 -0.226136 -0.395098
|
1773 |
+
v 0.257311 -0.642204 -0.456330
|
1774 |
+
v -0.342231 -0.380438 -0.466112
|
1775 |
+
v 0.118349 0.141165 0.262604
|
1776 |
+
v 0.355408 0.113500 0.178287
|
1777 |
+
v -0.271449 -0.257135 0.270812
|
1778 |
+
v 0.018892 -0.108209 -0.156638
|
1779 |
+
v -0.396859 0.198479 0.226858
|
1780 |
+
v -0.376722 -0.296501 0.234846
|
1781 |
+
v 0.330000 -0.050997 0.180292
|
1782 |
+
v 0.018277 -0.110012 0.125212
|
1783 |
+
v 0.283717 0.180921 0.269046
|
1784 |
+
v -0.017801 -0.037342 0.106337
|
1785 |
+
v -0.265232 0.868550 0.263133
|
1786 |
+
v -0.352336 -0.248990 -0.459847
|
1787 |
+
v -0.149790 -0.415502 -0.390014
|
1788 |
+
v -0.036562 -0.700926 -0.401980
|
1789 |
+
v -0.278328 0.516855 0.269010
|
1790 |
+
v -0.204063 0.694332 0.265991
|
1791 |
+
v 0.174948 0.530937 0.256923
|
1792 |
+
v 0.022877 -0.033087 -0.015384
|
1793 |
+
v 0.409559 -0.126320 0.172534
|
1794 |
+
v 0.374061 -0.152029 0.262759
|
1795 |
+
v 0.263077 -0.030983 -0.102538
|
1796 |
+
v -0.392807 -0.092172 -0.290254
|
1797 |
+
v 0.085053 0.136753 0.182165
|
1798 |
+
v 0.195780 -0.105659 -0.206882
|
1799 |
+
v -0.024927 -0.103768 -0.449975
|
1800 |
+
v 0.240992 -0.225174 -0.413044
|
1801 |
+
v 0.337394 -0.394112 0.272210
|
1802 |
+
v 0.293223 -0.026077 -0.224565
|
1803 |
+
v 0.111606 -0.030131 -0.161954
|
1804 |
+
v 0.064684 -0.129876 0.197864
|
1805 |
+
v 0.245411 -0.028856 0.009452
|
1806 |
+
v -0.054422 -0.502590 -0.400841
|
1807 |
+
v 0.286912 -0.309603 0.268003
|
1808 |
+
v -0.390478 0.572251 0.240538
|
1809 |
+
v 0.105992 0.232106 0.268651
|
1810 |
+
v -0.052609 -0.035304 0.142952
|
1811 |
+
v -0.034621 -0.105565 -0.205202
|
1812 |
+
v -0.106157 0.235147 0.176445
|
1813 |
+
v 0.158743 -0.680636 0.168795
|
1814 |
+
v 0.053222 0.668480 0.210895
|
1815 |
+
v 0.083996 -0.449395 -0.396876
|
1816 |
+
v 0.126864 -0.106269 0.156763
|
1817 |
+
v -0.322580 -0.504224 -0.373530
|
1818 |
+
v -0.087236 0.534144 0.184025
|
1819 |
+
v 0.233549 -0.504679 -0.389637
|
1820 |
+
v -0.288888 0.533681 0.270130
|
1821 |
+
v 0.230041 -0.078394 -0.467104
|
1822 |
+
v 0.171147 -0.117857 0.261824
|
1823 |
+
v 0.048832 -0.681453 -0.431148
|
1824 |
+
v 0.377766 -0.519626 0.231832
|
1825 |
+
v -0.028321 -0.624488 -0.439372
|
1826 |
+
v 0.113811 -0.696509 0.196306
|
1827 |
+
v 0.094648 -0.102631 0.121234
|
1828 |
+
v -0.147794 -0.038473 0.185326
|
1829 |
+
v -0.065013 -0.107253 -0.393758
|
1830 |
+
v -0.166165 -0.490096 -0.476746
|
1831 |
+
v 0.325395 -0.657998 -0.348549
|
1832 |
+
v 0.237513 0.776683 0.203455
|
1833 |
+
v -0.329588 -0.040352 -0.467147
|
1834 |
+
v -0.363627 -0.547350 -0.465307
|
1835 |
+
v -0.388037 -0.082237 -0.228022
|
1836 |
+
v -0.361498 -0.417973 -0.399503
|
1837 |
+
v 0.152794 0.417329 0.230833
|
1838 |
+
v -0.042540 -0.099868 -0.272888
|
1839 |
+
v -0.193965 0.611932 0.264442
|
1840 |
+
v 0.244461 0.746273 0.190750
|
1841 |
+
v -0.047445 -0.116710 0.260064
|
1842 |
+
v 0.015239 -0.032246 -0.195076
|
1843 |
+
v -0.341636 0.863333 0.200657
|
1844 |
+
v -0.342743 -0.070248 0.195094
|
1845 |
+
v -0.285961 -0.160691 -0.466732
|
1846 |
+
v 0.054454 -0.368977 -0.415006
|
1847 |
+
v -0.295532 -0.247681 0.271257
|
1848 |
+
v 0.313154 0.624501 0.180444
|
1849 |
+
v 0.229526 -0.668470 0.151510
|
1850 |
+
v -0.397600 0.062896 0.194910
|
1851 |
+
v 0.238347 0.409415 0.263248
|
1852 |
+
v -0.241219 0.373518 0.201456
|
1853 |
+
v 0.330654 -0.203956 -0.377488
|
1854 |
+
v 0.201565 -0.027726 -0.037404
|
1855 |
+
v 0.346164 -0.094025 -0.190029
|
1856 |
+
v -0.353684 -0.015827 -0.298682
|
1857 |
+
v 0.389763 -0.132615 -0.011193
|
1858 |
+
v 0.167930 -0.651245 0.148864
|
1859 |
+
v -0.335251 -0.025431 -0.150265
|
1860 |
+
v 0.378417 0.035620 0.154575
|
1861 |
+
v -0.333827 -0.071683 0.198085
|
1862 |
+
v 0.343628 0.234611 0.175973
|
1863 |
+
v 0.370548 -0.094242 -0.058844
|
1864 |
+
v -0.345380 0.217043 0.266899
|
1865 |
+
v -0.019181 0.079448 0.265911
|
1866 |
+
v 0.326955 -0.553111 -0.459925
|
1867 |
+
v -0.167108 0.030511 0.246825
|
1868 |
+
v -0.055017 0.283542 0.175280
|
1869 |
+
v -0.379002 -0.265883 -0.372091
|
1870 |
+
v 0.246006 -0.500725 -0.422976
|
1871 |
+
v 0.116029 -0.149046 -0.467710
|
1872 |
+
v 0.353811 0.749448 0.270035
|
1873 |
+
v 0.274217 0.130268 0.266023
|
1874 |
+
v -0.333797 -0.320821 -0.367563
|
1875 |
+
v 0.184901 -0.110945 -0.076555
|
1876 |
+
v 0.301391 -0.239409 0.276359
|
1877 |
+
v 0.307377 -0.469691 -0.372260
|
1878 |
+
v 0.403263 -0.072297 -0.161185
|
1879 |
+
v -0.366355 -0.501421 0.151680
|
1880 |
+
v 0.011815 -0.662365 -0.367613
|
1881 |
+
v -0.003196 -0.118899 -0.395556
|
1882 |
+
v -0.185268 0.092942 0.267985
|
1883 |
+
v 0.202948 -0.679350 -0.404053
|
1884 |
+
v 0.138829 0.382169 0.261892
|
1885 |
+
v 0.109404 0.365850 0.191346
|
1886 |
+
v 0.354503 0.092859 0.168466
|
1887 |
+
v -0.361345 0.572020 0.172172
|
1888 |
+
v -0.150963 0.596169 0.243211
|
1889 |
+
v 0.246760 0.727353 0.196729
|
1890 |
+
v 0.109053 -0.336504 -0.472391
|
1891 |
+
v -0.176974 0.651546 0.179158
|
1892 |
+
v -0.131539 -0.665771 0.247356
|
1893 |
+
v 0.023861 0.849928 0.209630
|
1894 |
+
v -0.329398 -0.569768 0.257097
|
1895 |
+
v 0.188594 0.560320 0.255275
|
1896 |
+
v 0.347626 -0.044454 -0.326877
|
1897 |
+
v 0.024595 -0.127374 0.255905
|
1898 |
+
v 0.022572 -0.098401 0.027622
|
1899 |
+
v 0.325174 -0.473136 0.164613
|
1900 |
+
v -0.331527 -0.705123 0.151947
|
1901 |
+
v -0.179425 0.240201 0.265511
|
1902 |
+
v -0.304301 0.233143 0.177721
|
1903 |
+
v -0.081965 -0.393512 -0.478595
|
1904 |
+
v -0.200375 0.190819 0.261910
|
1905 |
+
v 0.115656 -0.112974 0.202228
|
1906 |
+
v -0.310377 0.795486 0.272157
|
1907 |
+
v -0.188385 0.679819 0.268257
|
1908 |
+
v -0.175422 0.776974 0.267513
|
1909 |
+
v 0.372182 0.541039 0.258712
|
1910 |
+
v -0.286777 0.808888 0.207040
|
1911 |
+
v 0.392223 0.064117 0.273519
|
1912 |
+
v -0.104843 0.269979 0.183755
|
1913 |
+
v 0.193765 0.729786 0.265606
|
1914 |
+
v 0.035254 -0.229290 -0.406981
|
1915 |
+
v -0.053671 0.899608 0.276715
|
1916 |
+
v -0.031757 -0.036844 -0.212569
|
1917 |
+
v 0.308466 0.264904 0.180638
|
1918 |
+
v -0.358138 -0.092218 -0.005781
|
1919 |
+
v -0.086844 0.228106 0.251863
|
1920 |
+
v -0.023250 0.208627 0.179433
|
1921 |
+
v -0.365063 -0.650290 -0.464215
|
1922 |
+
v 0.192683 -0.048421 0.183586
|
1923 |
+
v -0.194127 -0.023915 -0.217288
|
1924 |
+
v -0.142794 -0.031537 -0.462943
|
1925 |
+
v 0.194606 -0.039512 0.113861
|
1926 |
+
v -0.348163 -0.268639 -0.371869
|
1927 |
+
v -0.357551 -0.686092 0.146087
|
1928 |
+
v -0.017220 -0.116152 -0.472202
|
1929 |
+
v 0.109313 -0.072396 0.164941
|
1930 |
+
v -0.074867 -0.030696 0.167088
|
1931 |
+
v -0.106663 -0.119080 0.265961
|
1932 |
+
v -0.380324 -0.272586 -0.385959
|
1933 |
+
v 0.101444 0.865458 0.271939
|
1934 |
+
v -0.191739 -0.123298 -0.372654
|
1935 |
+
v 0.128480 -0.247417 -0.473747
|
1936 |
+
v 0.276550 -0.034521 0.094966
|
1937 |
+
v -0.047605 -0.029396 -0.288518
|
1938 |
+
v 0.353370 -0.201851 -0.455802
|
1939 |
+
v -0.134081 -0.026491 -0.227671
|
1940 |
+
v 0.259318 -0.514398 0.265097
|
1941 |
+
v -0.311598 -0.106372 -0.231429
|
1942 |
+
v 0.352802 0.429669 0.266432
|
1943 |
+
v 0.311654 -0.103526 -0.272282
|
1944 |
+
v -0.185510 0.537272 0.173854
|
1945 |
+
v 0.269857 -0.114962 0.176784
|
1946 |
+
v -0.082795 0.072393 0.272201
|
1947 |
+
v 0.221474 -0.103686 -0.149381
|
1948 |
+
v -0.370135 -0.288200 -0.384291
|
1949 |
+
v -0.402318 0.461134 0.234053
|
1950 |
+
v -0.377626 0.131080 0.161161
|
1951 |
+
v 0.381449 0.465631 0.179170
|
1952 |
+
v -0.335890 0.163599 0.266981
|
1953 |
+
v 0.320822 -0.140068 0.260399
|
1954 |
+
v 0.132780 -0.263065 -0.416583
|
1955 |
+
v -0.011099 0.012256 0.164607
|
1956 |
+
v -0.179981 0.280205 0.267415
|
1957 |
+
v -0.165382 -0.622693 0.235811
|
1958 |
+
v -0.233071 -0.499160 -0.477566
|
1959 |
+
v -0.316416 -0.657780 -0.466889
|
1960 |
+
v -0.372554 -0.353318 -0.373434
|
1961 |
+
v 0.181678 0.962809 0.227309
|
1962 |
+
v 0.398113 -0.071435 0.187876
|
1963 |
+
v 0.234485 -0.028829 0.106762
|
1964 |
+
v -0.216112 -0.663310 0.208899
|
1965 |
+
v 0.243648 -0.021655 0.173641
|
1966 |
+
v 0.015598 -0.115765 0.188464
|
1967 |
+
v -0.018379 0.180493 0.261185
|
1968 |
+
v 0.397873 -0.267705 0.239463
|
1969 |
+
v -0.261228 0.142457 0.271743
|
1970 |
+
v 0.332841 0.319985 0.165951
|
1971 |
+
v 0.135273 -0.159298 -0.469767
|
1972 |
+
v -0.056585 -0.028821 -0.273797
|
1973 |
+
v -0.243070 -0.319065 -0.378770
|
1974 |
+
v -0.149850 -0.617294 -0.378765
|
1975 |
+
v -0.313506 0.271447 0.175249
|
1976 |
+
v 0.354226 -0.604701 -0.343271
|
1977 |
+
v -0.388130 -0.519482 0.227193
|
1978 |
+
v 0.371502 -0.125248 -0.025310
|
1979 |
+
v 0.398718 0.567233 0.229785
|
1980 |
+
v 0.355349 -0.703982 0.163324
|
1981 |
+
v -0.233169 0.334625 0.273981
|
1982 |
+
v 0.034406 0.193852 0.265644
|
1983 |
+
v 0.226693 0.869406 0.269281
|
1984 |
+
v -0.273271 0.277508 0.177988
|
1985 |
+
v 0.030213 -0.029186 -0.301572
|
1986 |
+
v -0.067986 -0.334728 -0.476756
|
1987 |
+
v 0.293582 -0.471226 0.261398
|
1988 |
+
v -0.060906 0.927072 0.245306
|
1989 |
+
v 0.267416 -0.158630 -0.391762
|
1990 |
+
v -0.191044 -0.444632 -0.400972
|
1991 |
+
v 0.117963 0.560100 0.268940
|
1992 |
+
v -0.359651 -0.716491 0.214585
|
1993 |
+
v -0.203667 -0.185152 0.253953
|
1994 |
+
v -0.123700 -0.521449 -0.388852
|
1995 |
+
v 0.194776 -0.351832 -0.440452
|
1996 |
+
v 0.059318 -0.112280 -0.348477
|
1997 |
+
v 0.284180 -0.094382 -0.022217
|
1998 |
+
v -0.334122 0.337520 0.197917
|
1999 |
+
v -0.058480 -0.029865 0.018530
|
2000 |
+
v -0.227028 0.752498 0.185255
|
2001 |
+
v 0.377835 -0.375290 0.250206
|
2002 |
+
v 0.345794 -0.626021 0.145492
|
2003 |
+
v 0.146502 0.906949 0.229513
|
2004 |
+
v -0.228918 0.290228 0.173889
|
2005 |
+
v 0.340504 -0.124233 0.173837
|
2006 |
+
v -0.177579 -0.696992 0.279670
|
2007 |
+
v -0.351008 0.149973 0.179185
|
2008 |
+
v 0.009865 -0.100435 -0.282645
|
2009 |
+
v -0.210552 0.006839 0.257445
|
2010 |
+
v -0.267370 0.123416 0.170737
|
2011 |
+
v -0.205022 -0.114877 -0.336102
|
2012 |
+
v 0.297849 -0.265765 0.274239
|
2013 |
+
v 0.136636 0.332872 0.227163
|
2014 |
+
v 0.141560 -0.168014 -0.470574
|
2015 |
+
v 0.191307 0.008695 0.275618
|
2016 |
+
v 0.004420 0.846063 0.213985
|
2017 |
+
v -0.094420 -0.213970 -0.471895
|
2018 |
+
v 0.266473 -0.110905 0.060452
|
2019 |
+
v 0.174515 -0.043052 0.140900
|
2020 |
+
v 0.345800 0.266613 0.175295
|
2021 |
+
v -0.128501 0.883347 0.255722
|
2022 |
+
v 0.291641 -0.599356 0.258685
|
2023 |
+
v 0.274321 -0.023283 -0.167224
|
2024 |
+
v -0.247240 0.428223 0.174826
|
2025 |
+
v 0.037055 0.697262 0.197683
|
2026 |
+
v -0.158157 -0.089914 -0.347609
|
2027 |
+
v -0.225764 0.510385 0.176210
|
2028 |
+
v -0.026104 -0.078486 -0.132771
|
2029 |
+
v 0.372524 0.661435 0.182130
|
2030 |
+
v -0.338888 -0.189879 0.171014
|
2031 |
+
v 0.387564 0.752943 0.204218
|
2032 |
+
v 0.298599 0.394387 0.277039
|
2033 |
+
v 0.007417 -0.372250 -0.483312
|
2034 |
+
v -0.325533 0.284950 0.267617
|
2035 |
+
v 0.168266 0.202361 0.177506
|
2036 |
+
v -0.162272 -0.534170 -0.378964
|
2037 |
+
v -0.308149 -0.067652 0.270333
|
2038 |
+
v -0.384736 -0.107145 -0.322966
|
2039 |
+
v 0.079406 -0.106202 0.132012
|
2040 |
+
v 0.388568 -0.243303 0.266765
|
2041 |
+
v 0.162428 -0.266796 -0.465065
|
2042 |
+
v 0.374549 -0.276758 0.250048
|
2043 |
+
v -0.114675 -0.029639 -0.147160
|
2044 |
+
v 0.381710 -0.197076 -0.446443
|
2045 |
+
v 0.202445 0.410649 0.270081
|
2046 |
+
v 0.260117 -0.358713 -0.435188
|
2047 |
+
v 0.329080 -0.095616 -0.005583
|
2048 |
+
v -0.080022 -0.292464 -0.470068
|
point_clouds/default_settings.json
ADDED
@@ -0,0 +1,28 @@
1 |
+
{
|
2 |
+
"Goat": {
|
3 |
+
"rx": -1.15,
|
4 |
+
"ry": 0.64,
|
5 |
+
"rz": -2.46,
|
6 |
+
"scale": 1.25,
|
7 |
+
"path": "point_clouds/goat.obj",
|
8 |
+
"dy": 331
|
9 |
+
},
|
10 |
+
|
11 |
+
"Chair": {
|
12 |
+
"rx": -2.08,
|
13 |
+
"ry": 0.13,
|
14 |
+
"rz": 1.46,
|
15 |
+
"scale": 1.09,
|
16 |
+
"path": "point_clouds/chair.obj",
|
17 |
+
"dy": 324
|
18 |
+
},
|
19 |
+
|
20 |
+
"File": {
|
21 |
+
"rx": 0.0,
|
22 |
+
"ry": 0.0,
|
23 |
+
"rz": 0.0,
|
24 |
+
"scale": 1.0,
|
25 |
+
"path": "None",
|
26 |
+
"dy": 300
|
27 |
+
}
|
28 |
+
}
|
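Each entry above is a per-cloud viewing preset: "rx"/"ry"/"rz" are Euler angles (apparently in radians, given the math.cos/math.sin calls in render_util.generate_roation), "scale" is a global size factor, "dy" shifts the projected cloud vertically in pixels, and "path" points at the bundled .obj file. The "File" entry, whose path is "None", looks like the placeholder preset for user-uploaded clouds. Below is a minimal sketch of how such a preset could be fed to the helpers in render_util.py further down; the inline .obj parsing and the call order are illustrative assumptions, not necessarily how app.py wires things up:

import json
import torch
import render_util

# Load the "Chair" preset defined above.
with open('point_clouds/default_settings.json') as f:
    preset = json.load(f)['Chair']

# Read only the vertex records of the OBJ file (assumes plain "v x y z" lines,
# which is all chair.obj contains).
verts = []
with open(preset['path']) as f:
    for line in f:
        if line.startswith('v '):
            verts.append([float(t) for t in line.split()[1:4]])
pc = torch.tensor(verts)

# Orient the cloud with the stored angles, then splat it into a z-buffer image
# (draw_pc's return value is assumed to be that image).
pc = render_util.rotate_pc(pc, preset['rx'], preset['ry'], preset['rz'])
z_buffer = render_util.draw_pc(pc, dy=preset['dy'], scale=preset['scale'])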
point_clouds/goat.obj
ADDED
The diff for this file is too large to render.
See raw diff
|
|
render_util.py
ADDED
@@ -0,0 +1,135 @@
import torch
import numpy as np
import math
import cv2

# Fixed camera: object-to-world matrix, inverse view matrix, and projection matrix.
world_mat_object = torch.tensor([
    [0.5085, 0.3226, 0.7984, 0.0000],
    [-0.3479, 0.9251, -0.1522, 0.0000],
    [-0.7877, -0.2003, 0.5826, 0.3384],
    [0.0000, 0.0000, 0.0000, 1.0000]
])

world_mat_inv = torch.tensor([
    [0.4019, 0.9157, 0.0000, 0.3359],
    [-0.1932, 0.0848, 0.9775, -1.0227],
    [0.8951, -0.3928, 0.2110, -7.0748],
    [-0.0000, 0.0000, -0.0000, 1.0000]
])


proj = torch.tensor([
    [2.1875, 0.0000, 0.0000, 0.0000],
    [0.0000, 3.8889, 0.0000, 0.0000],
    [0.0000, 0.0000, -1.0020, -0.2002],
    [0.0000, 0.0000, -1.0000, 0.0000]
])


RANGES = [[0, 540], [100, 960]]

TARGET = [500, -1]


def resize(img):
    # Resize to the target width; a height of -1 preserves the aspect ratio.
    if TARGET[1] == -1:
        r = img.shape[0] / img.shape[1]
        img = cv2.resize(img, (TARGET[0], int(r * TARGET[0])))
    else:
        img = cv2.resize(img, (TARGET[0], TARGET[1]))

    return img


def scatter(u_pix, v_pix, distances, res, radius=5):
    # Splat each projected point into the image, keeping the nearest (largest f) value per pixel.
    distances -= 6
    img = np.zeros(res)
    for (u, v, d) in zip(u_pix, v_pix, distances):
        v, u = int(v), int(u)
        f = np.exp(-d / 0.7)
        if radius == 0:
            img[v, u] = max(img[v, u], f)
        else:
            for t1 in range(-radius, radius):
                for t2 in range(-radius, radius):
                    ty, tx = v - t1, u - t2
                    ty, tx = max(0, ty), max(0, tx)
                    ty, tx = min(res[0] - 1, ty), min(res[1] - 1, tx)
                    img[ty, tx] = max(img[ty, tx], f)

    return img


def generate_roation(phi_x, phi_y, phi_z):
    # Build a 3x3 rotation matrix from Euler angles (Rz @ Ry @ Rx).
    def Rx(theta):
        return torch.tensor([[1, 0, 0],
                             [0, math.cos(theta), -math.sin(theta)],
                             [0, math.sin(theta), math.cos(theta)]])

    def Ry(theta):
        return torch.tensor([[math.cos(theta), 0, math.sin(theta)],
                             [0, 1, 0],
                             [-math.sin(theta), 0, math.cos(theta)]])

    def Rz(theta):
        return torch.tensor([[math.cos(theta), -math.sin(theta), 0],
                             [math.sin(theta), math.cos(theta), 0],
                             [0, 0, 1]])

    return Rz(phi_z) @ Ry(phi_y) @ Rx(phi_x)


def rotate_pc(pc, rx, ry, rz):
    rotation = generate_roation(rx, ry, rz)
    rotated = pc.clone()
    rotated[:, :3] = rotated[:, :3] @ rotation.T
    if rotated.shape[-1] == 6:
        # Rotate the normals as well when they are present.
        rotated[:, 3:] = rotated[:, 3:] @ rotation.T
    return rotated


def draw_pc(pc: torch.Tensor, res=(540, 960), radius=5, timer=None, dy=0, scale=1):
    # Normalize the cloud, project it with the fixed camera, and rasterize a z-buffer.
    xyz = pc[:, :3]
    xyz -= xyz.mean(dim=0)
    t_scale = xyz.norm(dim=-1).max()
    xyz /= t_scale
    xyz *= scale

    xyz[:, -1] += xyz[:, -1].min()

    n, _ = xyz.shape

    if timer is not None:
        with timer('project'):
            xyz_pad = torch.cat([xyz, torch.ones_like(pc[:, :1])], dim=-1)
            xyz_local = xyz_pad @ world_mat_inv.T
            distances = -xyz_local[:, 2]

            projected = xyz_local @ proj.T
            projected = projected / projected[:, 3:4]
            projected = projected[:, :3]

            u_pix = ((projected[:, 0] + 1) / 2) * res[1]
            v_pix = ((projected[:, 1] + 1) / 2) * res[0] + dy

        with timer('z-buffer'):
            z_buffer = scatter(u_pix, v_pix, distances, res, radius=radius)[:, :]
    else:
        xyz_pad = torch.cat([xyz, torch.ones_like(pc[:, :1])], dim=-1)
        xyz_local = xyz_pad @ world_mat_inv.T
        distances = -xyz_local[:, 2]

        projected = xyz_local @ proj.T
        projected = projected / projected[:, 3:4]
        projected = projected[:, :3]

        u_pix = ((projected[:, 0] + 1) / 2) * res[1]
        v_pix = ((projected[:, 1] + 1) / 2) * res[0] + dy

        z_buffer = scatter(u_pix, v_pix, distances, res, radius=radius)[:, :]

    z_buffer = z_buffer[RANGES[0][0]: RANGES[0][1], :]
    z_buffer = z_buffer[:, RANGES[1][0]:RANGES[1][1]]
    z_buffer = resize(z_buffer)
    return z_buffer
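One non-obvious detail above is the optional timer argument of draw_pc: it must be a callable that, given a stage label, returns a context manager. A minimal compatible implementation (an assumption for illustration, not part of the commit) could look like this:

import time
from contextlib import contextmanager

@contextmanager
def timer(label):
    # Times one stage ('project' or 'z-buffer') and prints the elapsed seconds.
    start = time.time()
    yield
    print('%s: %.3fs' % (label, time.time() - start))

# z_buffer = draw_pc(pc, radius=5, dy=300, scale=1.0, timer=timer)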
requirements.txt
ADDED
@@ -0,0 +1,9 @@
matplotlib==3.5.2
numpy==1.22.3
opencv-python==4.5.5.64
Tqdm
torch==1.8.1
plotly
gdown
util.py
ADDED
@@ -0,0 +1,121 @@
import torch
import math
from pathlib import Path
import time


def euler_rotation(rx=0, ry=0, rz=0, backwards=False):
    # Build a 3x3 rotation matrix from Euler angles, skipping zero rotations.
    Ms = []
    if rz != 0:
        cosz = math.cos(rz)
        sinz = math.sin(rz)
        Ms.append(torch.tensor(
            [[cosz, -sinz, 0],
             [sinz, cosz, 0],
             [0, 0, 1]]))
    if ry != 0:
        cosy = math.cos(ry)
        siny = math.sin(ry)
        Ms.append(torch.tensor(
            [[cosy, 0, siny],
             [0, 1, 0],
             [-siny, 0, cosy]]))
    if rx != 0:
        cosx = math.cos(rx)
        sinx = math.sin(rx)
        Ms.append(torch.tensor(
            [[1, 0, 0],
             [0, cosx, -sinx],
             [0, sinx, cosx]]))

    rotation = torch.eye(3)
    if backwards and len(Ms) > 0:
        Ms = Ms[::-1]

    for mat in Ms[::-1]:
        rotation = torch.matmul(rotation, mat)
    return rotation


def export(file, vs, faces, vn=None, color=None):
    # Write vertices (optionally with per-vertex color and normals) and faces to an OBJ file.
    with open(file, 'w+') as f:
        for vi, v in enumerate(vs):
            if color is None:
                f.write("v %f %f %f\n" % (v[0], v[1], v[2]))
            else:
                f.write("v %f %f %f %f %f %f\n" % (v[0], v[1], v[2], color[vi][0], color[vi][1], color[vi][2]))
            if vn is not None:
                f.write("vn %f %f %f\n" % (vn[vi, 0], vn[vi, 1], vn[vi, 2]))
        for face in faces:
            f.write("f %d %d %d\n" % (face[0] + 1, face[1] + 1, face[2] + 1))


def xyz2tensor(txt, append_normals=False):
    # Parse 'x y z', 'v x y z', or 'x y z nx ny nz' lines into an (N, 3) or (N, 6) tensor.
    pts = []
    for line in txt.split('\n'):
        line = line.strip()
        line = line.lstrip('v ')
        spt = line.split(' ')
        if 'nan' in line:
            continue
        if len(spt) == 6:
            pts.append(torch.tensor([float(x) for x in spt]))
        if len(spt) == 3:
            t = [float(x) for x in spt]
            if append_normals:
                t += [0.0 for _ in range(3)]
            pts.append(torch.tensor(t))

    rtn = torch.stack(pts, dim=0)
    return rtn


def read_xyz_file(path: Path):
    with open(path, 'r') as file:
        return xyz2tensor(file.read(), append_normals=True)


def embed_color(img: torch.Tensor, color, box_size=70):
    # Paint a box_size x box_size color swatch into the bottom-right corner of a (B, C, H, W) image.
    shp = img.shape
    D2 = [shp[2] - box_size, shp[2]]
    D3 = [shp[3] - box_size, shp[3]]
    img = img.clone()
    img[:, :3, D2[0]:D2[1], D3[0]:D3[1]] = color[:, :, None, None]
    if img.shape[1] == 4:
        img[:, -1, D2[0]:D2[1], D3[0]:D3[1]] = 1
    return img


def get_n_params(model):
    # Count the total number of parameters in a model.
    pp = 0
    for p in list(model.parameters()):
        nn = 1
        for s in list(p.size()):
            nn = nn * s
        pp += nn
    return pp


# Note: xyz2tensor and read_xyz_file are defined a second time below;
# the later definitions shadow the identical ones above.
def xyz2tensor(txt, append_normals=False):
    pts = []
    for line in txt.split('\n'):
        line = line.strip()
        line = line.lstrip('v ')
        spt = line.split(' ')
        if 'nan' in line:
            continue
        if len(spt) == 6:
            pts.append(torch.tensor([float(x) for x in spt]))
        if len(spt) == 3:
            t = [float(x) for x in spt]
            if append_normals:
                t += [0.0 for _ in range(3)]
            pts.append(torch.tensor(t))

    rtn = torch.stack(pts, dim=0)
    return rtn


def read_xyz_file(path: Path):
    with open(path, 'r') as file:
        return xyz2tensor(file.read(), append_normals=True)
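To illustrate the parser's expected input (a sketch under the assumption that util.py is importable as util): xyz2tensor accepts plain 'x y z' rows or OBJ-style 'v x y z' rows, skips lines containing nan, and can zero-pad normals so every point carries six values.

import util

sample = """v 0.1 0.2 0.3
0.4 0.5 0.6 0.0 0.0 1.0"""

pts = util.xyz2tensor(sample, append_normals=True)
print(pts.shape)  # torch.Size([2, 6]): xyz plus normals, zero-padded when absent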