Dataset schema:
- relative_path: stringclasses, 812 values
- section: stringclasses, 339 values
- filename: stringlengths, 2 to 61
- text: stringlengths, 6 to 1.76M
relative_path: PyTorch/Forecasting/TFT/triton
section: triton
filename: prepare_input_data
text:
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import json
import os

import numpy as np
import torch
from torch.utils.data import DataLoader

from configuration import ElectricityConfig
from data_utils import TFTDataset
from deployment_toolkit.dump import JsonDumpWriter


def _verify_and_format_dump(**x):
    data = {}
    for k, v in x.items():
        temp_data = {}
        for i in range(v.shape[1]):
            temp_data["INPUT" + str(i)] = v[:, i]
        data[k] = temp_data
    return data


def main():
    args = _parse_args()
    state_dict = torch.load(os.path.join(args.checkpoint, "checkpoint.pt"))
    config = state_dict['config']
    test_split = TFTDataset(os.path.join(args.dataset, "test.csv"), config)
    data_loader = DataLoader(test_split, batch_size=args.batch_size, num_workers=2)

    input_names_dict = {'s_cat': 's_cat__0', 's_cont': 's_cont__1', 'k_cat': 'k_cat__2',
                        'k_cont': 'k_cont__3', 'o_cat': 'o_cat__4', 'o_cont': 'o_cont__5',
                        'target': 'target__6', 'id': 'id__7'}
    reshaper = [-1] + [1]

    # Take a single batch from the test split; empty tensors are padded with ones.
    for step, batch in enumerate(data_loader):
        bs = batch['target'].shape[0]
        x = {input_names_dict[key]: tensor.numpy() if tensor.numel() else np.ones([bs]).reshape(reshaper)
             for key, tensor in batch.items()}
        ids = batch['id'][:, 0, :].numpy()
        y_real = {'target__0': batch['target'][:, config.encoder_length:, :].numpy()}
        break

    data = {"data": [{k: {"content": v[i].flatten().tolist(),
                          "shape": list(v[i].shape),
                          "dtype": str(v[i].dtype)} for k, v in x.items()}
                     for i in range(args.batch_size)]}

    with open(os.path.join(args.input_data_dir, "data.json"), "w") as f:
        f.write(json.dumps(data))


def _parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--checkpoint", required=True)
    # type=int is required: range(args.batch_size) fails on the raw string value
    parser.add_argument("--batch-size", type=int, required=False, default=1)
    parser.add_argument("--dataset", help="Path to dataset", required=True)
    parser.add_argument("--input-data-dir", help="Path to output folder", required=True)
    args = parser.parse_args()
    return args


if __name__ == "__main__":
    main()
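For orientation, here is a minimal sketch (not part of the source) of reading back the payload this script writes. The input_data/ directory is a hypothetical example of what might be passed to --input-data-dir; the "data"/"content"/"shape"/"dtype" keys match the dump format in the script above.

import json

with open("input_data/data.json") as f:  # hypothetical output location
    payload = json.load(f)

first_sample = payload["data"][0]
for name, tensor in first_sample.items():
    # e.g. target__6 [1, 192, 1] float32 192
    print(name, tensor["shape"], tensor["dtype"], len(tensor["content"]))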
relative_path: TensorFlow2/Segmentation/Contrib/UNet3P/data_preparation
section: data_preparation
filename: delete_zip_data
text:
rm data/Training_Batch1.zip
rm data/Training_Batch2.zip
relative_path: PyTorch/SpeechRecognition/wav2vec2/common/fairseq
section: fairseq
filename: incremental_decoding_utils
text:
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid
from typing import Dict, Optional

from torch import Tensor


class FairseqIncrementalState(object):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.init_incremental_state()

    def init_incremental_state(self):
        self._incremental_state_id = str(uuid.uuid4())

    def _get_full_incremental_state_key(self, key: str) -> str:
        return "{}.{}".format(self._incremental_state_id, key)

    def get_incremental_state(
        self,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
        key: str,
    ) -> Optional[Dict[str, Optional[Tensor]]]:
        """Helper for getting incremental state for an nn.Module."""
        full_key = self._get_full_incremental_state_key(key)
        if incremental_state is None or full_key not in incremental_state:
            return None
        return incremental_state[full_key]

    def set_incremental_state(
        self,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
        key: str,
        value: Dict[str, Optional[Tensor]],
    ) -> Optional[Dict[str, Dict[str, Optional[Tensor]]]]:
        """Helper for setting incremental state for an nn.Module."""
        if incremental_state is not None:
            full_key = self._get_full_incremental_state_key(key)
            incremental_state[full_key] = value
        return incremental_state


def with_incremental_state(cls):
    cls.__bases__ = (FairseqIncrementalState,) + tuple(
        b for b in cls.__bases__ if b != FairseqIncrementalState
    )
    return cls
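As a usage illustration, a minimal sketch (assumed, not from the source) of decorating a module with with_incremental_state: the decorator mixes FairseqIncrementalState into the class, so each instance namespaces its entries in a shared state dict via its own UUID.

import torch
from torch import nn

@with_incremental_state
class DummyAttention(nn.Module):  # hypothetical module
    def forward(self, x, incremental_state=None):
        prev = self.get_incremental_state(incremental_state, "cache")
        # prev is None on the first call, then {"key": <tensor>} afterwards
        self.set_incremental_state(incremental_state, "cache", {"key": x})
        return x

state = {}
layer = DummyAttention()
layer(torch.zeros(1, 4), incremental_state=state)
# state now holds one entry under "<uuid>.cache"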
relative_path: TensorFlow2/Detection/Efficientdet/object_detection
section: object_detection
filename: box_list
text:
# Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bounding Box List definition.

BoxList represents a list of bounding boxes as tensorflow tensors, where each
bounding box is represented as a row of 4 numbers,
[y_min, x_min, y_max, x_max]. It is assumed that all bounding boxes within a
given list correspond to a single image. See also box_list_ops.py for common
box related operations (such as area, iou, etc).

Optionally, users can add additional related fields (such as weights).
We assume the following things to be true about fields:
* they correspond to boxes in the box_list along the 0th dimension
* they have inferable rank at graph construction time
* all dimensions except for possibly the 0th can be inferred
  (i.e., not None) at graph construction time.

Some other notes:
* Following tensorflow conventions, we use height, width ordering,
  and correspondingly, y,x (or ymin, xmin, ymax, xmax) ordering
* Tensors are always provided as (flat) [N, 4] tensors.
"""

import tensorflow.compat.v1 as tf


class BoxList(object):
  """Box collection."""

  def __init__(self, boxes):
    """Constructs box collection.

    Args:
      boxes: a tensor of shape [N, 4] representing box corners

    Raises:
      ValueError: if invalid dimensions for bbox data or if bbox data is not
        in float32 format.
    """
    if len(boxes.get_shape()) != 2 or boxes.get_shape()[-1] != 4:
      raise ValueError('Invalid dimensions for box data.')
    if boxes.dtype != tf.float32:
      raise ValueError('Invalid tensor type: should be tf.float32')
    self.data = {'boxes': boxes}

  def num_boxes(self):
    """Returns number of boxes held in collection.

    Returns:
      a tensor representing the number of boxes held in the collection.
    """
    return tf.shape(self.data['boxes'])[0]

  def num_boxes_static(self):
    """Returns number of boxes held in collection.

    This number is inferred at graph construction time rather than run-time.

    Returns:
      Number of boxes held in collection (integer) or None if this is not
      inferable at graph construction time.
    """
    return self.data['boxes'].get_shape().as_list()[0]

  def get_all_fields(self):
    """Returns all fields."""
    return self.data.keys()

  def get_extra_fields(self):
    """Returns all non-box fields (i.e., everything not named 'boxes')."""
    return [k for k in self.data.keys() if k != 'boxes']

  def add_field(self, field, field_data):
    """Add field to box list.

    This method can be used to add related box data such as weights/labels,
    etc.

    Args:
      field: a string key to access the data via `get`
      field_data: a tensor containing the data to store in the BoxList
    """
    self.data[field] = field_data

  def has_field(self, field):
    return field in self.data

  def get(self):
    """Convenience function for accessing box coordinates.

    Returns:
      a tensor with shape [N, 4] representing box coordinates.
    """
    return self.get_field('boxes')

  def set(self, boxes):
    """Convenience function for setting box coordinates.

    Args:
      boxes: a tensor of shape [N, 4] representing box corners

    Raises:
      ValueError: if invalid dimensions for bbox data
    """
    if len(boxes.get_shape()) != 2 or boxes.get_shape()[-1] != 4:
      raise ValueError('Invalid dimensions for box data.')
    self.data['boxes'] = boxes

  def get_field(self, field):
    """Accesses data associated with the specified field in the box collection.

    Args:
      field: a string parameter specifying the related field to be accessed.

    Returns:
      a tensor representing the associated field.

    Raises:
      ValueError: if invalid field
    """
    if not self.has_field(field):
      raise ValueError('field ' + str(field) + ' does not exist')
    return self.data[field]

  def set_field(self, field, value):
    """Sets the value of a field.

    Updates the field of a box_list with a given value.

    Args:
      field: (string) name of the field to set value.
      value: the value to assign to the field.

    Raises:
      ValueError: if the box_list does not have specified field.
    """
    if not self.has_field(field):
      raise ValueError('field %s does not exist' % field)
    self.data[field] = value

  def get_center_coordinates_and_sizes(self, scope=None):
    """Computes the center coordinates, height and width of the boxes.

    Args:
      scope: name scope of the function.

    Returns:
      a list of 4 1-D tensors [ycenter, xcenter, height, width].
    """
    with tf.name_scope(scope, 'get_center_coordinates_and_sizes'):
      box_corners = self.get()
      ymin, xmin, ymax, xmax = tf.unstack(tf.transpose(box_corners))
      width = xmax - xmin
      height = ymax - ymin
      ycenter = ymin + height / 2.
      xcenter = xmin + width / 2.
      return [ycenter, xcenter, height, width]

  def transpose_coordinates(self, scope=None):
    """Transpose the coordinate representation in a boxlist.

    Args:
      scope: name scope of the function.
    """
    with tf.name_scope(scope, 'transpose_coordinates'):
      y_min, x_min, y_max, x_max = tf.split(
          value=self.get(), num_or_size_splits=4, axis=1)
      self.set(tf.concat([x_min, y_min, x_max, y_max], 1))

  def as_tensor_dict(self, fields=None):
    """Retrieves specified fields as a dictionary of tensors.

    Args:
      fields: (optional) list of fields to return in the dictionary. If None
        (default), all fields are returned.

    Returns:
      tensor_dict: A dictionary of tensors specified by fields.

    Raises:
      ValueError: if specified field is not contained in boxlist.
    """
    tensor_dict = {}
    if fields is None:
      fields = self.get_all_fields()
    for field in fields:
      if not self.has_field(field):
        raise ValueError('boxlist must contain all specified fields')
      tensor_dict[field] = self.get_field(field)
    return tensor_dict
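A minimal usage sketch (not from the source): BoxList wraps an [N, 4] float32 tensor of [y_min, x_min, y_max, x_max] corners and lets related per-box data ride along as named fields.

import tensorflow.compat.v1 as tf

boxes = tf.constant([[0.0, 0.0, 0.5, 0.5],
                     [0.25, 0.25, 1.0, 1.0]], dtype=tf.float32)
box_list = BoxList(boxes)
box_list.add_field('scores', tf.constant([0.9, 0.75]))

# centers/sizes are derived tensors; the static box count is known here
ycenter, xcenter, height, width = box_list.get_center_coordinates_and_sizes()
print(box_list.num_boxes_static())  # 2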
relative_path: TensorFlow2/LanguageModeling/ELECTRA
section: ELECTRA
filename: postprocess_pretrained_ckpt
text:
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import collections
import json
import os

import tensorflow as tf

from utils import log, heading
from run_pretraining import PretrainingConfig
from modeling import PretrainingModel


def from_pretrained_ckpt(args):
    config = PretrainingConfig(
        model_name='postprocessing',
        data_dir='postprocessing',
        generator_hidden_size=0.3333333,
    )

    # Padding for divisibility by 8
    if config.vocab_size % 8 != 0:
        config.vocab_size += 8 - (config.vocab_size % 8)

    if args.amp:
        policy = tf.keras.mixed_precision.experimental.Policy("mixed_float16", loss_scale="dynamic")
        tf.keras.mixed_precision.experimental.set_policy(policy)
        print('Compute dtype: %s' % policy.compute_dtype)    # Compute dtype: float16
        print('Variable dtype: %s' % policy.variable_dtype)  # Variable dtype: float32

    # Set up model
    model = PretrainingModel(config)

    # Load checkpoint
    checkpoint = tf.train.Checkpoint(step=tf.Variable(1), model=model)
    checkpoint.restore(args.pretrained_checkpoint).expect_partial()
    log(" ** Restored from {} at step {}".format(args.pretrained_checkpoint, int(checkpoint.step) - 1))

    disc_dir = os.path.join(args.output_dir, 'discriminator')
    gen_dir = os.path.join(args.output_dir, 'generator')

    heading(" ** Saving discriminator")
    model.discriminator(model.discriminator.dummy_inputs)
    model.discriminator.save_pretrained(disc_dir)

    heading(" ** Saving generator")
    model.generator(model.generator.dummy_inputs)
    model.generator.save_pretrained(gen_dir)


if __name__ == '__main__':
    # Parse essential args
    parser = argparse.ArgumentParser()
    parser.add_argument('--pretrained_checkpoint')
    parser.add_argument('--output_dir')
    parser.add_argument('--amp', action='store_true', default=False)
    args = parser.parse_args()

    from_pretrained_ckpt(args)
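A minimal sketch (hypothetical paths, not from the source) of driving the postprocessing step from Python instead of the CLI; from_pretrained_ckpt only reads the three attributes shown.

from types import SimpleNamespace

args = SimpleNamespace(
    pretrained_checkpoint="results/base/checkpoints/ckpt-1000",  # hypothetical
    output_dir="results/base/postprocessed",                     # hypothetical
    amp=False,
)
from_pretrained_ckpt(args)  # writes discriminator/ and generator/ under output_dir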
relative_path: PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/utils
section: utils
filename: miscellaneous
text:
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import errno
import os


def mkdir(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
relative_path: PyTorch/Classification/GPUNet/triton/runner
section: runner
filename: runner_proxy
text:
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from typing import List, Type

# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
    __package__ = pathlib.Path(__file__).parent.name

from .config import Config
from .executor import Executor
from .finalizer import Finalizer
from .maintainer import Maintainer
from .pipeline import Pipeline
from .preparer import Preparer
from .runner import Runner


class RunnerProxy:
    """
    Runner proxy to configure original runner
    """

    maintainer_cls: Type[Maintainer] = None
    executor_cls: Type[Executor] = None
    preparer_cls: Type[Preparer] = None
    finalizer_cls: Type[Finalizer] = None

    def __init__(self, config: Config, pipeline: Pipeline, devices: List[str]):
        """
        RunnerProxy constructor

        Args:
            config: Config object
            pipeline: Pipeline to evaluate
            devices: List of devices to use for tests
        """
        self._runner = Runner(
            config=config,
            pipeline=pipeline,
            devices=devices,
            maintainer_cls=self.maintainer_cls,
            executor_cls=self.executor_cls,
            preparer_cls=self.preparer_cls,
            finalizer_cls=self.finalizer_cls,
        )

    def start(self) -> None:
        """
        Runner interface
        """
        self._runner.start()
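The four *_cls hooks are meant to be pinned by concrete proxies. A minimal sketch of that pattern (a hypothetical subclass, assuming the imported base classes are directly usable here):

class LocalRunnerProxy(RunnerProxy):  # hypothetical subclass, not from the source
    maintainer_cls = Maintainer
    executor_cls = Executor
    preparer_cls = Preparer
    finalizer_cls = Finalizer

# proxy = LocalRunnerProxy(config=config, pipeline=pipeline, devices=["0"])
# proxy.start()  # forwards to the fully wired Runner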
relative_path: TensorFlow/Detection/SSD/models/research/object_detection/models
section: models
filename: embedded_ssd_mobilenet_v1_feature_extractor_test
text:
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for embedded_ssd_mobilenet_v1_feature_extractor."""

import numpy as np
import tensorflow as tf

from object_detection.models import embedded_ssd_mobilenet_v1_feature_extractor
from object_detection.models import ssd_feature_extractor_test


class EmbeddedSSDMobileNetV1FeatureExtractorTest(
    ssd_feature_extractor_test.SsdFeatureExtractorTestBase):

  def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
                                is_training=True):
    """Constructs a new feature extractor.

    Args:
      depth_multiplier: float depth multiplier for feature extractor
      pad_to_multiple: the nearest multiple to zero pad the input height and
        width dimensions to.
      is_training: whether the network is in training mode.

    Returns:
      an ssd_meta_arch.SSDFeatureExtractor object.
    """
    min_depth = 32
    return (embedded_ssd_mobilenet_v1_feature_extractor.
            EmbeddedSSDMobileNetV1FeatureExtractor(
                is_training, depth_multiplier, min_depth, pad_to_multiple,
                self.conv_hyperparams_fn,
                override_base_feature_extractor_hyperparams=True))

  def test_extract_features_returns_correct_shapes_256(self):
    image_height = 256
    image_width = 256
    depth_multiplier = 1.0
    pad_to_multiple = 1
    expected_feature_map_shape = [(2, 16, 16, 512), (2, 8, 8, 1024),
                                  (2, 4, 4, 512), (2, 2, 2, 256),
                                  (2, 1, 1, 256)]
    self.check_extract_features_returns_correct_shape(
        2, image_height, image_width, depth_multiplier, pad_to_multiple,
        expected_feature_map_shape)

  def test_extract_features_returns_correct_shapes_with_dynamic_inputs(self):
    image_height = 256
    image_width = 256
    depth_multiplier = 1.0
    pad_to_multiple = 1
    expected_feature_map_shape = [(2, 16, 16, 512), (2, 8, 8, 1024),
                                  (2, 4, 4, 512), (2, 2, 2, 256),
                                  (2, 1, 1, 256)]
    self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
        2, image_height, image_width, depth_multiplier, pad_to_multiple,
        expected_feature_map_shape)

  def test_extract_features_returns_correct_shapes_enforcing_min_depth(self):
    image_height = 256
    image_width = 256
    depth_multiplier = 0.5**12
    pad_to_multiple = 1
    expected_feature_map_shape = [(2, 16, 16, 32), (2, 8, 8, 32),
                                  (2, 4, 4, 32), (2, 2, 2, 32),
                                  (2, 1, 1, 32)]
    self.check_extract_features_returns_correct_shape(
        2, image_height, image_width, depth_multiplier, pad_to_multiple,
        expected_feature_map_shape)

  def test_extract_features_returns_correct_shapes_with_pad_to_multiple_of_1(
      self):
    image_height = 256
    image_width = 256
    depth_multiplier = 1.0
    pad_to_multiple = 1
    expected_feature_map_shape = [(2, 16, 16, 512), (2, 8, 8, 1024),
                                  (2, 4, 4, 512), (2, 2, 2, 256),
                                  (2, 1, 1, 256)]
    self.check_extract_features_returns_correct_shape(
        2, image_height, image_width, depth_multiplier, pad_to_multiple,
        expected_feature_map_shape)

  def test_extract_features_raises_error_with_pad_to_multiple_not_1(self):
    depth_multiplier = 1.0
    pad_to_multiple = 2
    with self.assertRaises(ValueError):
      _ = self._create_feature_extractor(depth_multiplier, pad_to_multiple)

  def test_extract_features_raises_error_with_invalid_image_size(self):
    image_height = 128
    image_width = 128
    depth_multiplier = 1.0
    pad_to_multiple = 1
    self.check_extract_features_raises_error_with_invalid_image_size(
        image_height, image_width, depth_multiplier, pad_to_multiple)

  def test_preprocess_returns_correct_value_range(self):
    image_height = 256
    image_width = 256
    depth_multiplier = 1
    pad_to_multiple = 1
    test_image = np.random.rand(4, image_height, image_width, 3)
    feature_extractor = self._create_feature_extractor(depth_multiplier,
                                                       pad_to_multiple)
    preprocessed_image = feature_extractor.preprocess(test_image)
    self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))

  def test_variables_only_created_in_scope(self):
    depth_multiplier = 1
    pad_to_multiple = 1
    scope_name = 'MobilenetV1'
    self.check_feature_extractor_variables_under_scope(
        depth_multiplier, pad_to_multiple, scope_name)


if __name__ == '__main__':
  tf.test.main()
relative_path: PyTorch/Forecasting/TFT/triton/runner
section: runner
filename: core
text:
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from enum import Enum
from typing import Any, Dict, List

import yaml


class CustomDumper(yaml.Dumper):
    """
    Custom YAML dumper to avoid creating aliases
    """

    def ignore_aliases(self, data: Dict) -> bool:
        return True


class Paths:
    """
    Paths mapping inside Triton Container
    """

    MODEL_REPOSITORY_PATH = "/mnt/triton-models"
    LIBRARIES_PATH = "/mnt/libs"


class Framework(Enum):
    """
    Supported frameworks
    """

    TensorFlow1 = "TensorFlow1"
    TensorFlow2 = "TensorFlow2"
    PyTorch = "PyTorch"


class Command:
    """Represents wrapper of raw string command"""

    def __init__(self, data: str):
        """
        Store command data

        Args:
            data: string with bash commands to execute
        """
        self._data = data

    def __str__(self) -> str:
        """
        String object representation

        Returns:
            String
        """
        return self._data


class DataObject(object):
    """
    Data object representation handling recursive transformation from object to dict
    """

    READ_ONLY = set()

    def to_dict(self) -> Dict:
        """
        Represent object as dictionary

        Returns:
            Dict
        """
        data = dict()
        filtered_data = {key: value for key, value in self.__dict__.items() if key not in self.READ_ONLY}
        for key, value in filtered_data.items():
            data[key] = self._convert_value(value)

        return data

    def _convert_value(self, value: Any) -> Any:
        """
        Convert value based on its type

        Args:
            value: variable to convert

        Returns:
            Converted object
        """
        if isinstance(value, DataObject):
            value = value.to_dict()
        elif isinstance(value, dict):
            value = self._from_dict(value)
        elif isinstance(value, list):
            value = self._from_list(value)
        elif isinstance(value, Enum):
            value = value.value
        elif isinstance(value, pathlib.Path):
            value = value.as_posix()

        return value

    def _from_dict(self, values: Dict) -> Any:
        """
        Convert dictionary values

        Args:
            values: dictionary with values

        Returns:
            Any
        """
        data = dict()
        for key, value in values.items():
            data[key] = self._convert_value(value)

        return data

    def _from_list(self, values: List) -> Any:
        """
        Convert list of values

        Args:
            values: list with values

        Returns:
            Any
        """
        items = list()
        for value in values:
            item = self._convert_value(value)
            items.append(item)

        return items


AVAILABLE_FRAMEWORKS = [f.value for f in Framework]
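A minimal sketch (hypothetical subclass, not from the source) of DataObject's recursive to_dict(): nested enums and pathlib paths collapse to plain serializable values.

import pathlib

class ExperimentInfo(DataObject):  # hypothetical subclass
    def __init__(self):
        self.framework = Framework.PyTorch
        self.workdir = pathlib.Path("/tmp/run")
        self.tags = ["baseline", "fp16"]

print(ExperimentInfo().to_dict())
# {'framework': 'PyTorch', 'workdir': '/tmp/run', 'tags': ['baseline', 'fp16']}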
relative_path: PyTorch/SpeechSynthesis/Tacotron2/notebooks/conversationalai
section: conversationalai
filename: export_tacotron2_ts
text:
# *****************************************************************************
#  Copyright (c) 2019, NVIDIA CORPORATION.  All rights reserved.
#
#  Redistribution and use in source and binary forms, with or without
#  modification, are permitted provided that the following conditions are met:
#      * Redistributions of source code must retain the above copyright
#        notice, this list of conditions and the following disclaimer.
#      * Redistributions in binary form must reproduce the above copyright
#        notice, this list of conditions and the following disclaimer in the
#        documentation and/or other materials provided with the distribution.
#      * Neither the name of the NVIDIA CORPORATION nor the
#        names of its contributors may be used to endorse or promote products
#        derived from this software without specific prior written permission.
#
#  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#  DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
#  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************

import torch
import argparse
import sys

sys.path.append('./')
from inference import checkpoint_from_distributed, unwrap_distributed, load_and_setup_model


def parse_args(parser):
    """
    Parse commandline arguments.
    """
    parser.add_argument('--tacotron2', type=str, required=True,
                        help='full path to the Tacotron2 model checkpoint file')
    parser.add_argument('-o', '--output', type=str, default="trtis_repo/tacotron/1/model.pt",
                        help='filename for the Tacotron 2 TorchScript model')
    parser.add_argument('--fp16', action='store_true',
                        help='inference with mixed precision')
    return parser


def main():
    parser = argparse.ArgumentParser(description='PyTorch Tacotron 2 Inference')
    parser = parse_args(parser)
    args = parser.parse_args()

    tacotron2 = load_and_setup_model('Tacotron2', parser, args.tacotron2,
                                     fp16_run=args.fp16, cpu_run=False,
                                     forward_is_infer=True)

    jitted_tacotron2 = torch.jit.script(tacotron2)
    torch.jit.save(jitted_tacotron2, args.output)


if __name__ == '__main__':
    main()
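For reference, a minimal sketch of loading the TorchScript artifact the script saves; torch.jit.load is standard PyTorch, and the path mirrors the script's default --output value.

import torch

tacotron2 = torch.jit.load("trtis_repo/tacotron/1/model.pt")
tacotron2.eval()  # ready for inference without the original Python class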
relative_path: TensorFlow/Detection/SSD/models/research/object_detection/utils
section: utils
filename: vrd_evaluation
text:
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluator class for Visual Relations Detection.

VRDDetectionEvaluator is a class which manages ground truth information of a
visual relations detection (vrd) dataset, and computes frequently used
detection metrics such as Precision, Recall, Recall@k, of the provided vrd
detection results.
It supports the following operations:
1) Adding ground truth information of images sequentially.
2) Adding detection results of images sequentially.
3) Evaluating detection metrics on already inserted detection results.

Note1: groundtruth should be inserted before evaluation.
Note2: This module operates on numpy boxes and box lists.
"""

from abc import abstractmethod
import collections
import logging
import numpy as np

from object_detection.core import standard_fields
from object_detection.utils import metrics
from object_detection.utils import object_detection_evaluation
from object_detection.utils import per_image_vrd_evaluation

# Below standard input numpy datatypes are defined:
# box_data_type - datatype of the groundtruth visual relations box annotations;
# this datatype consists of two named boxes: subject bounding box and object
# bounding box. Each box is of the format [y_min, x_min, y_max, x_max], each
# coordinate being of type float32.
# label_data_type - corresponding datatype of the visual relations label
# annotations; it consists of three numerical class labels: subject class
# label, object class label and relation class label, each class label being
# of type int32.
vrd_box_data_type = np.dtype([('subject', 'f4', (4,)), ('object', 'f4', (4,))])
single_box_data_type = np.dtype([('box', 'f4', (4,))])
label_data_type = np.dtype([('subject', 'i4'), ('object', 'i4'),
                            ('relation', 'i4')])


class VRDDetectionEvaluator(object_detection_evaluation.DetectionEvaluator):
  """A class to evaluate VRD detections.

  This class serves as a base class for VRD evaluation in two settings:
  - phrase detection
  - relation detection.
  """

  def __init__(self, matching_iou_threshold=0.5, metric_prefix=None):
    """Constructor.

    Args:
      matching_iou_threshold: IOU threshold to use for matching groundtruth
        boxes to detection boxes.
      metric_prefix: (optional) string prefix for metric name; if None, no
        prefix is used.
    """
    super(VRDDetectionEvaluator, self).__init__([])
    self._matching_iou_threshold = matching_iou_threshold
    self._evaluation = _VRDDetectionEvaluation(
        matching_iou_threshold=self._matching_iou_threshold)
    self._image_ids = set([])
    self._metric_prefix = (metric_prefix + '_') if metric_prefix else ''
    self._evaluatable_labels = {}
    self._negative_labels = {}

  @abstractmethod
  def _process_groundtruth_boxes(self, groundtruth_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    Phrase detection and Relation detection subclasses re-implement this method
    depending on the task.

    Args:
      groundtruth_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max] (see
        datatype vrd_box_data_type, single_box_data_type above).
    """
    raise NotImplementedError(
        '_process_groundtruth_boxes method should be implemented in subclasses'
        ' of VRDDetectionEvaluator.')

  @abstractmethod
  def _process_detection_boxes(self, detections_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    Phrase detection and Relation detection subclasses re-implement this method
    depending on the task.

    Args:
      detections_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max] (see
        datatype vrd_box_data_type, single_box_data_type above).
    """
    raise NotImplementedError(
        '_process_detection_boxes method should be implemented in subclasses'
        ' of VRDDetectionEvaluator.')

  def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
    """Adds groundtruth for a single image to be used for evaluation.

    Args:
      image_id: A unique string/integer identifier for the image.
      groundtruth_dict: A dictionary containing -
        standard_fields.InputDataFields.groundtruth_boxes: A numpy array
          of structures with the shape [M, 1], representing M tuples, each
          tuple containing the same number of named bounding boxes.
          Each box is of the format [y_min, x_min, y_max, x_max] (see
          datatype vrd_box_data_type, single_box_data_type above).
        standard_fields.InputDataFields.groundtruth_classes: A numpy array of
          structures shape [M, 1], representing  the class labels of the
          corresponding bounding boxes and possibly additional classes (see
          datatype label_data_type above).
        standard_fields.InputDataFields.groundtruth_image_classes: numpy array
          of shape [K] containing verified labels.

    Raises:
      ValueError: On adding groundtruth for an image more than once.
    """
    if image_id in self._image_ids:
      raise ValueError('Image with id {} already added.'.format(image_id))
    groundtruth_class_tuples = (
        groundtruth_dict[standard_fields.InputDataFields.groundtruth_classes])
    groundtruth_box_tuples = (
        groundtruth_dict[standard_fields.InputDataFields.groundtruth_boxes])

    self._evaluation.add_single_ground_truth_image_info(
        image_key=image_id,
        groundtruth_box_tuples=self._process_groundtruth_boxes(
            groundtruth_box_tuples),
        groundtruth_class_tuples=groundtruth_class_tuples)
    self._image_ids.update([image_id])
    all_classes = []
    for field in groundtruth_box_tuples.dtype.fields:
      all_classes.append(groundtruth_class_tuples[field])
    groundtruth_positive_classes = np.unique(np.concatenate(all_classes))
    verified_labels = groundtruth_dict.get(
        standard_fields.InputDataFields.groundtruth_image_classes,
        np.array([], dtype=int))
    self._evaluatable_labels[image_id] = np.unique(
        np.concatenate((verified_labels, groundtruth_positive_classes)))

    self._negative_labels[image_id] = np.setdiff1d(
        verified_labels, groundtruth_positive_classes)

  def add_single_detected_image_info(self, image_id, detections_dict):
    """Adds detections for a single image to be used for evaluation.

    Args:
      image_id: A unique string/integer identifier for the image.
      detections_dict: A dictionary containing -
        standard_fields.DetectionResultFields.detection_boxes: A numpy array of
          structures with shape [N, 1], representing N tuples, each tuple
          containing the same number of named bounding boxes.
          Each box is of the format [y_min, x_min, y_max, x_max] (as an example
          see datatype vrd_box_data_type, single_box_data_type above).
        standard_fields.DetectionResultFields.detection_scores: float32 numpy
          array of shape [N] containing detection scores for the boxes.
        standard_fields.DetectionResultFields.detection_classes: A numpy array
          of structures shape [N, 1], representing the class labels of the
          corresponding bounding boxes and possibly additional classes (see
          datatype label_data_type above).
    """
    if image_id not in self._image_ids:
      logging.warning('No groundtruth for the image with id %s.', image_id)
      # The evaluator assumes that groundtruth is inserted first; register the
      # image id here so a later groundtruth insertion for it fails loudly.
      self._image_ids.update([image_id])
      self._negative_labels[image_id] = np.array([])
      self._evaluatable_labels[image_id] = np.array([])

    num_detections = detections_dict[
        standard_fields.DetectionResultFields.detection_boxes].shape[0]
    detection_class_tuples = detections_dict[
        standard_fields.DetectionResultFields.detection_classes]
    detection_box_tuples = detections_dict[
        standard_fields.DetectionResultFields.detection_boxes]
    negative_selector = np.zeros(num_detections, dtype=bool)
    selector = np.ones(num_detections, dtype=bool)
    # Only check boxable labels
    for field in detection_box_tuples.dtype.fields:
      # Verify if one of the labels is negative (this is sure FP)
      negative_selector |= np.isin(detection_class_tuples[field],
                                   self._negative_labels[image_id])
      # Verify if all labels are verified
      selector &= np.isin(detection_class_tuples[field],
                          self._evaluatable_labels[image_id])
    selector |= negative_selector
    self._evaluation.add_single_detected_image_info(
        image_key=image_id,
        detected_box_tuples=self._process_detection_boxes(
            detection_box_tuples[selector]),
        detected_scores=detections_dict[
            standard_fields.DetectionResultFields.detection_scores][selector],
        detected_class_tuples=detection_class_tuples[selector])

  def evaluate(self, relationships=None):
    """Compute evaluation result.

    Args:
      relationships: A dictionary of numerical label-text label mapping; if
        specified, returns per-relationship AP.

    Returns:
      A dictionary of metrics with the following fields -

      summary_metrics:
        'weightedAP@<matching_iou_threshold>IOU' : weighted average precision
          at the specified IOU threshold.
        'AP@<matching_iou_threshold>IOU/<relationship>' : AP per relationship.
        'mAP@<matching_iou_threshold>IOU': mean average precision at the
          specified IOU threshold.
        'Recall@50@<matching_iou_threshold>IOU': recall@50 at the specified
          IOU threshold.
        'Recall@100@<matching_iou_threshold>IOU': recall@100 at the specified
          IOU threshold.
      if relationships is specified, returns <relationship> in AP metrics as
      readable names, otherwise the names correspond to class numbers.
    """
    (weighted_average_precision, mean_average_precision, average_precisions, _,
     _, recall_50, recall_100, _, _) = (
         self._evaluation.evaluate())

    vrd_metrics = {
        (self._metric_prefix + 'weightedAP@{}IOU'.format(
            self._matching_iou_threshold)):
            weighted_average_precision,
        self._metric_prefix + 'mAP@{}IOU'.format(self._matching_iou_threshold):
            mean_average_precision,
        self._metric_prefix + 'Recall@50@{}IOU'.format(
            self._matching_iou_threshold):
            recall_50,
        self._metric_prefix + 'Recall@100@{}IOU'.format(
            self._matching_iou_threshold):
            recall_100,
    }
    if relationships:
      for key, average_precision in average_precisions.items():
        vrd_metrics[self._metric_prefix + 'AP@{}IOU/{}'.format(
            self._matching_iou_threshold,
            relationships[key])] = average_precision
    else:
      for key, average_precision in average_precisions.items():
        vrd_metrics[self._metric_prefix + 'AP@{}IOU/{}'.format(
            self._matching_iou_threshold, key)] = average_precision

    return vrd_metrics

  def clear(self):
    """Clears the state to prepare for a fresh evaluation."""
    self._evaluation = _VRDDetectionEvaluation(
        matching_iou_threshold=self._matching_iou_threshold)
    self._image_ids.clear()
    self._negative_labels.clear()
    self._evaluatable_labels.clear()


class VRDRelationDetectionEvaluator(VRDDetectionEvaluator):
  """A class to evaluate VRD detections in relations setting.

  Expected groundtruth box datatype is vrd_box_data_type, expected groundtruth
  labels datatype is label_data_type.
  Expected detection box datatype is vrd_box_data_type, expected detection
  labels datatype is label_data_type.
  """

  def __init__(self, matching_iou_threshold=0.5):
    super(VRDRelationDetectionEvaluator, self).__init__(
        matching_iou_threshold=matching_iou_threshold,
        metric_prefix='VRDMetric_Relationships')

  def _process_groundtruth_boxes(self, groundtruth_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    Args:
      groundtruth_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max].

    Returns:
      Unchanged input.
    """
    return groundtruth_box_tuples

  def _process_detection_boxes(self, detections_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    Phrase detection and Relation detection subclasses re-implement this method
    depending on the task.

    Args:
      detections_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max] (see
        datatype vrd_box_data_type, single_box_data_type above).

    Returns:
      Unchanged input.
    """
    return detections_box_tuples


class VRDPhraseDetectionEvaluator(VRDDetectionEvaluator):
  """A class to evaluate VRD detections in phrase setting.

  Expected groundtruth box datatype is vrd_box_data_type, expected groundtruth
  labels datatype is label_data_type.
  Expected detection box datatype is single_box_data_type, expected detection
  labels datatype is label_data_type.
  """

  def __init__(self, matching_iou_threshold=0.5):
    super(VRDPhraseDetectionEvaluator, self).__init__(
        matching_iou_threshold=matching_iou_threshold,
        metric_prefix='VRDMetric_Phrases')

  def _process_groundtruth_boxes(self, groundtruth_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    In case of phrase evaluation task, evaluation expects exactly one bounding
    box containing all objects in the phrase. This bounding box is computed
    as an enclosing box of all groundtruth boxes of a phrase.

    Args:
      groundtruth_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max]. See
        vrd_box_data_type for an example of structure.

    Returns:
      result: A numpy array of structures with the shape [M, 1], each
        structure containing exactly one named bounding box. i-th output
        structure corresponds to the result of processing i-th input
        structure, where the named bounding box is computed as an enclosing
        bounding box of all bounding boxes of the i-th input structure.
    """
    first_box_key = list(groundtruth_box_tuples.dtype.fields.keys())[0]
    miny = groundtruth_box_tuples[first_box_key][:, 0]
    minx = groundtruth_box_tuples[first_box_key][:, 1]
    maxy = groundtruth_box_tuples[first_box_key][:, 2]
    maxx = groundtruth_box_tuples[first_box_key][:, 3]
    for fields in groundtruth_box_tuples.dtype.fields:
      miny = np.minimum(groundtruth_box_tuples[fields][:, 0], miny)
      minx = np.minimum(groundtruth_box_tuples[fields][:, 1], minx)
      maxy = np.maximum(groundtruth_box_tuples[fields][:, 2], maxy)
      maxx = np.maximum(groundtruth_box_tuples[fields][:, 3], maxx)

    data_result = []
    for i in range(groundtruth_box_tuples.shape[0]):
      data_result.append(([miny[i], minx[i], maxy[i], maxx[i]],))
    result = np.array(data_result, dtype=[('box', 'f4', (4,))])
    return result

  def _process_detection_boxes(self, detections_box_tuples):
    """Pre-processes boxes before adding them to the VRDDetectionEvaluation.

    In case of phrase evaluation task, evaluation expects exactly one bounding
    box containing all objects in the phrase. This bounding box is computed
    as an enclosing box of all groundtruth boxes of a phrase.

    Args:
      detections_box_tuples: A numpy array of structures with the shape
        [M, 1], each structure containing the same number of named bounding
        boxes. Each box is of the format [y_min, x_min, y_max, x_max]. See
        vrd_box_data_type for an example of this structure.

    Returns:
      result: A numpy array of structures with the shape [M, 1], each
        structure containing exactly one named bounding box. i-th output
        structure corresponds to the result of processing i-th input
        structure, where the named bounding box is computed as an enclosing
        bounding box of all bounding boxes of the i-th input structure.
    """
    first_box_key = list(detections_box_tuples.dtype.fields.keys())[0]
    miny = detections_box_tuples[first_box_key][:, 0]
    minx = detections_box_tuples[first_box_key][:, 1]
    maxy = detections_box_tuples[first_box_key][:, 2]
    maxx = detections_box_tuples[first_box_key][:, 3]
    for fields in detections_box_tuples.dtype.fields:
      miny = np.minimum(detections_box_tuples[fields][:, 0], miny)
      minx = np.minimum(detections_box_tuples[fields][:, 1], minx)
      maxy = np.maximum(detections_box_tuples[fields][:, 2], maxy)
      maxx = np.maximum(detections_box_tuples[fields][:, 3], maxx)

    data_result = []
    for i in range(detections_box_tuples.shape[0]):
      data_result.append(([miny[i], minx[i], maxy[i], maxx[i]],))
    result = np.array(data_result, dtype=[('box', 'f4', (4,))])
    return result


VRDDetectionEvalMetrics = collections.namedtuple('VRDDetectionEvalMetrics', [
    'weighted_average_precision', 'mean_average_precision',
    'average_precisions', 'precisions', 'recalls', 'recall_50', 'recall_100',
    'median_rank_50', 'median_rank_100'
])


class _VRDDetectionEvaluation(object):
  """Performs metric computation for the VRD task. This class is internal."""

  def __init__(self, matching_iou_threshold=0.5):
    """Constructor.

    Args:
      matching_iou_threshold: IOU threshold to use for matching groundtruth
        boxes to detection boxes.
    """
    self._per_image_eval = per_image_vrd_evaluation.PerImageVRDEvaluation(
        matching_iou_threshold=matching_iou_threshold)

    self._groundtruth_box_tuples = {}
    self._groundtruth_class_tuples = {}
    self._num_gt_instances = 0
    self._num_gt_imgs = 0
    self._num_gt_instances_per_relationship = {}

    self.clear_detections()

  def clear_detections(self):
    """Clears detections."""
    self._detection_keys = set()
    self._scores = []
    self._relation_field_values = []
    self._tp_fp_labels = []
    self._average_precisions = {}
    self._precisions = []
    self._recalls = []

  def add_single_ground_truth_image_info(
      self, image_key, groundtruth_box_tuples, groundtruth_class_tuples):
    """Adds groundtruth for a single image to be used for evaluation.

    Args:
      image_key: A unique string/integer identifier for the image.
      groundtruth_box_tuples: A numpy array of structures with the shape
        [M, 1], representing M tuples, each tuple containing the same number
        of named bounding boxes. Each box is of the format
        [y_min, x_min, y_max, x_max].
      groundtruth_class_tuples: A numpy array of structures shape [M, 1],
        representing the class labels of the corresponding bounding boxes and
        possibly additional classes.
    """
    if image_key in self._groundtruth_box_tuples:
      logging.warning(
          'image %s has already been added to the ground truth database.',
          image_key)
      return

    self._groundtruth_box_tuples[image_key] = groundtruth_box_tuples
    self._groundtruth_class_tuples[image_key] = groundtruth_class_tuples

    self._update_groundtruth_statistics(groundtruth_class_tuples)

  def add_single_detected_image_info(self, image_key, detected_box_tuples,
                                     detected_scores, detected_class_tuples):
    """Adds detections for a single image to be used for evaluation.

    Args:
      image_key: A unique string/integer identifier for the image.
      detected_box_tuples: A numpy array of structures with shape [N, 1],
        representing N tuples, each tuple containing the same number of named
        bounding boxes. Each box is of the format
        [y_min, x_min, y_max, x_max].
      detected_scores: A float numpy array of shape [N, 1], representing
        the confidence scores of the detected N object instances.
      detected_class_tuples: A numpy array of structures shape [N, 1],
        representing the class labels of the corresponding bounding boxes and
        possibly additional classes.
    """
    self._detection_keys.add(image_key)
    if image_key in self._groundtruth_box_tuples:
      groundtruth_box_tuples = self._groundtruth_box_tuples[image_key]
      groundtruth_class_tuples = self._groundtruth_class_tuples[image_key]
    else:
      groundtruth_box_tuples = np.empty(
          shape=[0, 4], dtype=detected_box_tuples.dtype)
      groundtruth_class_tuples = np.array([], dtype=detected_class_tuples.dtype)

    scores, tp_fp_labels, mapping = (
        self._per_image_eval.compute_detection_tp_fp(
            detected_box_tuples=detected_box_tuples,
            detected_scores=detected_scores,
            detected_class_tuples=detected_class_tuples,
            groundtruth_box_tuples=groundtruth_box_tuples,
            groundtruth_class_tuples=groundtruth_class_tuples))

    self._scores += [scores]
    self._tp_fp_labels += [tp_fp_labels]
    self._relation_field_values += [detected_class_tuples[mapping]['relation']]

  def _update_groundtruth_statistics(self, groundtruth_class_tuples):
    """Updates ground truth statistics.

    Args:
      groundtruth_class_tuples: A numpy array of structures shape [M, 1],
        representing the class labels of the corresponding bounding boxes and
        possibly additional classes.
    """
    self._num_gt_instances += groundtruth_class_tuples.shape[0]
    self._num_gt_imgs += 1
    for relation_field_value in np.unique(groundtruth_class_tuples['relation']):
      if relation_field_value not in self._num_gt_instances_per_relationship:
        self._num_gt_instances_per_relationship[relation_field_value] = 0
      self._num_gt_instances_per_relationship[relation_field_value] += np.sum(
          groundtruth_class_tuples['relation'] == relation_field_value)

  def evaluate(self):
    """Computes evaluation result.

    Returns:
      A named tuple with the following fields -
        average_precision: a float number corresponding to average precision.
        precisions: an array of precisions.
        recalls: an array of recalls.
        recall@50: recall computed on 50 top-scoring samples.
        recall@100: recall computed on 100 top-scoring samples.
        median_rank@50: median rank computed on 50 top-scoring samples.
        median_rank@100: median rank computed on 100 top-scoring samples.
    """
    if self._num_gt_instances == 0:
      logging.warning('No ground truth instances')

    if not self._scores:
      scores = np.array([], dtype=float)
      tp_fp_labels = np.array([], dtype=bool)
    else:
      scores = np.concatenate(self._scores)
      tp_fp_labels = np.concatenate(self._tp_fp_labels)
      relation_field_values = np.concatenate(self._relation_field_values)

    for relation_field_value, _ in (
        self._num_gt_instances_per_relationship.items()):
      precisions, recalls = metrics.compute_precision_recall(
          scores[relation_field_values == relation_field_value],
          tp_fp_labels[relation_field_values == relation_field_value],
          self._num_gt_instances_per_relationship[relation_field_value])
      self._average_precisions[
          relation_field_value] = metrics.compute_average_precision(
              precisions, recalls)

    self._mean_average_precision = np.mean(
        list(self._average_precisions.values()))

    self._precisions, self._recalls = metrics.compute_precision_recall(
        scores, tp_fp_labels, self._num_gt_instances)
    self._weighted_average_precision = metrics.compute_average_precision(
        self._precisions, self._recalls)

    self._recall_50 = (
        metrics.compute_recall_at_k(self._tp_fp_labels,
                                    self._num_gt_instances, 50))
    self._median_rank_50 = (
        metrics.compute_median_rank_at_k(self._tp_fp_labels, 50))
    self._recall_100 = (
        metrics.compute_recall_at_k(self._tp_fp_labels,
                                    self._num_gt_instances, 100))
    self._median_rank_100 = (
        metrics.compute_median_rank_at_k(self._tp_fp_labels, 100))

    return VRDDetectionEvalMetrics(
        self._weighted_average_precision, self._mean_average_precision,
        self._average_precisions, self._precisions, self._recalls,
        self._recall_50, self._recall_100, self._median_rank_50,
        self._median_rank_100)
relative_path: TensorFlow2/Detection/Efficientdet/model
section: model
filename: activation_builder
text:
# Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common utils."""
from typing import Text

import tensorflow as tf


def srelu_fn(x):
  """Smooth relu: a smooth version of relu."""
  with tf.name_scope('srelu'):
    beta = tf.Variable(20.0, name='srelu_beta', dtype=tf.float32)**2
    beta = tf.cast(beta**2, x.dtype)
    safe_log = tf.math.log(tf.where(x > 0., beta * x + 1., tf.ones_like(x)))
    return tf.where((x > 0.), x - (1. / beta) * safe_log, tf.zeros_like(x))


def activation_fn(features: tf.Tensor, act_type: Text):
  """Customized non-linear activation type."""
  if act_type in ('silu', 'swish'):
    return tf.nn.swish(features)
  elif act_type == 'swish_native':
    return features * tf.sigmoid(features)
  elif act_type == 'hswish':
    return features * tf.nn.relu6(features + 3) / 6
  elif act_type == 'relu':
    return tf.nn.relu(features)
  elif act_type == 'relu6':
    return tf.nn.relu6(features)
  elif act_type == 'mish':
    return features * tf.math.tanh(tf.math.softplus(features))
  elif act_type == 'identity':
    return tf.identity(features)
  elif act_type == 'srelu':
    return srelu_fn(features)
  else:
    raise ValueError('Unsupported act_type {}'.format(act_type))
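A quick usage sketch (not from the source): activation_fn dispatches on the string name, so swapping activations is a one-line config change.

import tensorflow as tf

x = tf.constant([-1.0, 0.0, 1.0])
print(activation_fn(x, 'swish').numpy())   # smooth, zero-centered near 0
print(activation_fn(x, 'hswish').numpy())  # cheap piecewise approximation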
relative_path: TensorFlow/LanguageModeling/BERT/biobert/scripts
section: scripts
filename: rel_chemprot
text:
#!/bin/bash

echo "Container nvidia build = " $NVIDIA_BUILD_ID

init_checkpoint=${1:-"/results/biobert_tf_uncased_base/model.ckpt"}
train_batch_size=${2:-8}
learning_rate=${3:-1.5e-6}
cased=${4:-false}
precision=${5:-"fp16"}
use_xla=${6:-"true"}
num_gpu=${7:-"16"}
seq_length=${8:-512}
bert_model=${9:-"base"}
eval_batch_size=${10:-16}   #Eval and Predict BS is assumed to be same
epochs=${11:-"3.0"}

if [ "$cased" = "true" ] ; then
    DO_LOWER_CASE=0
    CASING_DIR_PREFIX="cased"
    case_flag="--do_lower_case=False"
else
    DO_LOWER_CASE=1
    CASING_DIR_PREFIX="uncased"
    case_flag="--do_lower_case=True"
fi

if [ "$bert_model" = "large" ] ; then
    export BERT_DIR=/workspace/bert/data/download/google_pretrained_weights/${CASING_DIR_PREFIX}_L-24_H-1024_A-16
else
    export BERT_DIR=/workspace/bert/data/download/google_pretrained_weights/${CASING_DIR_PREFIX}_L-12_H-768_A-12
fi

export GBS=$(expr $train_batch_size \* $num_gpu)
printf -v TAG "tf_bert_biobert_rel_chemprot_%s_%s_gbs%d" "$bert_model" "$precision" $GBS
DATESTAMP=`date +'%y%m%d%H%M%S'`

DATASET_DIR=/workspace/bert/data/biobert/chemprot-data_treeLSTM
OUTPUT_DIR=/results/${TAG}_${DATESTAMP}
mkdir -p ${OUTPUT_DIR}

use_fp16=""
if [ "$precision" = "fp16" ] ; then
    echo "fp16 activated!"
    use_fp16="--amp"
else
    echo "fp32/tf32 activated!"
    use_fp16="--noamp"
fi

if [ "$use_xla" = "true" ] ; then
    use_xla_tag="--use_xla"
    echo "XLA activated"
else
    use_xla_tag="--nouse_xla"
fi

if [ $num_gpu -gt 1 ] ; then
    mpi_command="mpirun -np $num_gpu -H localhost:$num_gpu \
        --allow-run-as-root -bind-to none -map-by slot \
        -x NCCL_DEBUG=INFO \
        -x LD_LIBRARY_PATH \
        -x PATH -mca pml ob1 -mca btl ^openib"
    use_hvd="--horovod"
else
    mpi_command=""
    use_hvd=""
fi

$mpi_command python3 /workspace/bert/run_re.py \
    --do_prepare=true \
    --do_train=true \
    --do_eval=true \
    --do_predict=true \
    --task_name="chemprot" \
    --vocab_file=$BERT_DIR/vocab.txt \
    --bert_config_file=$BERT_DIR/bert_config.json \
    --init_checkpoint=$init_checkpoint \
    --num_train_epochs=$epochs \
    --data_dir=$DATASET_DIR \
    --output_dir=$OUTPUT_DIR \
    --learning_rate=$learning_rate \
    --train_batch_size=$train_batch_size \
    --eval_batch_size=$eval_batch_size \
    --predict_batch_size=$eval_batch_size \
    --max_seq_length=$seq_length \
    "$use_hvd" "$use_fp16" $use_xla_tag $case_flag

python3 /workspace/bert/biobert/re_eval.py --task=chemprot --output_path=$OUTPUT_DIR/test_results.tsv \
    --answer_path=$DATASET_DIR/test.tsv |& tee $OUTPUT_DIR/test_results.txt
relative_path: TensorFlow/LanguageModeling/BERT/data
section: data
filename: __init__
text:
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
relative_path: PyTorch/Segmentation/MaskRCNN/pytorch/configs/caffe2
section: caffe2
filename: e2e_faster_rcnn_R_101_FPN_1x_caffe2
text:
MODEL:
  META_ARCHITECTURE: "GeneralizedRCNN"
  WEIGHT: "catalog://Caffe2Detectron/COCO/35857890/e2e_faster_rcnn_R-101-FPN_1x"
  BACKBONE:
    CONV_BODY: "R-101-FPN"
    OUT_CHANNELS: 256
  RPN:
    USE_FPN: True
    ANCHOR_STRIDE: (4, 8, 16, 32, 64)
    PRE_NMS_TOP_N_TRAIN: 2000
    PRE_NMS_TOP_N_TEST: 1000
    POST_NMS_TOP_N_TEST: 1000
    FPN_POST_NMS_TOP_N_TEST: 1000
  ROI_HEADS:
    USE_FPN: True
  ROI_BOX_HEAD:
    POOLER_RESOLUTION: 7
    POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)
    POOLER_SAMPLING_RATIO: 2
    FEATURE_EXTRACTOR: "FPN2MLPFeatureExtractor"
    PREDICTOR: "FPNPredictor"
DATASETS:
  TEST: ("coco_2014_minival",)
DATALOADER:
  SIZE_DIVISIBILITY: 32
relative_path: Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/generator/tabular
section: tabular
filename: random
text:
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Optional, List

import cupy as cp
import pickle
import numpy as np
import pandas as pd

from syngen.generator.tabular.chunked_tabular_generator import ChunkedBaseTabularGenerator

import warnings

warnings.simplefilter(action="ignore", category=FutureWarning)


class RandomMVGenerator(ChunkedBaseTabularGenerator):
    """Random Multivariate Gaussian generator"""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.ndims = None
        self.column_order = None

    def fit(self, data: Optional[pd.DataFrame] = None, ndims: Optional[int] = None,
            columns: Optional[List[str]] = None, categorical_columns=(), verbose=False):
        """random ignores categorical columns at the moment"""
        assert ndims is not None or data is not None or self.ndims is not None or columns is not None

        if data is not None:
            ndims = len(data.columns)
            self.column_order = list(data.columns)
        if columns is not None:
            self.column_order = columns
            ndims = len(columns)
        if ndims is None:
            ndims = self.ndims

        self.mu = np.random.randn(ndims).astype(np.float32)
        self.cov = np.eye(ndims) * np.abs(
            np.random.randn(ndims).reshape(-1, 1)
        ).astype(np.float32)
        self.ndims = ndims

    def _space_complexity_factor(self):
        return 2.0

    def sample(self, n, gpu=False, memmap_kwargs=None, start_idx=0, end_idx=None, **kwargs):
        use_memmap = memmap_kwargs is not None

        if use_memmap:
            memmap_outfile = np.load(memmap_kwargs['filename'], mmap_mode='r+')

        if gpu:
            samples = cp.random.multivariate_normal(self.mu, self.cov, size=n, dtype=cp.float32)
            samples = cp.asnumpy(samples)
        else:
            samples = np.random.multivariate_normal(self.mu, self.cov, size=n).astype(np.float32)

        if use_memmap:
            memmap_outfile[start_idx:end_idx] = samples
            return None
        else:
            df = pd.DataFrame(samples)
            if self.column_order is None:
                df.columns = df.columns.astype(str)
            else:
                df.columns = self.column_order
            return df

    def save(self, path):
        with open(path, 'wb') as file_handler:
            pickle.dump(self, file_handler, protocol=pickle.HIGHEST_PROTOCOL)

    @classmethod
    def load(cls, path):
        with open(path, 'rb') as file_handler:
            model = pickle.load(file_handler)
        return model
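A minimal usage sketch (synthetic data, hypothetical file name): fit a random multivariate Gaussian over a frame's columns, then sample new rows with the same column order.

import numpy as np
import pandas as pd

df = pd.DataFrame(np.random.rand(100, 3), columns=["a", "b", "c"])
gen = RandomMVGenerator()
gen.fit(data=df)
fake = gen.sample(10)       # DataFrame with columns a, b, c
gen.save("random_mv.pkl")   # hypothetical path; restored via RandomMVGenerator.load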
PyTorch/LanguageModeling/BERT/triton
triton
README
# Deploying the BERT model on Triton Inference Server This folder contains instructions for deployment to run inference on Triton Inference Server, as well as detailed performance analysis. The purpose of this document is to help you with achieving the best inference performance. ## Table of contents - [Solution overview](#solution-overview) - [Introduction](#introduction) - [Deployment process](#deployment-process) - [Setup](#setup) - [Quick Start Guide](#quick-start-guide) - [Release notes](#release-notes) - [Changelog](#changelog) - [Known issues](#known-issues) ## Solution overview ### Introduction The [NVIDIA Triton Inference Server](https://github.com/NVIDIA/triton-inference-server) provides a datacenter and cloud inferencing solution optimized for NVIDIA GPUs. The server provides an inference service via an HTTP or gRPC endpoint, allowing remote clients to request inferencing for any number of GPU or CPU models being managed by the server. This README provides step-by-step deployment instructions for models generated during training (as described in the [model README](../readme.md)). Additionally, this README provides the corresponding deployment scripts that ensure optimal GPU utilization during inferencing on the Triton Inference Server. ### Deployment process The deployment process consists of two steps: 1. Conversion. The purpose of conversion is to find the best performing model format supported by the Triton Inference Server. Triton Inference Server uses a number of runtime backends such as [TensorRT](https://developer.nvidia.com/tensorrt), [LibTorch](https://github.com/triton-inference-server/pytorch_backend) and [ONNX Runtime](https://github.com/triton-inference-server/onnxruntime_backend) to support various model types. Refer to the [Triton documentation](https://github.com/triton-inference-server/backend#where-can-i-find-all-the-backends-that-are-available-for-triton) for a list of available backends. 2. Configuration. Model configuration on the Triton Inference Server, which generates necessary [configuration files](https://github.com/triton-inference-server/server/blob/master/docs/model_configuration.md). After deployment, the Triton inference server is used for evaluation of the converted model in two steps: 1. Accuracy tests. Produce results that are tested against given accuracy thresholds. 2. Performance tests. Produce latency and throughput results for offline (static batching) and online (dynamic batching) scenarios. All steps are executed by the provided runner script. Refer to [Quick Start Guide](#quick-start-guide) ## Setup Ensure you have the following components: * [NVIDIA Docker](https://github.com/NVIDIA/nvidia-docker) * [PyTorch NGC container 21.10](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch) * [Triton Inference Server NGC container 21.10](https://ngc.nvidia.com/catalog/containers/nvidia:tritonserver) * [NVIDIA CUDA](https://docs.nvidia.com/cuda/archive//index.html) * [NVIDIA Ampere](https://www.nvidia.com/en-us/data-center/nvidia-ampere-gpu-architecture/), [Volta](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/) or [Turing](https://www.nvidia.com/en-us/geforce/turing/) based GPU ## Quick Start Guide Deployment is supported for the following architectures. 
For the deployment steps, refer to the appropriate readme file: * [BERT-large](./large/README.md) * [BERT-distilled-4l](./dist4l/README.md) * [BERT-distilled-6l](./dist6l/README.md) ## Release Notes We’re constantly refining and improving our performance on AI and HPC workloads with frequent updates to our software stack. For our latest performance data refer to these pages for [AI](https://developer.nvidia.com/deep-learning-performance-training-inference) and [HPC](https://developer.nvidia.com/hpc-application-performance) benchmarks. ### Changelog ### Known issues - There are no known issues with this model.
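As a hedged illustration of the client side of such a deployment (the model name and tensor names below are assumptions and must match the generated Triton model configuration):
import numpy as np
import tritonclient.http as triton_http

# Connect to a locally running Triton Inference Server; illustration only.
client = triton_http.InferenceServerClient(url="localhost:8000")

# "input__0", "output__0" and the model name "bert" are assumptions.
input_ids = triton_http.InferInput("input__0", [1, 384], "INT64")
input_ids.set_data_from_numpy(np.zeros((1, 384), dtype=np.int64))

result = client.infer(model_name="bert", inputs=[input_ids])
print(result.as_numpy("output__0").shape)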
TensorFlow/LanguageModeling/BERT/scripts
scripts
run_pretraining_lamb_phase2
#! /bin/bash # Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. echo "Container nvidia build = " $NVIDIA_BUILD_ID train_batch_size_phase1=${1:-64} train_batch_size_phase2=${2:-8} eval_batch_size=${3:-8} learning_rate_phase1=${4:-"7.5e-4"} learning_rate_phase2=${5:-"5e-4"} precision=${6:-"fp16"} use_xla=${7:-"true"} num_gpus=${8:-8} warmup_steps_phase1=${9:-"2000"} warmup_steps_phase2=${10:-"200"} train_steps=${11:-7820} save_checkpoints_steps=${12:-100} num_accumulation_steps_phase1=${13:-128} num_accumulation_steps_phase2=${14:-512} bert_model=${15:-"large"} DATA_DIR=${DATA_DIR:-data} #Edit to save logs & checkpoints in a different directory RESULTS_DIR=${RESULTS_DIR:-/results} if [ "$bert_model" = "large" ] ; then export BERT_CONFIG=data/download/nvidia_pretrained/bert_tf_pretraining_large_lamb/bert_config.json else export BERT_CONFIG=data/download/nvidia_pretrained/bert_tf_squad11_base_128/bert_config.json fi echo "Container nvidia build = " $NVIDIA_BUILD_ID PREC="" if [ "$precision" = "fp16" ] ; then PREC="--amp" elif [ "$precision" = "fp32" ] ; then PREC="--noamp" elif [ "$precision" = "tf32" ] ; then PREC="--noamp" elif [ "$precision" = "manual_fp16" ] ; then PREC="--noamp --manual_fp16" else echo "Unknown <precision> argument" exit -2 fi if [ "$use_xla" = "true" ] ; then PREC="$PREC --use_xla" echo "XLA activated" else PREC="$PREC --nouse_xla" fi mpi="" horovod_str="" if [ $num_gpus -gt 1 ] ; then mpi="mpiexec --allow-run-as-root -np $num_gpus --bind-to socket" horovod_str="--horovod" fi #PHASE 1 Config train_steps_phase1=$(expr $train_steps \* 9 \/ 10) #Phase 1 is 90% of training gbs_phase1=$(expr $train_batch_size_phase1 \* $num_accumulation_steps_phase1) PHASE1_CKPT=${RESULTS_DIR}/phase_1/model.ckpt-${train_steps_phase1} #PHASE 2 seq_len=512 max_pred_per_seq=80 train_steps_phase2=$(expr $train_steps \* 1 \/ 10) #Phase 2 is 10% of training gbs_phase2=$(expr $train_batch_size_phase2 \* $num_accumulation_steps_phase2) train_steps_phase2=$(expr $train_steps_phase2 \* $gbs_phase1 \/ $gbs_phase2) # Adjust for batch size RESULTS_DIR_PHASE2=${RESULTS_DIR}/phase_2 mkdir -m 777 -p $RESULTS_DIR_PHASE2 INPUT_FILES="$DATA_DIR/tfrecord/lower_case_1_seq_len_${seq_len}_max_pred_${max_pred_per_seq}_masked_lm_prob_0.15_random_seed_12345_dupe_factor_5_shard_1472_test_split_10/books_wiki_en_corpus/training" EVAL_FILES="$DATA_DIR/tfrecord/lower_case_1_seq_len_${seq_len}_max_pred_${max_pred_per_seq}_masked_lm_prob_0.15_random_seed_12345_dupe_factor_5_shard_1472_test_split_10/books_wiki_en_corpus/test" #Check if all necessary files are available before training for DIR_or_file in $DATA_DIR $RESULTS_DIR $BERT_CONFIG ${PHASE1_CKPT}.meta; do if [ ! -d "$DIR_or_file" ] && [ ! -f "$DIR_or_file" ]; then echo "Error! $DIR_or_file directory missing. 
Please mount correctly" exit -1 fi done $mpi python /workspace/bert/run_pretraining.py \ --input_files_dir=$INPUT_FILES \ --init_checkpoint=$PHASE1_CKPT \ --eval_files_dir=$EVAL_FILES \ --output_dir=$RESULTS_DIR_PHASE2 \ --bert_config_file=$BERT_CONFIG \ --do_train=True \ --do_eval=True \ --train_batch_size=$train_batch_size_phase2 \ --eval_batch_size=$eval_batch_size \ --max_seq_length=$seq_len \ --max_predictions_per_seq=$max_pred_per_seq \ --num_train_steps=$train_steps_phase2 \ --num_accumulation_steps=$num_accumulation_steps_phase2 \ --num_warmup_steps=$warmup_steps_phase2 \ --save_checkpoints_steps=$save_checkpoints_steps \ --learning_rate=$learning_rate_phase2 \ $horovod_str $PREC \ --allreduce_post_accumulation=True
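The step arithmetic in the script above keeps the phase-2 sample budget fixed while the global batch size changes; a plain-Python sketch of that calculation (values are the script defaults):
train_steps = 7820
gbs_phase1 = 64 * 128     # train_batch_size_phase1 * num_accumulation_steps_phase1
gbs_phase2 = 8 * 512      # train_batch_size_phase2 * num_accumulation_steps_phase2
steps_phase2 = (train_steps * 1 // 10) * gbs_phase1 // gbs_phase2
print(steps_phase2)       # 1564 phase-2 steps at the same number of training samples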
TensorFlow/Segmentation/UNet_Industrial/scripts/benchmarking
benchmarking
UNet_evalbench_AMP
#!/usr/bin/env bash # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This script launches the UNet evaluation benchmark in TF-AMP on 1 GPU with batch size 16 # Usage ./UNet_evalbench_AMP.sh <path to dataset> <dagm classID (1-10)> BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" export TF_CPP_MIN_LOG_LEVEL=3 # Cleaning up for benchmark RESULT_DIR="/tmp" rm -rf "${RESULT_DIR}" python "${BASEDIR}/../../main.py" \ --unet_variant='tinyUNet' \ --activation_fn='relu' \ --exec_mode='inference_benchmark' \ --iter_unit='batch' \ --num_iter=1500 \ --batch_size=16 \ --warmup_step=500 \ --results_dir="${RESULT_DIR}" \ --data_dir="${1}" \ --dataset_name='DAGM2007' \ --dataset_classID="${2}" \ --data_format='NCHW' \ --use_auto_loss_scaling \ --amp \ --xla \ --learning_rate=1e-4 \ --learning_rate_decay_factor=0.8 \ --learning_rate_decay_steps=500 \ --rmsprop_decay=0.9 \ --rmsprop_momentum=0.8 \ --loss_fn_name='adaptive_loss' \ --weight_decay=1e-5 \ --weight_init_method='he_uniform' \ --augment_data \ --display_every=250 \ --debug_verbosity=0
TensorFlow/Translation/GNMT/utils
utils
iterator_utils
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """For loading data into NMT models.""" from __future__ import print_function import os import tensorflow as tf from utils import vocab_utils def get_effective_epoch_size(hparams, train=True): """Get training epoch size after filtering.""" if train: src_file = "%s.%s" % (hparams.train_prefix, hparams.src) tgt_file = "%s.%s" % (hparams.train_prefix, hparams.tgt) src_max_len = hparams.src_max_len tgt_max_len = hparams.tgt_max_len else: src_file = "%s.%s" % (hparams.test_prefix, hparams.src) tgt_file = "%s.%s" % (hparams.test_prefix, hparams.tgt) src_max_len = hparams.src_max_len_infer tgt_max_len = None if src_max_len is None: src_max_len = float('inf') if tgt_max_len is None: tgt_max_len = float('inf') srcf = tf.gfile.GFile(src_file, "r") tgtf = tf.gfile.GFile(tgt_file, "r") epoch_size = 0 src_tokens = 0 tgt_tokens = 0 for srcline, tgtline in zip(srcf, tgtf): len_srcline = len(srcline.split()) len_tgtline = len(tgtline.split()) if ( len_srcline < src_max_len and len_tgtline < tgt_max_len): epoch_size += 1 src_tokens += len_srcline tgt_tokens += len_tgtline srcf.close() tgtf.close() return epoch_size, src_tokens, tgt_tokens # pylint: disable=g-long-lambda,line-too-long def get_iterator(src_dataset, tgt_dataset, src_vocab_table, tgt_vocab_table, batch_size, sos, eos, random_seed, num_buckets, src_max_len=None, tgt_max_len=None, num_parallel_calls=4, output_buffer_size=None, skip_count=None, num_shards=1, shard_index=0, reshuffle_each_iteration=True, use_char_encode=False, num_repeat=1, filter_oversized_sequences=False): """Function that returns input dataset.""" if not output_buffer_size: output_buffer_size = batch_size * 1000 if use_char_encode: src_eos_id = vocab_utils.EOS_CHAR_ID else: src_eos_id = tf.cast(src_vocab_table.lookup(tf.constant(eos)), tf.int32) tgt_sos_id = tf.cast(tgt_vocab_table.lookup(tf.constant(sos)), tf.int32) tgt_eos_id = tf.cast(tgt_vocab_table.lookup(tf.constant(eos)), tf.int32) src_tgt_dataset = tf.data.Dataset.zip((src_dataset, tgt_dataset)) src_tgt_dataset = src_tgt_dataset.shard(num_shards, shard_index) if skip_count is not None: src_tgt_dataset = src_tgt_dataset.skip(skip_count) src_tgt_dataset = src_tgt_dataset.shuffle( output_buffer_size, random_seed, reshuffle_each_iteration).repeat(num_repeat) 
src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (tf.string_split([src]).values, tf.string_split([tgt]).values), num_parallel_calls=num_parallel_calls).prefetch(output_buffer_size) # Filter zero length input sequences. src_tgt_dataset = src_tgt_dataset.filter( lambda src, tgt: tf.logical_and(tf.size(src) > 0, tf.size(tgt) > 0)) # Filter oversized input sequences. if filter_oversized_sequences: src_tgt_dataset = src_tgt_dataset.filter( lambda src, tgt: tf.logical_and(tf.size(src) < src_max_len, tf.size(tgt) < tgt_max_len)) if src_max_len: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (src[:src_max_len], tgt), num_parallel_calls=num_parallel_calls).prefetch(output_buffer_size) if tgt_max_len: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (src, tgt[:tgt_max_len]), num_parallel_calls=num_parallel_calls).prefetch(output_buffer_size) # Convert the word strings to ids. Word strings that are not in the # vocab get the lookup table's default_value integer. if use_char_encode: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (tf.reshape(vocab_utils.tokens_to_bytes(src), [-1]), tf.cast(tgt_vocab_table.lookup(tgt), tf.int32)), num_parallel_calls=num_parallel_calls) else: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (tf.cast(src_vocab_table.lookup(src), tf.int32), tf.cast(tgt_vocab_table.lookup(tgt), tf.int32)), num_parallel_calls=num_parallel_calls) src_tgt_dataset = src_tgt_dataset.prefetch(output_buffer_size) # Create a tgt_input prefixed with <sos> and a tgt_output suffixed with <eos>. src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt: (src, tf.concat(([tgt_sos_id], tgt), 0), tf.concat((tgt, [tgt_eos_id]), 0)), num_parallel_calls=num_parallel_calls).prefetch(output_buffer_size) # Add in sequence lengths. if use_char_encode: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt_in, tgt_out: ( src, tgt_in, tgt_out, tf.to_int32(tf.size(src) / vocab_utils.DEFAULT_CHAR_MAXLEN), tf.size(tgt_in)), num_parallel_calls=num_parallel_calls) else: src_tgt_dataset = src_tgt_dataset.map( lambda src, tgt_in, tgt_out: ( src, tgt_in, tgt_out, tf.size(src), tf.size(tgt_in)), num_parallel_calls=num_parallel_calls) src_tgt_dataset = src_tgt_dataset.prefetch(output_buffer_size) use_xla_compile = os.environ["xla_compile"] == "true" force_inputs_padding = os.environ["force_inputs_padding"] == "true" use_static_input_shape = use_xla_compile or force_inputs_padding # Bucket by source sequence length (buckets for lengths 0-9, 10-19, ...) def batching_func(x): return x.padded_batch( batch_size, # The first three entries are the source and target line rows; # these have unknown-length vectors. The last two entries are # the source and target row sizes; these are scalars. padded_shapes=( tf.TensorShape( [src_max_len if use_static_input_shape else None]), # src tf.TensorShape( [tgt_max_len if use_static_input_shape else None]), # tgt_input tf.TensorShape([tgt_max_len if use_static_input_shape else None ]), # tgt_output tf.TensorShape([]), # src_len tf.TensorShape([])), # tgt_len # Pad the source and target sequences with eos tokens. # (Though notice we don't generally need to do this since # later on we will be masking out calculations past the true sequence. 
padding_values=( src_eos_id, # src tgt_eos_id, # tgt_input tgt_eos_id, # tgt_output 0, # src_len -- unused 0), drop_remainder=True) if num_buckets > 1: def key_func(unused_1, unused_2, unused_3, src_len, tgt_len): """Calculate bucket_width by maximum source sequence length.""" # Pairs with length [0, bucket_width) go to bucket 0, length # [bucket_width, 2 * bucket_width) go to bucket 1, etc. Pairs with length # over ((num_bucket-1) * bucket_width) words all go into the last bucket. if src_max_len: bucket_width = (src_max_len + num_buckets - 1) // num_buckets else: bucket_width = 10 # Bucket sentence pairs by the length of their source sentence and target # sentence. bucket_id = tf.maximum(src_len // bucket_width, tgt_len // bucket_width) return tf.to_int64(tf.minimum(num_buckets, bucket_id)) def reduce_func(unused_key, windowed_data): return batching_func(windowed_data) batched_dataset = src_tgt_dataset.apply( tf.contrib.data.group_by_window( key_func=key_func, reduce_func=reduce_func, window_size=batch_size)) else: batched_dataset = batching_func(src_tgt_dataset) # Make_one_shot_iterator is not applicable here since we have lookup table. # Instead return a tf.data.dataset and let TpuEstimator to initialize and make # iterator out of it. batched_dataset = batched_dataset.map( lambda src, tgt_in, tgt_out, source_size, tgt_in_size: ( {"source": src, "target_input": tgt_in, "target_output": tgt_out, "source_sequence_length": source_size, "target_sequence_length": tgt_in_size})) return batched_dataset def get_infer_iterator(src_dataset, src_vocab_table, batch_size, eos, src_max_len=None, use_char_encode=False): """Get dataset for inference.""" if use_char_encode: src_eos_id = vocab_utils.EOS_CHAR_ID else: src_eos_id = tf.cast(src_vocab_table.lookup(tf.constant(eos)), tf.int32) src_dataset = src_dataset.map(lambda src: tf.string_split([src]).values) if src_max_len: src_dataset = src_dataset.map(lambda src: src[:src_max_len]) if use_char_encode: # Convert the word strings to character ids src_dataset = src_dataset.map( lambda src: tf.reshape(vocab_utils.tokens_to_bytes(src), [-1])) else: # Convert the word strings to ids src_dataset = src_dataset.map( lambda src: tf.cast(src_vocab_table.lookup(src), tf.int32)) # Add in the word counts. if use_char_encode: src_dataset = src_dataset.map( lambda src: (src, tf.to_int32( tf.size(src) / vocab_utils.DEFAULT_CHAR_MAXLEN))) else: src_dataset = src_dataset.map(lambda src: (src, tf.size(src))) def batching_func(x): return x.padded_batch( batch_size, # The entry is the source line rows; # this has unknown-length vectors. The last entry is # the source row size; this is a scalar. padded_shapes=( tf.TensorShape([None]), # src tf.TensorShape([])), # src_len # Pad the source sequences with eos tokens. # (Though notice we don't generally need to do this since # later on we will be masking out calculations past the true sequence. padding_values=( src_eos_id, # src 0)) # src_len -- unused batched_dataset = batching_func(src_dataset) batched_dataset = batched_dataset.map( lambda src_ids, src_seq_len: ( {"source": src_ids, "source_sequence_length": src_seq_len})) return batched_dataset
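A toy, standalone sketch of the bucketing rule implemented by key_func above (plain Python, independent of the TF graph code):
def bucket_id(src_len, tgt_len, src_max_len=50, num_buckets=5):
    # Same arithmetic as key_func: bucket width derived from the max source length.
    bucket_width = (src_max_len + num_buckets - 1) // num_buckets
    return min(num_buckets, max(src_len // bucket_width, tgt_len // bucket_width))

print(bucket_id(7, 12))   # 1 -- the longer (target) side decides the bucket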
TensorFlow/Translation/GNMT/utils
utils
vocab_utils
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utility to handle vocabularies.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import codecs import os import tensorflow as tf from tensorflow.python.ops import lookup_ops from utils import misc_utils as utils # word level special token UNK = "<unk>" SOS = "<s>" EOS = "</s>" UNK_ID = 0 # char ids 0-255 come from utf-8 encoding bytes # assign 256-300 to special chars BOS_CHAR_ID = 256 # <begin sentence> EOS_CHAR_ID = 257 # <end sentence> BOW_CHAR_ID = 258 # <begin word> EOW_CHAR_ID = 259 # <end word> PAD_CHAR_ID = 260 # <padding> DEFAULT_CHAR_MAXLEN = 50 # max number of chars for each word. def _string_to_bytes(text, max_length): """Given string and length, convert to byte seq of at most max_length. This process mimics docqa/elmo's preprocessing: https://github.com/allenai/document-qa/blob/master/docqa/elmo/data.py Note that we make use of BOS_CHAR_ID and EOS_CHAR_ID in iterator_utils.py & our usage differs from docqa/elmo. Args: text: tf.string tensor of shape [] max_length: max number of chars for each word. Returns: A tf.int32 tensor of the byte encoded text. """ byte_ids = tf.to_int32(tf.decode_raw(text, tf.uint8)) byte_ids = byte_ids[:max_length - 2] padding = tf.fill([max_length - tf.shape(byte_ids)[0] - 2], PAD_CHAR_ID) byte_ids = tf.concat( [[BOW_CHAR_ID], byte_ids, [EOW_CHAR_ID], padding], axis=0) tf.logging.info(byte_ids) byte_ids = tf.reshape(byte_ids, [max_length]) tf.logging.info(byte_ids.get_shape().as_list()) return byte_ids + 1 def tokens_to_bytes(tokens): """Given a sequence of strings, map to sequence of bytes. Args: tokens: A tf.string tensor Returns: A tensor of shape words.shape + [bytes_per_word] containing byte versions of each word. 
""" bytes_per_word = DEFAULT_CHAR_MAXLEN with tf.device("/cpu:0"): tf.assert_rank(tokens, 1) shape = tf.shape(tokens) tf.logging.info(tokens) tokens_flat = tf.reshape(tokens, [-1]) as_bytes_flat = tf.map_fn( fn=lambda x: _string_to_bytes(x, max_length=bytes_per_word), elems=tokens_flat, dtype=tf.int32, back_prop=False) tf.logging.info(as_bytes_flat) as_bytes = tf.reshape(as_bytes_flat, [shape[0], bytes_per_word]) return as_bytes def load_vocab(vocab_file): vocab = [] with codecs.getreader("utf-8")(tf.gfile.GFile(vocab_file, "rb")) as f: vocab_size = 0 for word in f: vocab_size += 1 vocab.append(word.strip()) return vocab, vocab_size def check_vocab(vocab_file, output_dir, check_special_token=True, sos=None, eos=None, unk=None, pad_vocab=False): """Check if vocab_file doesn't exist, create from corpus_file.""" if tf.gfile.Exists(vocab_file): utils.print_out("# Vocab file %s exists" % vocab_file) vocab, vocab_size = load_vocab(vocab_file) if check_special_token: # Verify if the vocab starts with unk, sos, eos # If not, prepend those tokens & generate a new vocab file if not unk: unk = UNK if not sos: sos = SOS if not eos: eos = EOS assert len(vocab) >= 3 if vocab[0] != unk or vocab[1] != sos or vocab[2] != eos: utils.print_out("The first 3 vocab words [%s, %s, %s]" " are not [%s, %s, %s]" % (vocab[0], vocab[1], vocab[2], unk, sos, eos)) vocab = [unk, sos, eos] + vocab vocab_size += 3 new_vocab_file = os.path.join(output_dir, os.path.basename(vocab_file)) with codecs.getwriter("utf-8")( tf.gfile.GFile(new_vocab_file, "wb")) as f: for word in vocab: f.write("%s\n" % word) vocab_file = new_vocab_file if pad_vocab == True and vocab_size % 8 != 0: new_vocab_file = os.path.join(output_dir, os.path.basename(vocab_file)) padded_vocab_size = ((vocab_size + 8 - 1)// 8) * 8 for i in range(0, padded_vocab_size - vocab_size): token = "<madeupword" + str(i) + ">" vocab.append(token) with codecs.getwriter("utf-8")( tf.gfile.GFile(new_vocab_file, "wb")) as f: for word in vocab: f.write("%s\n" % word) vocab_file = new_vocab_file else: raise ValueError("vocab_file '%s' does not exist." % vocab_file) vocab_size = len(vocab) return vocab_size, vocab_file def create_vocab_tables(src_vocab_file, tgt_vocab_file, share_vocab): """Creates vocab tables for src_vocab_file and tgt_vocab_file.""" src_vocab_table = lookup_ops.index_table_from_file( src_vocab_file, default_value=UNK_ID) if share_vocab: tgt_vocab_table = src_vocab_table else: tgt_vocab_table = lookup_ops.index_table_from_file( tgt_vocab_file, default_value=UNK_ID) return src_vocab_table, tgt_vocab_table def load_embed_txt(embed_file): """Load embed_file into a python dictionary. Note: the embed_file should be a Glove/word2vec formatted txt file. Assuming Here is an exampe assuming embed_size=5: the -0.071549 0.093459 0.023738 -0.090339 0.056123 to 0.57346 0.5417 -0.23477 -0.3624 0.4037 and 0.20327 0.47348 0.050877 0.002103 0.060547 For word2vec format, the first line will be: <num_words> <emb_size>. Args: embed_file: file path to the embedding file. Returns: a dictionary that maps word to vector, and the size of embedding dimensions. 
""" emb_dict = dict() emb_size = None is_first_line = True with codecs.getreader("utf-8")(tf.gfile.GFile(embed_file, "rb")) as f: for line in f: tokens = line.rstrip().split(" ") if is_first_line: is_first_line = False if len(tokens) == 2: # header line emb_size = int(tokens[1]) continue word = tokens[0] vec = list(map(float, tokens[1:])) emb_dict[word] = vec if emb_size: if emb_size != len(vec): utils.print_out( "Ignoring %s since embeding size is inconsistent." % word) del emb_dict[word] else: emb_size = len(vec) return emb_dict, emb_size
TensorFlow2/Classification/ConvNets/model/blocks
blocks
__init__
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from model.blocks.conv2d_block import conv2d_block from model.blocks.mb_conv_block import mb_conv_block from model.blocks.fused_mb_conv_block import fused_mb_conv_block __all__ = ['conv2d_block', 'mb_conv_block', 'fused_mb_conv_block']
Tools/PyTorch/TimeSeriesPredictionPlatform/conf/trainer/optimizer
optimizer
SGD
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. _target_: torch.optim.SGD lr: 0.01 momentum: 0.0 weight_decay: 0.0 dampening: 0.0 nesterov: False
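A hedged sketch of how a config like this is typically materialized through its _target_ field with Hydra (the toy model below is an assumption for illustration):
import torch
import hydra.utils
from omegaconf import OmegaConf

model = torch.nn.Linear(4, 2)   # stand-in model, illustration only
cfg = OmegaConf.create({
    "_target_": "torch.optim.SGD",
    "lr": 0.01, "momentum": 0.0, "weight_decay": 0.0,
    "dampening": 0.0, "nesterov": False,
})
optimizer = hydra.utils.instantiate(cfg, params=model.parameters())
print(type(optimizer))          # <class 'torch.optim.sgd.SGD'>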
TensorFlow2/LanguageModeling/BERT/official/nlp/transformer
transformer
model_params
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Defines Transformer model parameters.""" from collections import defaultdict BASE_PARAMS = defaultdict( lambda: None, # Set default value to None. # Input params default_batch_size=2048, # Maximum number of tokens per batch of examples. default_batch_size_tpu=32768, max_length=256, # Maximum number of tokens per example. # Model params initializer_gain=1.0, # Used in trainable variable initialization. vocab_size=33708, # Number of tokens defined in the vocabulary file. hidden_size=512, # Model dimension in the hidden layers. num_hidden_layers=6, # Number of layers in the encoder and decoder stacks. num_heads=8, # Number of heads to use in multi-headed attention. filter_size=2048, # Inner layer dimension in the feedforward network. # Dropout values (only used when training) layer_postprocess_dropout=0.1, attention_dropout=0.1, relu_dropout=0.1, # Training params label_smoothing=0.1, learning_rate=2.0, learning_rate_decay_rate=1.0, learning_rate_warmup_steps=16000, # Optimizer params optimizer_adam_beta1=0.9, optimizer_adam_beta2=0.997, optimizer_adam_epsilon=1e-09, # Default prediction params extra_decode_length=50, beam_size=4, alpha=0.6, # used to calculate length normalization in beam search # TPU specific parameters use_tpu=False, static_batch=False, allow_ffn_pad=True, ) BIG_PARAMS = BASE_PARAMS.copy() BIG_PARAMS.update( default_batch_size=4096, # default batch size is smaller than for BASE_PARAMS due to memory limits. default_batch_size_tpu=16384, hidden_size=1024, filter_size=4096, num_heads=16, ) # Parameters for running the model in multi gpu. These should not change the # params that modify the model shape (such as the hidden_size or num_heads). BASE_MULTI_GPU_PARAMS = BASE_PARAMS.copy() BASE_MULTI_GPU_PARAMS.update( learning_rate_warmup_steps=8000 ) BIG_MULTI_GPU_PARAMS = BIG_PARAMS.copy() BIG_MULTI_GPU_PARAMS.update( layer_postprocess_dropout=0.3, learning_rate_warmup_steps=8000 ) # Parameters for testing the model TINY_PARAMS = BASE_PARAMS.copy() TINY_PARAMS.update( default_batch_size=1024, default_batch_size_tpu=1024, hidden_size=32, num_heads=4, filter_size=256, )
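A short usage sketch deriving a custom params dict the same way BIG_PARAMS does above (illustration only):
my_params = BASE_PARAMS.copy()
my_params.update(hidden_size=256, num_heads=4)
print(my_params["hidden_size"])       # 256
print(my_params["some_unset_key"])    # None -- the defaultdict's default value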
PyTorch/Classification/GPUNet/triton/scripts
scripts
setup_environment
#!/usr/bin/env bash # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. WORKDIR="${WORKDIR:=$(pwd)}" export DATASETS_DIR=${WORKDIR}/datasets export WORKSPACE_DIR=${WORKDIR}/runner_workspace export CHECKPOINTS_DIR=${WORKSPACE_DIR}/checkpoints export MODEL_REPOSITORY_PATH=${WORKSPACE_DIR}/model_store export SHARED_DIR=${WORKSPACE_DIR}/shared_dir echo "Preparing directories" mkdir -p ${WORKSPACE_DIR} mkdir -p ${DATASETS_DIR} mkdir -p ${CHECKPOINTS_DIR} mkdir -p ${MODEL_REPOSITORY_PATH} mkdir -p ${SHARED_DIR} echo "Setting up environment" export MODEL_NAME=GPUnet export ENSEMBLE_MODEL_NAME= export TRITON_LOAD_MODEL_METHOD=explicit export TRITON_INSTANCES=1
TensorFlow2/Segmentation/MaskRCNN/mrcnn_tf2/model
model
losses
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Losses used for Mask-RCNN.""" import tensorflow as tf class MaskRCNNLoss(tf.keras.layers.Layer): """ Layer that computes the mask loss of Mask-RCNN. This layer implements the mask loss of Mask-RCNN. As the `mask_outputs` produces `num_classes` masks for each RoI, the reference model expands `mask_targets` to match the shape of `mask_outputs` and selects only the target that the RoI has a maximum overlap. (Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/roi_data/mask_rcnn.py) Instead, this implementation selects the `mask_outputs` by the `class_targets` so that it doesn't expand `mask_targets`. """ def __init__(self): super().__init__(trainable=False, dtype=tf.float32) def call(self, inputs, **kwargs): """ Args: inputs: dictionary with model outputs, which has to include: mask_outputs: a float tensor representing the prediction for each mask, with a shape of [batch_size, num_masks, mask_height, mask_width]. mask_targets: a float tensor representing the binary mask of ground truth labels for each mask with a shape of [batch_size, num_masks, mask_height, mask_width]. select_class_targets: a tensor with a shape of [batch_size, num_masks], representing the foreground mask targets. Returns: mask_loss: a float tensor representing total mask loss. """ mask_outputs = inputs['mask_outputs'] mask_targets = inputs['mask_targets'] select_class_targets = inputs['selected_class_targets'] batch_size, num_masks, mask_height, mask_width = mask_outputs.get_shape().as_list() weights = tf.tile( tf.reshape(tf.greater(select_class_targets, 0), [batch_size, num_masks, 1, 1]), [1, 1, mask_height, mask_width] ) weights = tf.cast(weights, tf.float32) return _sigmoid_cross_entropy( multi_class_labels=mask_targets, logits=mask_outputs, weights=weights, sum_by_non_zeros_weights=True ) class FastRCNNLoss(tf.keras.layers.Layer): """ Layer that computes the box and class loss (Fast-RCNN branch) of Mask-RCNN. This layer implements the classification and box regression loss of the Fast-RCNN branch in Mask-RCNN. As the `box_outputs` produces `num_classes` boxes for each RoI, the reference model expands `box_targets` to match the shape of `box_outputs` and selects only the target that the RoI has a maximum overlap. (Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/roi_data/fast_rcnn.py) Instead, this function selects the `box_outputs` by the `class_targets` so that it doesn't expand `box_targets`. The loss computation has two parts: (1) classification loss is softmax on all RoIs. (2) box loss is smooth L1-loss on only positive samples of RoIs. 
Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/modeling/fast_rcnn_heads.py """ def __init__(self, num_classes): super().__init__(trainable=False, dtype=tf.float32) self._num_classes = num_classes def call(self, inputs, **kwargs): """ Args: inputs: dictionary with model outputs, which has to include: class_outputs: a float tensor representing the class prediction for each box with a shape of [batch_size, num_boxes, num_classes]. box_outputs: a float tensor representing the box prediction for each box with a shape of [batch_size, num_boxes, num_classes * 4]. class_targets: a float tensor representing the class label for each box with a shape of [batch_size, num_boxes]. box_targets: a float tensor representing the box label for each box with a shape of [batch_size, num_boxes, 4]. Returns: cls_loss: a float tensor representing total class loss. box_loss: a float tensor representing total box regression loss. """ class_outputs = inputs['class_outputs'] box_outputs = inputs['box_outputs'] class_targets = inputs['class_targets'] box_targets = inputs['box_targets'] class_targets = tf.cast(class_targets, dtype=tf.int32) # Selects the box from `box_outputs` based on `class_targets`, with which # the box has the maximum overlap. batch_size, num_rois, _ = box_outputs.get_shape().as_list() box_outputs = tf.reshape(box_outputs, [batch_size, num_rois, self._num_classes, 4]) box_indices = tf.reshape( class_targets + tf.tile(tf.expand_dims(tf.range(batch_size) * num_rois * self._num_classes, 1), [1, num_rois]) + tf.tile(tf.expand_dims(tf.range(num_rois) * self._num_classes, 0), [batch_size, 1]), [-1] ) box_outputs = tf.matmul( tf.one_hot( box_indices, batch_size * num_rois * self._num_classes, dtype=box_outputs.dtype ), tf.reshape(box_outputs, [-1, 4]) ) box_outputs = tf.reshape(box_outputs, [batch_size, -1, 4]) box_loss = _fast_rcnn_box_loss( box_outputs=box_outputs, box_targets=box_targets, class_targets=class_targets, normalizer=1.0 ) class_targets = tf.one_hot(class_targets, self._num_classes) class_loss = _fast_rcnn_class_loss( class_outputs=class_outputs, class_targets_one_hot=class_targets, normalizer=1.0 ) return class_loss, box_loss class RPNLoss(tf.keras.layers.Layer): """ Layer that computes total RPN detection loss. Computes total RPN detection loss including box and score from all levels. """ def __init__(self, batch_size, rpn_batch_size_per_im, min_level, max_level): super().__init__(trainable=False, dtype=tf.float32) self._batch_size = batch_size self._rpn_batch_size_per_im = rpn_batch_size_per_im self._min_level = min_level self._max_level = max_level def call(self, inputs, **kwargs): """ Args: inputs: dictionary with model outputs, which has to include: score_outputs: an OrderDict with keys representing levels and values representing scores in [batch_size, height, width, num_anchors]. box_outputs: an OrderDict with keys representing levels and values representing box regression targets in [batch_size, height, width, num_anchors * 4]. score_targets_*: ground truth score targets box_targets_*: ground truth box targets Returns: rpn_score_loss: a float tensor representing total score loss. rpn_box_loss: a float tensor representing total box regression loss. 
""" score_outputs = inputs['rpn_score_outputs'] box_outputs = inputs['rpn_box_outputs'] score_losses = [] box_losses = [] for level in range(int(self._min_level), int(self._max_level + 1)): score_targets_at_level = inputs['score_targets_%d' % level] box_targets_at_level = inputs['box_targets_%d' % level] score_losses.append( _rpn_score_loss( score_outputs=score_outputs[level], score_targets=score_targets_at_level, normalizer=tf.cast(self._batch_size * self._rpn_batch_size_per_im, dtype=tf.float32) ) ) box_losses.append(_rpn_box_loss( box_outputs=box_outputs[level], box_targets=box_targets_at_level, normalizer=1.0 )) # Sum per level losses to total loss. rpn_score_loss = tf.add_n(score_losses) rpn_box_loss = tf.add_n(box_losses) return rpn_score_loss, rpn_box_loss def _huber_loss(y_true, y_pred, weights, delta): num_non_zeros = tf.math.count_nonzero(weights, dtype=tf.float32) huber_keras_loss = tf.keras.losses.Huber( delta=delta, reduction=tf.keras.losses.Reduction.SUM, name='huber_loss' ) y_true = tf.expand_dims(y_true, axis=-1) y_pred = tf.expand_dims(y_pred, axis=-1) huber_loss = huber_keras_loss( y_true, y_pred, sample_weight=weights ) assert huber_loss.dtype == tf.float32 huber_loss = tf.math.divide_no_nan(huber_loss, num_non_zeros, name="huber_loss") assert huber_loss.dtype == tf.float32 return huber_loss def _sigmoid_cross_entropy(multi_class_labels, logits, weights, sum_by_non_zeros_weights=False): assert weights.dtype == tf.float32 sigmoid_cross_entropy = tf.nn.sigmoid_cross_entropy_with_logits( labels=multi_class_labels, logits=logits, name="x-entropy" ) assert sigmoid_cross_entropy.dtype == tf.float32 sigmoid_cross_entropy = tf.math.multiply(sigmoid_cross_entropy, weights) sigmoid_cross_entropy = tf.math.reduce_sum(input_tensor=sigmoid_cross_entropy) assert sigmoid_cross_entropy.dtype == tf.float32 if sum_by_non_zeros_weights: num_non_zeros = tf.math.count_nonzero(weights, dtype=tf.float32) sigmoid_cross_entropy = tf.math.divide_no_nan( sigmoid_cross_entropy, num_non_zeros, name="sum_by_non_zeros_weights" ) assert sigmoid_cross_entropy.dtype == tf.float32 return sigmoid_cross_entropy def _softmax_cross_entropy(onehot_labels, logits): num_non_zeros = tf.math.count_nonzero(onehot_labels, dtype=tf.float32) softmax_cross_entropy = tf.nn.softmax_cross_entropy_with_logits( labels=tf.stop_gradient(onehot_labels), logits=logits ) assert softmax_cross_entropy.dtype == tf.float32 softmax_cross_entropy = tf.math.reduce_sum(input_tensor=softmax_cross_entropy) softmax_cross_entropy = tf.math.divide_no_nan( softmax_cross_entropy, num_non_zeros, name="softmax_cross_entropy" ) assert softmax_cross_entropy.dtype == tf.float32 return softmax_cross_entropy def _rpn_score_loss(score_outputs, score_targets, normalizer=1.0): """Computes score loss.""" with tf.name_scope('rpn_score_loss'): # score_targets has three values: # * (1) score_targets[i]=1, the anchor is a positive sample. # * (2) score_targets[i]=0, negative. # * (3) score_targets[i]=-1, the anchor is don't care (ignore). 
mask = tf.math.greater_equal(score_targets, 0) mask = tf.cast(mask, dtype=tf.float32) score_targets = tf.maximum(score_targets, tf.zeros_like(score_targets)) score_targets = tf.cast(score_targets, dtype=tf.float32) assert score_outputs.dtype == tf.float32 assert score_targets.dtype == tf.float32 score_loss = _sigmoid_cross_entropy( multi_class_labels=score_targets, logits=score_outputs, weights=mask, sum_by_non_zeros_weights=False ) assert score_loss.dtype == tf.float32 if isinstance(normalizer, tf.Tensor) or normalizer != 1.0: score_loss /= normalizer assert score_loss.dtype == tf.float32 return score_loss def _rpn_box_loss(box_outputs, box_targets, normalizer=1.0, delta=1. / 9): """Computes box regression loss.""" # delta is typically around the mean value of regression target. # for instances, the regression targets of 512x512 input with 6 anchors on # P2-P6 pyramid is about [0.1, 0.1, 0.2, 0.2]. with tf.name_scope('rpn_box_loss'): mask = tf.not_equal(box_targets, 0.0) mask = tf.cast(mask, tf.float32) assert mask.dtype == tf.float32 # The loss is normalized by the sum of non-zero weights before additional # normalizer provided by the function caller. box_loss = _huber_loss(y_true=box_targets, y_pred=box_outputs, weights=mask, delta=delta) assert box_loss.dtype == tf.float32 if isinstance(normalizer, tf.Tensor) or normalizer != 1.0: box_loss /= normalizer assert box_loss.dtype == tf.float32 return box_loss def _fast_rcnn_class_loss(class_outputs, class_targets_one_hot, normalizer=1.0): """Computes classification loss.""" with tf.name_scope('fast_rcnn_class_loss'): # The loss is normalized by the sum of non-zero weights before additional # normalizer provided by the function caller. class_loss = _softmax_cross_entropy(onehot_labels=class_targets_one_hot, logits=class_outputs) if isinstance(normalizer, tf.Tensor) or normalizer != 1.0: class_loss /= normalizer return class_loss def _fast_rcnn_box_loss(box_outputs, box_targets, class_targets, normalizer=1.0, delta=1.): """Computes box regression loss.""" # delta is typically around the mean value of regression target. # for instances, the regression targets of 512x512 input with 6 anchors on # P2-P6 pyramid is about [0.1, 0.1, 0.2, 0.2]. with tf.name_scope('fast_rcnn_box_loss'): mask = tf.tile(tf.expand_dims(tf.greater(class_targets, 0), axis=2), [1, 1, 4]) # The loss is normalized by the sum of non-zero weights before additional # normalizer provided by the function caller. box_loss = _huber_loss(y_true=box_targets, y_pred=box_outputs, weights=mask, delta=delta) if isinstance(normalizer, tf.Tensor) or normalizer != 1.0: box_loss /= normalizer return box_loss
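A tiny plain-Python sketch of the -1/0/1 anchor-target convention handled in _rpn_score_loss above (illustration only, independent of the TF code):
score_targets = [1, 0, -1, 1]                           # -1 marks a "don't care" anchor
mask = [1.0 if t >= 0 else 0.0 for t in score_targets]
targets = [max(t, 0) for t in score_targets]
print(mask)     # [1.0, 1.0, 0.0, 1.0] -- ignored anchors get zero weight
print(targets)  # [1, 0, 0, 1]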
TensorFlow/Recommendation/NCF
NCF
neumf
# Copyright (c) 2018. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ----------------------------------------------------------------------- # # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import tensorflow as tf import horovod.tensorflow as hvd def float32_variable_storage_getter(getter, name, shape=None, dtype=None, initializer=None, regularizer=None, trainable=True, *args, **kwargs): """ Custom variable getter that forces trainable variables to be stored in float32 precision and then casts them to the half-precision """ storage_dtype = tf.float32 if trainable else dtype variable = getter(name, shape, dtype=storage_dtype, initializer=initializer, regularizer=regularizer, trainable=trainable, *args, **kwargs) if trainable and dtype != tf.float32: variable = tf.cast(variable, dtype) return variable def neural_mf(users, items, model_dtype, nb_users, nb_items, mf_dim, mf_reg, mlp_layer_sizes, mlp_layer_regs, dropout_rate, sigmoid=False): """ Constructs the model graph """ # Check params if len(mlp_layer_sizes) != len(mlp_layer_regs): raise RuntimeError('u dummy, layer_sizes != layer_regs') if mlp_layer_sizes[0] % 2 != 0: raise RuntimeError('u dummy, mlp_layer_sizes[0] % 2 != 0') nb_mlp_layers = len(mlp_layer_sizes) # Embeddings user_embed = tf.get_variable( "user_embeddings", shape=[nb_users, mf_dim + mlp_layer_sizes[0] // 2], initializer=tf.initializers.random_normal(mean=0.0, stddev=0.01)) item_embed = tf.get_variable( "item_embeddings", shape=[nb_items, mf_dim + mlp_layer_sizes[0] // 2], initializer=tf.initializers.random_normal(mean=0.0, stddev=0.01)) # Matrix Factorization Embeddings xmfu = tf.nn.embedding_lookup(user_embed[:, :mf_dim], users, partition_strategy='div') xmfi = tf.nn.embedding_lookup(item_embed[:, :mf_dim], items, partition_strategy='div') # MLP Network Embeddings xmlpu = tf.nn.embedding_lookup(user_embed[:, mf_dim:], users, partition_strategy='div') xmlpi = tf.nn.embedding_lookup(item_embed[:, mf_dim:], items, partition_strategy='div') # Enforce model to use fp16 data types when manually enabling mixed precision # (TensorFlow ops will automatically use the data type of the first input) if model_dtype == tf.float16: xmfu = tf.cast(xmfu, model_dtype) xmfi = tf.cast(xmfi, model_dtype) xmlpu = tf.cast(xmlpu, model_dtype) xmlpi = tf.cast(xmlpi, model_dtype) # Matrix Factorization xmf = tf.math.multiply(xmfu, xmfi) # MLP Layers xmlp = tf.concat((xmlpu, xmlpi), 1) for i in range(1, nb_mlp_layers): xmlp = 
tf.layers.Dense( mlp_layer_sizes[i], activation=tf.nn.relu, kernel_initializer=tf.glorot_uniform_initializer() ).apply(xmlp) xmlp = tf.layers.Dropout(rate=dropout_rate).apply(xmlp) # Final fully-connected layer logits = tf.concat((xmf, xmlp), 1) logits = tf.layers.Dense( 1, kernel_initializer=tf.keras.initializers.lecun_uniform() ).apply(logits) if sigmoid: logits = tf.math.sigmoid(logits) # Cast model outputs back to float32 if manually enabling mixed precision for loss calculation if model_dtype == tf.float16: logits = tf.cast(logits, tf.float32) return logits def compute_eval_metrics(logits, dup_mask, val_batch_size, K): """ Constructs the graph to compute Hit Rate and NDCG """ # Replace duplicate (uid, iid) pairs with -inf logits = logits * (1. - dup_mask) logits = logits + (dup_mask * logits.dtype.min) # Reshape tensors so that each row corresponds with a user logits_by_user = tf.reshape(logits, [-1, val_batch_size]) dup_mask_by_user = tf.cast(tf.reshape(dup_mask, [-1, val_batch_size]), tf.bool) # Get the topk items for each user top_item_indices = tf.math.top_k(logits_by_user, K)[1] # Check that the positive sample (last index) is in the top K is_positive = tf.cast(tf.equal(top_item_indices, val_batch_size-1), tf.int32) found_positive = tf.reduce_sum(is_positive, axis=1) # Extract the rankings of the positive samples positive_ranks = tf.reduce_sum(is_positive * tf.expand_dims(tf.range(K), 0), axis=1) dcg = tf.log(2.) / tf.log(tf.cast(positive_ranks, tf.float32) + 2) dcg *= tf.cast(found_positive, dcg.dtype) return found_positive, dcg def ncf_model_ops(users, items, labels, dup_mask, params, mode='TRAIN'): """ Constructs the training and evaluation graphs """ # Validation params val_batch_size = params['val_batch_size'] K = params['top_k'] # Training params learning_rate = params['learning_rate'] beta_1 = params['beta_1'] beta_2 = params['beta_2'] epsilon = params['epsilon'] # Model params fp16 = False nb_users = params['num_users'] nb_items = params['num_items'] mf_dim = params['num_factors'] mf_reg = params['mf_reg'] mlp_layer_sizes = params['layer_sizes'] mlp_layer_regs = params['layer_regs'] dropout = params['dropout'] sigmoid = False #params['sigmoid'] loss_scale = params['loss_scale'] model_dtype = tf.float16 if fp16 else tf.float32 # If manually enabling mixed precision, use the custom variable getter custom_getter = None if not fp16 else float32_variable_storage_getter # Allow soft device placement with tf.device(None), \ tf.variable_scope('neumf', custom_getter=custom_getter): # Model graph logits = neural_mf( users, items, model_dtype, nb_users, nb_items, mf_dim, mf_reg, mlp_layer_sizes, mlp_layer_regs, dropout, sigmoid ) logits = tf.squeeze(logits) if mode == 'INFERENCE': return logits # Evaluation Ops found_positive, dcg = compute_eval_metrics(logits, dup_mask, val_batch_size, K) # Metrics hit_rate = tf.metrics.mean(found_positive, name='hit_rate') ndcg = tf.metrics.mean(dcg, name='ndcg') eval_op = tf.group(hit_rate[1], ndcg[1]) if mode == 'EVAL': return hit_rate[0], ndcg[0], eval_op, None # Labels labels = tf.reshape(labels, [-1, 1]) logits = tf.reshape(logits, [-1, 1]) # Use adaptive momentum optimizer optimizer = tf.train.AdamOptimizer( learning_rate=learning_rate, beta1=beta_1, beta2=beta_2, epsilon=epsilon) loss = tf.losses.sigmoid_cross_entropy( labels, logits, reduction=tf.losses.Reduction.MEAN) # Apply loss scaling if manually enabling mixed precision if fp16: if loss_scale is None: loss_scale_manager = 
tf.contrib.mixed_precision.ExponentialUpdateLossScaleManager(2**32, 1000) else: loss_scale_manager = tf.contrib.mixed_precision.FixedLossScaleManager(loss_scale) optimizer = tf.contrib.mixed_precision.LossScaleOptimizer(optimizer, loss_scale_manager) # Horovod wrapper for distributed training optimizer = hvd.DistributedOptimizer(optimizer) # Update ops global_step = tf.train.get_global_step() train_op = optimizer.minimize(loss, global_step=global_step) return hit_rate[0], ndcg[0], eval_op, train_op
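A toy numpy sketch of the hit-rate and NDCG arithmetic used by compute_eval_metrics above (standalone illustration, not the TF graph code):
import numpy as np

K = 10
positive_rank = 3                               # 0-based rank of the true item in the top-K
found_positive = 1 if positive_rank < K else 0  # hit-rate contribution
dcg = np.log(2.0) / np.log(positive_rank + 2) * found_positive
print(found_positive, round(float(dcg), 4))     # 1 0.4307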
Tools/PyTorch/TimeSeriesPredictionPlatform/conf/trainer/callbacks/callbacks
callbacks
logging
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. _target_: callbacks.ctl_callbacks.LoggingCallback
TensorFlow2/Segmentation/MaskRCNN/mrcnn_tf2/ops
ops
preprocess_ops
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Preprocessing ops.""" import math import tensorflow as tf from mrcnn_tf2.object_detection import preprocessor def normalize_image(image): """Normalize the image. Args: image: a tensor of shape [height, width, 3] in dtype=tf.float32. Returns: normalized_image: a tensor which has the same shape and dtype as image, with pixel values normalized. """ offset = tf.constant([0.485, 0.456, 0.406]) offset = tf.reshape(offset, shape=(1, 1, 3)) scale = tf.constant([0.229, 0.224, 0.225]) scale = tf.reshape(scale, shape=(1, 1, 3)) normalized_image = (image - offset) / scale return normalized_image def random_horizontal_flip(image, boxes=None, masks=None, seed=None): """Randomly flip the image, boxes, and masks horizontally. Args: image: a tensor of shape [height, width, 3] representing the image. boxes: (Optional) a tensor of shape [num_boxes, 4] representing the box corners in normalized coordinates. masks: (Optional) a tensor of shape [num_masks, height, width] representing the object masks. Note that the size of the mask is the same as the image. Returns: image: the processed image tensor after being randomly flipped. boxes: None or the processed box tensor after being randomly flipped. masks: None or the processed mask tensor after being randomly flipped. """ return preprocessor.random_horizontal_flip(image, boxes, masks, seed=seed) def resize_and_pad(image, target_size, stride, boxes=None, masks=None): """Resize and pad images, boxes and masks. Resize and pad images, (optionally boxes and masks) given the desired output size of the image and stride size. Here are the preprocessing steps. 1. For a given image, keep its aspect ratio and rescale the image to make it the largest rectangle to be bounded by the rectangle specified by the `target_size`. 2. Pad the rescaled image such that the height and width of the image become the smallest multiple of the stride that is larger than or equal to the desired output dimension. Args: image: an image tensor of shape [original_height, original_width, 3]. target_size: a tuple of two integers indicating the desired output image size. Note that the actual output size could be different from this. stride: the stride of the backbone network. Each of the output image sides must be the multiple of this. boxes: (Optional) a tensor of shape [num_boxes, 4] representing the box corners in normalized coordinates. masks: (Optional) a tensor of shape [num_masks, height, width] representing the object masks. Note that the size of the mask is the same as the image. Returns: image: the processed image tensor after being resized and padded. image_info: a tensor of shape [5] which encodes the height, width before and after resizing and the scaling factor. boxes: None or the processed box tensor after being resized and padded. After the processing, boxes will be in the absolute coordinates w.r.t. the scaled image. masks: None or the processed mask tensor after being resized and padded. 
""" input_height, input_width, _ = tf.unstack( tf.cast(tf.shape(input=image), dtype=tf.float32), axis=0 ) target_height, target_width = target_size scale_if_resize_height = target_height / input_height scale_if_resize_width = target_width / input_width scale = tf.minimum(scale_if_resize_height, scale_if_resize_width) scaled_height = tf.cast(scale * input_height, dtype=tf.int32) scaled_width = tf.cast(scale * input_width, dtype=tf.int32) image = tf.image.resize(image, [scaled_height, scaled_width], method=tf.image.ResizeMethod.BILINEAR) padded_height = int(math.ceil(target_height * 1.0 / stride) * stride) padded_width = int(math.ceil(target_width * 1.0 / stride) * stride) image = tf.image.pad_to_bounding_box(image, 0, 0, padded_height, padded_width) image.set_shape([padded_height, padded_width, 3]) image_info = tf.stack([ tf.cast(scaled_height, dtype=tf.float32), tf.cast(scaled_width, dtype=tf.float32), 1.0 / scale, input_height, input_width] ) if boxes is not None: normalized_box_list = preprocessor.box_list.BoxList(boxes) scaled_boxes = preprocessor.box_list_scale(normalized_box_list, scaled_height, scaled_width).get() else: scaled_boxes = None if masks is not None: scaled_masks = tf.image.resize( tf.expand_dims(masks, -1), [scaled_height, scaled_width], method=tf.image.ResizeMethod.NEAREST_NEIGHBOR ) # Check if there is any instance in this image or not. num_masks = tf.shape(input=scaled_masks)[0] scaled_masks = tf.cond( pred=tf.greater(num_masks, 0), true_fn=lambda: tf.image.pad_to_bounding_box(scaled_masks, 0, 0, padded_height, padded_width), false_fn=lambda: tf.zeros([0, padded_height, padded_width, 1]) ) else: scaled_masks = None return image, image_info, scaled_boxes, scaled_masks def crop_gt_masks(instance_masks, boxes, gt_mask_size, image_size): """Crops the ground truth binary masks and resize to fixed-size masks.""" num_masks = tf.shape(input=instance_masks)[0] scale_sizes = tf.convert_to_tensor(value=[image_size[0], image_size[1]] * 2, dtype=tf.float32) boxes = boxes / scale_sizes cropped_gt_masks = tf.image.crop_and_resize( image=instance_masks, boxes=boxes, box_indices=tf.range(num_masks, dtype=tf.int32), crop_size=[gt_mask_size, gt_mask_size], method='bilinear')[:, :, :, 0] cropped_gt_masks = tf.pad( tensor=cropped_gt_masks, paddings=tf.constant([[0, 0], [2, 2], [2, 2]]), mode='CONSTANT', constant_values=0. ) return cropped_gt_masks def pad_to_fixed_size(data, pad_value, output_shape): """Pad data to a fixed length at the first dimension. Args: data: Tensor to be padded to output_shape. pad_value: A constant value assigned to the paddings. output_shape: The output shape of a 2D tensor. Returns: The Padded tensor with output_shape [max_num_instances, dimension]. """ max_num_instances = output_shape[0] dimension = output_shape[1] data = tf.reshape(data, [-1, dimension]) num_instances = tf.shape(input=data)[0] pad_length = max_num_instances - num_instances paddings = pad_value * tf.ones([pad_length, dimension]) padded_data = tf.reshape(tf.concat([data, paddings], axis=0), output_shape) return padded_data
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/tacotron2
tacotron2
tacotron2StreamingInstance
/* * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the NVIDIA CORPORATION nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef TT2I_TACOTRON2STREAMINGINSTANCE_H #define TT2I_TACOTRON2STREAMINGINSTANCE_H #include "cudaMemory.h" #include "timedObject.h" #include "trtPtr.h" #include <memory> namespace nvinfer1 { class ICudaEngine; } namespace tts { class EncoderInstance; class DecoderInstance; class DecoderInstancePlain; class DecoderInstancePlugins; class PostNetInstance; class Tacotron2StreamingInstance : public virtual TimedObject { public: /** * @brief Create a new Tacotron2 instance. * * @param encoder The built encoder network. * @param decoderPlain The built decoder network without plugins. * @param decoderPlugins The built decoder network with plugins. * @param postnet The built postnet network. */ Tacotron2StreamingInstance( TRTPtr<nvinfer1::ICudaEngine> encoder, TRTPtr<nvinfer1::ICudaEngine> decoderPlain, TRTPtr<nvinfer1::ICudaEngine> decoderPlugins, TRTPtr<nvinfer1::ICudaEngine> postnet); // deleted copy constructor and assignment operator Tacotron2StreamingInstance(const Tacotron2StreamingInstance& other) = delete; Tacotron2StreamingInstance& operator=(const Tacotron2StreamingInstance& other) = delete; /** * @brief Set up inference for a given input tensor. * * @param batchSize The number of sequences in the batch. * @param inputDevice The input for each item in the batch. * @param inputSpacing The spacing between the start of each item in the * batch. * @param inputLength The length of each input. * @param stream The stream to operate on. */ void startInference( int batchSize, const int* inputDevice, int inputSpacing, const int* inputLength, cudaStream_t stream); /** * @brief Generate the next chunk of output. * * @param outputDevice The location to write the output tensor in batch, * frame, channel order. * @param outputLength The length of each output sequence. * @param stream The stream to operate on. * * @return True if not all sequences have finished. */ bool inferNext(float* outputDevice, int* outputLength, cudaStream_t stream); /** * @brief Set the random seed to use for dropouts.
This resets the * inference state, and `startInference()` must be called afterwards. * * @param seed The seed value. */ void setSeed(unsigned int seed); /** * @brief Get the number of mels produced at once. * * @return The number of mels. */ int getChunkSize() const; /** * @brief Get the number of channels each frame will have. * * @return The number of channels. */ int getNumMelChannels() const; /** * @brief Get the maximum length of an input sequence. * * @return The maximum length of the sequence. */ int getMaximumInputLength() const; /** * @brief Get the maximum batch size supported by this Tacotron2 instance. * * @return The maximum batch size. */ int getMaxBatchSize() const; /** * @brief Set whether or not to use plugins when possible. This resets the * inference state, and `startInference()` must be called afterwards. * * @param usePlugins True to use plugins, false to not. */ void usePlugins(bool usePlugins); /** * @brief Check whether or not plugins will be used for the given batch size. * * @param batchSize The batch size. * * @return True if plugins would be used. */ bool willUsePlugins(int batchSize) const; /** * @brief Set the number of decoder loops to execute for subsequent calls to * `inferNext()`. The number must be less than or equal to the return of * `getChunkSize()`. * * @param chunkSize The number of frames to generate. */ void setNextChunkSize(int chunkSize); private: // TRT network components std::shared_ptr<EncoderInstance> mEncoder; std::shared_ptr<DecoderInstancePlain> mDecoderPlain; std::shared_ptr<DecoderInstancePlugins> mDecoderPlugins; std::shared_ptr<PostNetInstance> mPostnet; int mMaxInputLength; int mNumMelChannels; int mNumMelChunks; int mMaxBatchSize; int mBatchSize; bool mUsePlugins; DecoderInstance* mInUseDecoder; CudaMemory<int32_t> mPaddedInputDevice; CudaMemory<float> mInputMaskDevice; CudaMemory<int32_t> mInputLengthsDevice; CudaMemory<float> mEncodingDevice; CudaMemory<float> mProcessedEncodingDevice; CudaMemory<float> mMelChunkDevice; const int* mInputLengthHost; /** * @brief Reset internal variables to prevent `inferNext()` from being * called until `startInference()` is called. */ void resetInference(); }; } // namespace tts #endif
PyTorch/Classification/ConvNets/efficientnet/training/TF32
TF32
DGXA100_efficientnet-widese-b4_TF32
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-widese-b4 --precision TF32 --mode convergence --platform DGXA100 /imagenet --workspace ${1:-./} --raport-file raport.json
TensorFlow2/Segmentation/UNet_Medical/model
model
layers
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # -*- coding: utf-8 -*- """ Contains a set of utilities that allow building the UNet model """ import tensorflow as tf def _crop_and_concat(inputs, residual_input): """ Perform a central crop of ``residual_input`` and concatenate to ``inputs`` Args: inputs (tf.Tensor): Tensor with input residual_input (tf.Tensor): Residual input Return: Concatenated tf.Tensor with the size of ``inputs`` """ factor = inputs.shape[1] / residual_input.shape[1] return tf.concat([inputs, tf.image.central_crop(residual_input, factor)], axis=-1) class InputBlock(tf.keras.Model): def __init__(self, filters): """ UNet input block Perform two unpadded convolutions with a specified number of filters and downsample through max-pooling Args: filters (int): Number of filters in convolution """ super().__init__(self) with tf.name_scope('input_block'): self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.maxpool = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=2) def call(self, inputs): out = self.conv1(inputs) out = self.conv2(out) mp = self.maxpool(out) return mp, out class DownsampleBlock(tf.keras.Model): def __init__(self, filters, idx): """ UNet downsample block Perform two unpadded convolutions with a specified number of filters and downsample through max-pooling Args: filters (int): Number of filters in convolution idx (int): Index of block Return: Tuple of convolved ``inputs`` after and before downsampling """ super().__init__(self) with tf.name_scope('downsample_block_{}'.format(idx)): self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.maxpool = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=2) def call(self, inputs): out = self.conv1(inputs) out = self.conv2(out) mp = self.maxpool(out) return mp, out class BottleneckBlock(tf.keras.Model): def __init__(self, filters): """ UNet central block Perform two unpadded convolutions with a specified number of filters, apply dropout during training, and upsample Args: filters (int): Number of filters in convolution """ super().__init__(self) with tf.name_scope('bottleneck_block'): self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.dropout = tf.keras.layers.Dropout(rate=0.5) self.conv_transpose = tf.keras.layers.Conv2DTranspose(filters=filters // 2, kernel_size=(3, 3), strides=(2, 2), padding='same', activation=tf.nn.relu) def call(self, inputs, training): out = self.conv1(inputs) out = self.conv2(out) out = self.dropout(out, training=training) out = self.conv_transpose(out) return out class
UpsampleBlock(tf.keras.Model): def __init__(self, filters, idx): """ UNet upsample block Perform two unpadded convolutions with a specified number of filters and upsample Args: filters (int): Number of filters in convolution idx (int): Index of block """ super().__init__(self) with tf.name_scope('upsample_block_{}'.format(idx)): self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv_transpose = tf.keras.layers.Conv2DTranspose(filters=filters // 2, kernel_size=(3, 3), strides=(2, 2), padding='same', activation=tf.nn.relu) def call(self, inputs, residual_input): out = _crop_and_concat(inputs, residual_input) out = self.conv1(out) out = self.conv2(out) out = self.conv_transpose(out) return out class OutputBlock(tf.keras.Model): def __init__(self, filters, n_classes): """ UNet output block Perform three unpadded convolutions, the last one with the same number of channels as classes we want to classify Args: filters (int): Number of filters in convolution n_classes (int): Number of output classes """ super().__init__(self) with tf.name_scope('output_block'): self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=(3, 3), activation=tf.nn.relu) self.conv3 = tf.keras.layers.Conv2D(filters=n_classes, kernel_size=(1, 1), activation=None) def call(self, inputs, residual_input): out = _crop_and_concat(inputs, residual_input) out = self.conv1(out) out = self.conv2(out) out = self.conv3(out) return out
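A minimal sketch of how these blocks compose, assuming the classes above are in scope; the 572x572 input and the filter counts are illustrative, mirroring the classic U-Net configuration.

# Sketch: a shallow U-Net built from the blocks above. The spatial size
# shrinks by 4 per double-convolution because the convolutions are unpadded.
import tensorflow as tf

x = tf.random.uniform([1, 572, 572, 1])                  # hypothetical input
down, skip = InputBlock(filters=64)(x)                   # skip: 568x568, down: 284x284
up = BottleneckBlock(filters=128)(down, training=False)  # upsampled to 560x560
logits = OutputBlock(filters=64, n_classes=2)(up, skip)  # crop-concat, then convs
print(logits.shape)                                      # (1, 556, 556, 2)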
PyTorch/LanguageModeling/BART/utils
utils
make_datafiles
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import sys import os import hashlib dm_single_close_quote = u'\u2019' # unicode dm_double_close_quote = u'\u201d' END_TOKENS = ['.', '!', '?', '...', "'", "`", '"', dm_single_close_quote, dm_double_close_quote, ")"] # acceptable ways to end a sentence all_train_urls = "url_lists/all_train.txt" all_val_urls = "url_lists/all_val.txt" all_test_urls = "url_lists/all_test.txt" finished_files_dir = "cnn_dm" # These are the number of .story files we expect there to be in cnn_stories_dir and dm_stories_dir num_expected_cnn_stories = 92579 num_expected_dm_stories = 219506 def read_text_file(text_file): lines = [] with open(text_file, "r") as f: for line in f: lines.append(line.strip()) return lines def hashhex(s): """Returns a hexadecimal-formatted SHA1 hash of the input string.""" h = hashlib.sha1() h.update(s.encode()) return h.hexdigest() def get_url_hashes(url_list): return [hashhex(url) for url in url_list] def fix_missing_period(line): """Adds a period to a line that is missing a period""" if "@highlight" in line: return line if line=="": return line if line[-1] in END_TOKENS: return line return line + " ." def get_art_abs(story_file): lines = read_text_file(story_file) # Put periods on the ends of lines that are missing them (this is a problem in the dataset because many image captions don't end in periods; consequently they end up in the body of the article as run-on sentences) lines = [fix_missing_period(line) for line in lines] # Separate out article and abstract sentences article_lines = [] highlights = [] next_is_highlight = False for idx,line in enumerate(lines): if line == "": continue # empty line elif line.startswith("@highlight"): next_is_highlight = True elif next_is_highlight: highlights.append(line) else: article_lines.append(line) # Make article into a single string article = ' '.join(article_lines) # Make abstract into a single string abstract = ' '.join(highlights) return article, abstract def write_to_bin(url_file, out_name): """Reads the tokenized .story files corresponding to the urls listed in the url_file and writes them to the output files.""" print("Making bin file for URLs listed in %s..."
% url_file) url_list = read_text_file(url_file) url_hashes = get_url_hashes(url_list) story_fnames = [s+".story" for s in url_hashes] num_stories = len(story_fnames) article_out = out_name + '.source' abstract_out = out_name + '.target' with open(article_out, 'w') as article_writer, open(abstract_out, 'w') as abstract_writer: for idx,s in enumerate(story_fnames): if idx % 1000 == 0: print("Writing story %i of %i; %.2f percent done" % (idx, num_stories, float(idx)*100.0/float(num_stories))) # Look in the tokenized story dirs to find the .story file corresponding to this url if os.path.isfile(os.path.join(cnn_stories_dir, s)): story_file = os.path.join(cnn_stories_dir, s) elif os.path.isfile(os.path.join(dm_stories_dir, s)): story_file = os.path.join(dm_stories_dir, s) else: print("Error: Couldn't find story file %s in story directories %s and %s." % (s, cnn_stories_dir, dm_stories_dir)) # Check again if stories directories contain correct number of files print("Checking that the stories directories %s and %s contain correct number of files..." % (cnn_stories_dir, dm_stories_dir)) check_num_stories(cnn_stories_dir, num_expected_cnn_stories) check_num_stories(dm_stories_dir, num_expected_dm_stories) raise Exception("Stories directories %s and %s contain correct number of files but story file %s found in neither." % (cnn_stories_dir, dm_stories_dir, s)) # Get the strings to write to the output files article, abstract = get_art_abs(story_file) article_writer.write(article + '\n') abstract_writer.write(abstract + '\n') print("Finished writing file %s and %s\n" % (article_out, abstract_out)) def check_num_stories(stories_dir, num_expected): num_stories = len(os.listdir(stories_dir)) if num_stories != num_expected: raise Exception("stories directory %s contains %i files but should contain %i" % (stories_dir, num_stories, num_expected)) if __name__ == '__main__': if len(sys.argv) != 3: print("USAGE: python make_datafiles.py <cnn_stories_dir> <dailymail_stories_dir>") sys.exit() cnn_stories_dir = sys.argv[1] dm_stories_dir = sys.argv[2] # Check the stories directories contain the correct number of .story files check_num_stories(cnn_stories_dir, num_expected_cnn_stories) check_num_stories(dm_stories_dir, num_expected_dm_stories) # Create some new directories if not os.path.exists(finished_files_dir): os.makedirs(finished_files_dir) # Read the tokenized stories, do a little postprocessing then write to bin files write_to_bin(all_test_urls, os.path.join(finished_files_dir, "test")) write_to_bin(all_val_urls, os.path.join(finished_files_dir, "val")) write_to_bin(all_train_urls, os.path.join(finished_files_dir, "train"))
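A quick sketch of the URL-to-filename mapping used above; the URL is a made-up example.

# Each URL is hashed with SHA1; the hex digest names the .story file.
import hashlib

url = "http://web.archive.org/web/2015/http://www.cnn.com/example-story"
print(hashlib.sha1(url.encode()).hexdigest() + ".story")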
PyTorch/SpeechSynthesis/Tacotron2/platform
platform
DGXA100_tacotron2_TF32_4NGPU_train
mkdir -p output python -m multiproc train.py -m Tacotron2 -o output/ -lr 1e-3 --epochs 1501 -bs 128 --weight-decay 1e-6 --grad-clip-thresh 1.0 --cudnn-enabled --load-mel-from-disk --training-files=filelists/ljs_mel_text_train_filelist.txt --validation-files=filelists/ljs_mel_text_val_filelist.txt --log-file nvlog.json --anneal-steps 500 1000 1500 --anneal-factor 0.1
PaddlePaddle/LanguageModeling/BERT/data
data
BooksDownloader
# Copyright (c) 2022 NVIDIA Corporation. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import subprocess class BooksDownloader: def __init__(self, save_path): self.save_path = save_path def download(self): bookscorpus_download_command = 'python3 /workspace/bookcorpus/download_files.py --list /workspace/bookcorpus/url_list.jsonl --out' bookscorpus_download_command += ' ' + self.save_path + '/bookscorpus' bookscorpus_download_command += ' --trash-bad-count' subprocess.run(bookscorpus_download_command, shell=True, check=True)
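A minimal usage sketch; the save path below is illustrative, and the class shells out to the bookcorpus downloader bundled in the container.

# Download BookCorpus into <save_path>/bookscorpus.
downloader = BooksDownloader(save_path='/workspace/data')
downloader.download()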
TensorFlow/Segmentation/UNet_3D_Medical
UNet_3D_Medical
README
# 3D-UNet Medical Image Segmentation for TensorFlow 1.x This repository provides a script and recipe to train the 3D-UNet model to achieve state-of-the-art accuracy. The content of this repository is tested and maintained by NVIDIA. The 3D-UNet model for TensorFlow1 is no longer maintained and will soon become unavailable; please consider other PyTorch or TensorFlow2 models as a substitute for your requirements. ## Table of Contents - [Model overview](#model-overview) * [Model architecture](#model-architecture) * [Default configuration](#default-configuration) * [Feature support matrix](#feature-support-matrix) * [Features](#features) * [Mixed precision training](#mixed-precision-training) * [Enabling mixed precision](#enabling-mixed-precision) * [Enabling TF32](#enabling-tf32) - [Setup](#setup) * [Requirements](#requirements) - [Quick Start Guide](#quick-start-guide) - [Advanced](#advanced) * [Scripts and sample code](#scripts-and-sample-code) * [Parameters](#parameters) * [Command-line options](#command-line-options) * [Getting the data](#getting-the-data) * [Dataset guidelines](#dataset-guidelines) * [Multi-dataset](#multi-dataset) * [Training process](#training-process) * [Inference process](#inference-process) - [Performance](#performance) * [Benchmarking](#benchmarking) * [Training performance benchmark](#training-performance-benchmark) * [Inference performance benchmark](#inference-performance-benchmark) * [Results](#results) * [Training accuracy results](#training-accuracy-results) * [Training accuracy: NVIDIA DGX A100 (8x A100 80G)](#training-accuracy-nvidia-dgx-a100-8x-a100-80g) * [Training accuracy: NVIDIA DGX-1 (8x V100 16G)](#training-accuracy-nvidia-dgx-1-8x-v100-16g) * [Training performance results](#training-performance-results) * [Training performance: NVIDIA DGX A100 (8x A100 80G)](#training-performance-nvidia-dgx-a100-8x-a100-80g) * [Training performance: NVIDIA DGX-1 (8x V100 16G)](#training-performance-nvidia-dgx-1-8x-v100-16g) * [Inference performance results](#inference-performance-results) * [Inference performance: NVIDIA DGX A100 (1x A100 80G)](#inference-performance-nvidia-dgx-a100-1x-a100-80g) * [Inference performance: NVIDIA DGX-1 (1x V100 16G)](#inference-performance-nvidia-dgx-1-1x-v100-16g) - [Release notes](#release-notes) * [Changelog](#changelog) * [Known issues](#known-issues) ## Model overview The U-Net model is a convolutional neural network for 3D image segmentation. This repository contains a 3D-UNet implementation introduced in [3D U-Net: Learning Dense Volumetric Segmentation from Sparse Annotation](https://arxiv.org/pdf/1606.06650), with modifications described in [No New-Net](https://arxiv.org/pdf/1809.10483). This model is trained with mixed precision using Tensor Cores on Volta, Turing, and the NVIDIA Ampere GPU architectures. Therefore, researchers can get results up to 2.3x faster than training without Tensor Cores, while experiencing the benefits of mixed precision training. This model is tested against each NGC monthly container release to ensure consistent accuracy and performance over time. ### Model architecture 3D-UNet was first introduced by Özgün Çiçek, Ahmed Abdulkadir, Soeren S. Lienkamp, Thomas Brox, and Olaf Ronneberger in the paper: [3D U-Net: Learning Dense Volumetric Segmentation from Sparse Annotation](https://arxiv.org/pdf/1606.06650). In this repository we host a 3D-UNet version adapted by Fabian Isensee et al. for brain tumor segmentation.
3D-UNet allows for seamless segmentation of 3D volumes, with high accuracy and performance, and can be adapted to solve many different segmentation problems. The following figure shows the construction of the 3D-UNet model and its different components. 3D-UNet is composed of a contractive and an expanding path, which together build a bottleneck in its centermost part through a combination of convolution and pooling operations. After this bottleneck, the image is reconstructed through a combination of convolutions and upsampling. Skip connections are added with the goal of helping the backward flow of gradients in order to improve the training. ![U-Net3D](images/unet3d.png) ### Default configuration 3D-UNet consists of a contractive (left-side) and expanding (right-side) path. It repeatedly applies unpadded convolutions followed by max pooling for downsampling. Every step in the expanding path consists of an upsampling of the feature maps and a concatenation with the correspondingly cropped feature map from the contractive path. ### Feature support matrix The following features are supported by this model. | **Feature** | **3D-UNet** | |---------------------------------|-----| | Automatic mixed precision (AMP) | Yes | | Horovod Multi-GPU (NCCL) | Yes | | Accelerated Linear Algebra (XLA)| Yes | #### Features **Automatic Mixed Precision (AMP)** This implementation of 3D-UNet uses AMP to implement mixed precision training. Computation graphs can be modified by TensorFlow at runtime to support mixed precision training. A detailed explanation of mixed precision can be found in the next section. **Horovod** Horovod is a distributed training framework for TensorFlow, Keras, PyTorch, and MXNet. The goal of Horovod is to make distributed deep learning fast and easy to use. For more information about how to get started with Horovod, see the [Horovod: Official repository](https://github.com/horovod/horovod). **Multi-GPU training with Horovod** Our model uses Horovod to implement efficient multi-GPU training with NCCL. For details, see example sources in this repository or see the [TensorFlow tutorial](https://github.com/horovod/horovod/#usage). **XLA support (experimental)** XLA is a domain-specific compiler for linear algebra that can accelerate TensorFlow models with potentially no source code changes. The results are improvements in speed and memory usage: most internal benchmarks run ~1.1-1.5x faster after XLA is enabled. ### Mixed precision training Mixed precision is the combined use of different numerical precisions in a computational method. [Mixed precision](https://arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [Tensor Cores](https://developer.nvidia.com/tensor-cores) in Volta, and following with both the Turing and Ampere architectures, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using [mixed precision training](https://docs.nvidia.com/deeplearning/performance/mixed-precision-training/index.html) previously required two steps: 1. Porting the model to use the FP16 data type where appropriate. 2. Adding loss scaling to preserve small gradient values.
This can now be achieved using Automatic Mixed Precision (AMP) for TensorFlow to enable the full [mixed precision methodology](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html#tensorflow) in your existing TensorFlow model code. AMP enables mixed precision training on Volta, Turing, and NVIDIA Ampere GPU architectures automatically. The TensorFlow framework code makes all necessary model changes internally. In TF-AMP, the computational graph is optimized to use as few casts as necessary and maximize the use of FP16, and the loss scaling is automatically applied inside of supported optimizers. AMP can be configured to work with the existing tf.contrib loss scaling manager by disabling the AMP scaling with a single environment variable to perform only the automatic mixed-precision optimization. It accomplishes this by automatically rewriting all computation graphs with the necessary operations to enable mixed precision training and automatic loss scaling. For information about: - How to train using mixed precision, see the [Mixed Precision Training](https://arxiv.org/abs/1710.03740) paper and [Training With Mixed Precision](https://docs.nvidia.com/deeplearning/performance/mixed-precision-training/index.html) documentation. - Techniques used for mixed precision training, see the [Mixed-Precision Training of Deep Neural Networks](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) blog. - How to access and enable AMP for TensorFlow, see [Using TF-AMP](https://docs.nvidia.com/deeplearning/dgx/tensorflow-user-guide/index.html#tfamp) from the TensorFlow User Guide. #### Enabling mixed precision Mixed precision is enabled in TensorFlow by using the Automatic Mixed Precision (TF-AMP) extension, which casts variables to half-precision upon retrieval, while storing variables in single-precision format. Furthermore, to preserve small gradient magnitudes in backpropagation, a [loss scaling](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html#lossscaling) step must be included when applying gradients. In TensorFlow, loss scaling can be applied statically by using simple multiplication of loss by a constant value or automatically, by TF-AMP. Automatic mixed precision makes all the adjustments internally in TensorFlow, providing two benefits over manual operations. First, programmers need not modify network model code, reducing development and maintenance effort. Second, using AMP maintains forward and backward compatibility with all the APIs for defining and running TensorFlow models. To enable mixed precision, you can simply set the environment variable inside your training script: ``` os.environ['TF_ENABLE_AUTO_MIXED_PRECISION'] = '1' ``` Setting this variable ensures that loss scaling is performed correctly and automatically. By supplying the `--amp` flag to the `main.py` script while training in FP32/TF32, the following variable is set to its correct value for mixed precision training: ``` if params.amp: os.environ['TF_ENABLE_AUTO_MIXED_PRECISION'] = '1' ``` #### Enabling TF32 TensorFloat-32 (TF32) is the new math mode in [NVIDIA A100](https://www.nvidia.com/en-us/data-center/a100/) GPUs for handling the matrix math also called tensor operations. TF32 running on Tensor Cores in A100 GPUs can provide up to 10x speedups compared to single-precision floating-point math (FP32) on Volta GPUs. TF32 Tensor Cores can speed up networks using FP32, typically with no loss of accuracy.
It is more robust than FP16 for models which require high dynamic range for weights or activations. For more information, refer to the [TensorFloat-32 in the A100 GPU Accelerates AI Training, HPC up to 20x](https://blogs.nvidia.com/blog/2020/05/14/tensorfloat-32-precision-format/) blog post. TF32 is supported in the NVIDIA Ampere GPU architecture and is enabled by default. ## Setup The following section lists the requirements that you need to meet in order to start training the 3D-UNet model. ### Requirements This repository contains a Dockerfile which extends the TensorFlow NGC container and encapsulates some dependencies. Aside from these dependencies, ensure you have the following components: - [NVIDIA Docker](https://github.com/NVIDIA/nvidia-docker) - TensorFlow 21.10-tf1-py3 [NGC container](https://ngc.nvidia.com/registry/nvidia-tensorflow) - GPU-based architecture: - [NVIDIA Volta](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/) - [NVIDIA Turing](https://www.nvidia.com/en-us/geforce/turing/) - [NVIDIA Ampere](https://www.nvidia.com/en-us/data-center/nvidia-ampere-gpu-architecture/) For more information about how to get started with NGC containers, see the following sections from the NVIDIA GPU Cloud Documentation and the Deep Learning Documentation: - [Getting Started Using NVIDIA GPU Cloud](https://docs.nvidia.com/ngc/ngc-getting-started-guide/index.html) - [Accessing And Pulling From The NGC container registry](https://docs.nvidia.com/deeplearning/dgx/user-guide/index.html#accessing_registry) - [Running TensorFlow](https://docs.nvidia.com/deeplearning/dgx/tensorflow-release-notes/running.html#running) For those unable to use the TensorFlow NGC container, to set up the required environment or create your own container, see the versioned [NVIDIA Container Support Matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html). ## Quick Start Guide To train your model using mixed or TF32 precision with Tensor Cores or using FP32, perform the following steps using the default parameters of the 3D-UNet model on the [Brain Tumor Segmentation 2019](https://www.med.upenn.edu/cbica/brats-2019/) dataset. These steps enable you to build the 3D-UNet TensorFlow NGC container, train and evaluate your model, and generate predictions on the test data. For the specifics concerning training and inference, see the [Advanced](#advanced) section. 1. Clone the repository. Executing this command will create your local repository with all the code to run 3D-UNet. ```bash git clone https://github.com/NVIDIA/DeepLearningExamples cd DeepLearningExamples/TensorFlow/Segmentation/U-Net3D_TF ``` 2. Build the U-Net TensorFlow NGC container. This command will use the `Dockerfile` to create a Docker image named `unet3d_tf`, downloading all the required components automatically. ```bash docker build -t unet3d_tf . ``` The NGC container contains all the components optimized for usage on NVIDIA hardware. 3. Start an interactive session in the NGC container to run preprocessing/training/inference. The following command will launch the container and mount the `./data` directory as a volume to the `/data` directory inside the container, and the `./results` directory to the `/results` directory in the container. ```bash mkdir data mkdir results docker run --runtime=nvidia -it --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 --rm --ipc=host -v ${PWD}/data:/data -v ${PWD}/results:/results unet3d_tf:latest /bin/bash ``` Any datasets and experiment results (logs, checkpoints, etc.)
saved to `/data` or `/results` will be accessible in the `./data` or `./results` directory on the host, respectively. 4. Download and pre-process the data. Data can be obtained by registering on the [Brain Tumor Segmentation 2019 dataset](https://www.med.upenn.edu/cbica/brats-2019/) website. The data should be downloaded and placed where `/data` in the container is mounted. The `dataset/preprocess_data.py` script will convert the raw data into the tfrecord format used for training and evaluation. The script can be launched as ```python python dataset/preprocess_data.py -i /data/<name/of/the/raw/data/folder> -o /data/<name/of/the/preprocessed/data/folder> -v ``` 5. Start training. After the Docker container is launched, the training of a single fold (fold 0) with the [default hyperparameters](#default-configuration) (for example 1/8 GPUs TF-AMP/FP32/TF32) can be started with: ```bash bash scripts/unet3d_train_single{_TF-AMP}.sh <number/of/gpus> <path/to/dataset> <path/to/checkpoint> <batch/size> ``` For example, to run with 32-bit precision (FP32 or TF32) with batch size 2 on 1 GPU, simply use: ```bash bash scripts/unet3d_train_single.sh 1 /data/preprocessed /results 2 ``` To train a single fold with mixed precision (TF-AMP) on 8 GPUs with batch size 2 per GPU, use: ```bash bash scripts/unet3d_train_single_TF-AMP.sh 8 /data/preprocessed /results 2 ``` The obtained dice scores will be reported after the training has finished. 6. Start benchmarking. The training performance can be evaluated by using benchmarking scripts, such as: ```bash bash scripts/unet3d_{train,infer}_benchmark{_TF-AMP}.sh <number/of/gpus/for/training> <path/to/dataset> <path/to/checkpoint> <batch/size> ``` which will make the model run and report the performance. For example, to benchmark training with TF-AMP with batch size 2 on 4 GPUs, use: ```bash bash scripts/unet3d_train_benchmark_TF-AMP.sh 4 /data/preprocessed /results 2 ``` To obtain inference performance with 32-bit precision (FP32 or TF32) with batch size 1, use: ```bash bash scripts/unet3d_infer_benchmark.sh /data/preprocessed /results 1 ``` ## Advanced The following sections provide greater details of the dataset, running training and inference, and the training results. ### Scripts and sample code In the root directory, the most important files are: * `main.py`: Serves as the entry point to the application. Encapsulates the training routine. * `Dockerfile`: Container with the basic set of dependencies to run U-Net. * `requirements.txt`: Set of extra requirements for running U-Net. * `preprocess_data.py`: Converts the dataset to tfrecord format for training. The `dataset/` folder contains the necessary tools to train and perform inference using U-Net. Its main components are: * `data_loader.py`: Implements the data loading and augmentation. * `transforms.py`: Implements the data augmentation functions. * `preprocess_data.py`: Implements the data conversion and pre-processing functionality. The `runtime/` folder contains scripts with training and inference logic. Its contents are: * `arguments.py`: Implements the command-line arguments parsing. * `hooks.py`: Collects different metrics to be used for benchmarking and testing. * `parse_results.py`: Defines a set of functions used for parsing the partial results. * `setup.py`: Defines a set of functions to set the environment up. The `model/` folder contains information about the building blocks of 3D-UNet and the way they are assembled.
Its contents are: * `layers.py`: Defines the different blocks that are used to assemble 3D-UNet. * `losses.py`: Defines the different losses used during training and evaluation. * `model_fn.py`: Defines the computational graph to optimize. * `unet3d.py`: Defines the model architecture using the blocks from the `layers.py` file. Other folders included in the root directory are: * `scripts/`: Provides examples for training and benchmarking U-Net * `images/`: Contains the model diagram ### Parameters The complete list of the available parameters for the main.py script contains: * `--exec_mode`: Select the execution mode to run the model (default: `train`). Modes available: * `train` - trains a model and stores checkpoints in the directory passed using `--model_dir` * `evaluate` - loads checkpoint (if available) and performs evaluation on validation subset (requires `--fold` other than `None`). * `train_and_evaluate` - trains model from scratch and performs validation at the end (requires `--fold` other than `None`). * `predict` - loads checkpoint (if available) and runs inference on the test set. Stores the results in the `--model_dir` directory. * `train_and_predict` - trains model from scratch and performs inference. * `--model_dir`: Set the output directory for information related to the model (default: `/results`). * `--log_dir`: Set the output directory for logs (default: None). * `--data_dir`: Set the input directory containing the dataset (default: `None`). * `--batch_size`: Size of each minibatch per GPU (default: `1`). * `--fold`: Selected fold for cross-validation (default: `None`). * `--max_steps`: Maximum number of steps (batches) for training (default: `16000`). * `--seed`: Set random seed for reproducibility (default: `0`). * `--log_every`: Log performance every n steps (default: `100`). * `--learning_rate`: Model’s learning rate (default: `0.0002`). * `--augment`: Enable data augmentation (disabled by default). * `--benchmark`: Enable performance benchmarking (disabled by default). If the flag is set, the script runs in a benchmark mode - each iteration is timed and the performance result (in images per second) is printed at the end. Works for both `train` and `predict` execution modes. * `--warmup_steps`: Used during benchmarking - the number of steps to skip (default: `200`). First iterations are usually much slower since the graph is being constructed. Skipping the initial iterations is required for a fair performance assessment. * `--resume_training`: Whether to resume training from a checkpoint, if there is one (disabled by default) * `--xla`: Enable accelerated linear algebra optimization (disabled by default). * `--amp`: Enable automatic mixed precision (disabled by default). 
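As an illustration, a full training invocation combining several of the flags listed above might look like the following (the paths and fold number are placeholders):

```bash
horovodrun -np 8 python main.py --exec_mode train_and_evaluate --data_dir /data/preprocessed \
  --model_dir /results --fold 0 --batch_size 2 --max_steps 16000 --amp --xla --augment
```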
### Command line options To see the full list of available options and their descriptions, use the `-h` or `--help` command-line option, for example: ```bash python main.py --help ``` The following example output is printed when running the model: ```python main.py --help usage: main.py [-h] --data_dir DATA_DIR --model_dir MODEL_DIR [--exec_mode {train,evaluate,train_and_evaluate,predict}] [--benchmark] [--max_steps MAX_STEPS] [--learning_rate LEARNING_RATE] [--log_every LOG_EVERY] [--log_dir LOG_DIR] [--loss {dice,ce,dice+ce}] [--warmup_steps WARMUP_STEPS][--resume_training] [--augment] [--batch_size BATCH_SIZE] [--fold FOLD] [--amp] [--xla] UNet-3D optional arguments: -h, --help show this help message and exit --model_dir MODEL_DIR Output directory for information related to the model --data_dir DATA_DIR Input directory containing the dataset for training the model --exec_mode {train,train_and_predict,predict,evaluate,train_and_evaluate} Execution mode of running the model --log_dir LOG_DIR Output directory for training logs --batch_size BATCH_SIZE Size of each minibatch per GPU --learning_rate LEARNING_RATE Learning rate coefficient for AdamOptimizer --fold Fold number Chosen fold for cross-validation. Use None to disable cross-validation --max_steps MAX_STEPS Maximum number of steps (batches) used for training --log_every LOG_EVERY Log performance every n steps --warmup_steps WARMUP_STEPS Number of warmup steps --resume_training Whether to resume training from the checkpoint --seed SEED Random seed --augment Perform data augmentation during training --benchmark Collect performance metrics during training --amp Train using TF-AMP --xla Train using XLA ``` ### Getting the data The 3D-UNet model was trained on the [Brain Tumor Segmentation 2019 dataset](https://www.med.upenn.edu/cbica/brats-2019/). Test images provided by the organization were used to produce the resulting masks for submission. Upon registration, the challenge's data is made available through the https://ipp.cbica.upenn.edu service. The dataset consists of 335 240x240x155 `nifti` volumes. Each volume is represented by 4 modalities and a corresponding segmentation mask. The modalities are: * Native T1-weighted (T1), * Post-contrast T1-weighted (T1Gd), * Native T2-weighted (T2), * T2 Fluid Attenuated Inversion Recovery (FLAIR). Each voxel in a segmentation mask belongs to one of four classes: * 0 corresponds to healthy tissue or background, * 1 indicates the presence of the necrotic and non-enhancing tumor core (TC), * 2 indicates the presence of the peritumoral edema (ED), * 4 indicates the presence of the GD-enhancing tumor (ET). The objective is to produce a set of masks that segment the data as accurately as possible. The results are expected to be submitted as a 12-bit `nifti` 3D image, with values corresponding to the underlying class. #### Dataset guidelines The training and test datasets are given as 3D `nifti` volumes that can be read using the Nibabel library and NumPy (both packages are installed by the `Dockerfile`). Initially, all modalities are loaded, stacked and converted into 240x240x155x4 NumPy arrays using Nibabel. To decrease the size of the dataset, each volume is clipped to 85% of the maximal value, normalized to 255 for each modality separately, cast to 8-bit, grouped by 4 volumes, and saved as a `tfrecord` file. The process of converting from `nifti` to `tfrecord` can be found in the `preprocess_data.py` script.
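As a rough sketch of the stacking and clipping steps described above (the file names are illustrative; the authoritative logic lives in `dataset/preprocess_data.py`):

```python
import nibabel as nib
import numpy as np

# Load the four modalities of one subject and stack them channel-last.
suffixes = ["t1", "t1gd", "t2", "flair"]  # illustrative file naming
volumes = [nib.load(f"subject_{s}.nii.gz").get_fdata() for s in suffixes]
volume = np.stack(volumes, axis=-1)       # shape: (240, 240, 155, 4)

# Clip each modality to 85% of its maximum and rescale to 8-bit.
out = np.empty(volume.shape, dtype=np.uint8)
for c in range(volume.shape[-1]):
    channel = np.clip(volume[..., c], 0, 0.85 * volume[..., c].max())
    out[..., c] = (channel / max(channel.max(), 1e-8) * 255).astype(np.uint8)
```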
The `tfrecord` files are fed to the model through `tf.data.TFRecordDataset()` to achieve high performance. The foreground voxel intensities are then z-score normalized, whereas labels are one-hot encoded for their later use in dice or pixel-wise cross-entropy loss, becoming 240x240x155x4 tensors. If augmentation is enabled, the following set of augmentation techniques is applied: * Random horizontal flipping * Random 128x128x128x4 crop * Random brightness shifting In addition, random vertical flip and random gamma correction augmentations were implemented, but are not used. The process of loading, normalizing and augmenting the data contained in the dataset can be found in the `data_loader.py` script. #### Multi-dataset This implementation is tuned for the Brain Tumor Segmentation 2019 dataset. Using other datasets is possible, but might require changes to the code (data loader) and tuning some hyperparameters (e.g. learning rate, number of iterations). In the current implementation, the data loader works with tfrecord files. It should work seamlessly with any dataset containing 3D data stored in tfrecord format, as long as features (with corresponding mean and standard deviation) and labels are stored as bytestreams in the same file as `X`, `Y`, `mean`, and `stdev`. See the data pre-processing script for details. If your data is stored in a different format, you will have to modify the parsing function in the `dataset/data_loader.py` file. For a walk-through, check the [TensorFlow tf.data API guide](https://www.tensorflow.org/guide/data_performance). ### Training process The model trains for a total of 16,000 iterations (16,000 / number of GPUs per GPU) for each fold, with the default 3D-UNet setup: * Adam optimizer with learning rate of 0.0002. * Training and evaluation batch size of 2. This default parametrization is applied when running scripts from the `./examples` directory and when running `main.py` without explicitly overriding these parameters. By default, the training is in full precision. To enable AMP, pass the `--amp` flag. AMP can be enabled for every mode of execution. The default configuration minimizes a function _L = 1 - DICE + cross entropy_ during training and reports achieved convergence as dice score per class, mean dice score, and dice score for whole tumor vs background. Training with a combination of dice and cross entropy has been proven to achieve better convergence than training using only dice. The training can be run directly without using the predefined scripts. The name of the training script is `main.py`. Because of the multi-GPU support, training should always be run with the Horovod distributed launcher like this: ```bash horovodrun -np <number/of/gpus> python main.py --data_dir /data/preprocessed --exec_mode train [other parameters] ``` *Note:* When calling the `main.py` script manually, data augmentation is disabled. In order to enable data augmentation, use the `--augment` flag in your invocation. The main results of the training are checkpoints stored by default in the `./results/` directory on the host machine, and in the `/results` directory in the container. This location can be controlled by the `--model_dir` command-line argument, if a different location was mounted while starting the container. In the case when the training is run in `train_and_predict` mode, the inference will take place after the training is finished, and inference results will be stored in the `/results` directory.
If the `--exec_mode train_and_evaluate` parameter was used, and if `--fold` parameter is set to an integer value of {0, 1, 2, 3, 4}, the evaluation of the validation set takes place after the training is completed. The results of the evaluation will be printed to the console. ### Inference process Inference can be launched with the same script used for training by passing the `--exec_mode predict` flag: ```bash python main.py --exec_mode predict --data_dir /data/preprocessed --model_dir <path/to/checkpoint> [other parameters] ``` The script will then: * Load the checkpoint from the directory specified by the `<path/to/checkpoint>` directory (`/results`), * Run inference on the test dataset, * Save the resulting masks in the `numpy` format in the `--model_dir` directory. ## Performance The performance measurements in this document were conducted at the time of publication and may not reflect the performance achieved from NVIDIA’s latest software release. For the most up-to-date performance measurements, go to [NVIDIA Data Center Deep Learning Product Performance](https://developer.nvidia.com/deep-learning-performance-training-inference). ### Benchmarking The following section shows how to run benchmarks measuring the model performance in training and inference modes. #### Training performance benchmark To benchmark training, run one of the `train_benchmark` scripts in `./scripts/`: ```bash bash scripts/unet3d_train_benchmark{_TF-AMP}.sh <num/of/gpus> <path/to/dataset> <path/to/checkpoints> <batch/size> ``` For example, to benchmark training using mixed-precision on 4 GPUs with batch size of 2 use: ```bash bash scripts/unet3d_train_benchmark_TF-AMP.sh 4 <path/to/dataset> <path/to/checkpoints> 2 ``` Each of these scripts will by default run 40 warm-up iterations and benchmark the performance during training in the next 40 iterations. To have more control, you can run the script by directly providing all relevant run parameters. For example: ```bash horovodrun -np <num/of/gpus> python main.py --exec_mode train --benchmark --augment --data_dir <path/to/dataset> --model_dir <path/to/checkpoints> --batch_size <batch/size> --warmup_steps <warm-up/steps> --max_steps <max/steps> ``` At the end of the script, a line reporting the best train throughput will be printed. #### Inference performance benchmark To benchmark inference, run one of the scripts in `./scripts/`: ```bash bash scripts/unet3d_infer_benchmark{_TF-AMP}.sh <path/to/dataset> <path/to/checkpoints> <batch/size> ``` For example, to benchmark inference using mixed-precision with batch size 4: ```bash bash scripts/unet3d_infer_benchmark_TF-AMP.sh <path/to/dataset> <path/to/checkpoints> 4 ``` Each of these scripts will by default run 20 warm-up iterations and benchmark the performance during inference in the next 20 iterations. To have more control, you can run the script by directly providing all relevant run parameters. For example: ```bash python main.py --exec_mode predict --benchmark --data_dir <path/to/dataset> --model_dir <optional, path/to/checkpoint> --batch_size <batch/size> --warmup_steps <warm-up/steps> --max_steps <max/steps> ``` At the end of the script, a line reporting the best inference throughput will be printed. ### Results The following sections provide details on how we achieved our performance and accuracy of training and inference. 
#### Training accuracy results To reproduce these results, start the Docker container interactively and run one of the train scripts: ```bash bash scripts/unet3d_train_full{_TF-AMP}.sh <num/of/gpus> <path/to/dataset> <path/to/checkpoint> <batch/size> ``` For example, to train using 8 GPUs and a batch size of 2: ```bash bash scripts/unet3d_train_full_TF-AMP.sh 8 /data/preprocessed /results 2 ``` This command will launch a script which will run 5-fold cross-validation training for 16,000 iterations on each fold and print: * the validation DICE scores for each class: Tumor Core (TC), Peritumoral Edema (ED), Enhancing Tumor (ET), * the mean DICE score, * the whole tumor (WT) which represents a binary classification case (tumor vs background). The time reported is for one fold, which means that the training of 5 folds will take 5 times longer. The default batch size is 2; however, if your GPU has less than 16 GB of memory and you encounter GPU memory issues, you should decrease the batch size. The logs of the runs can be found in the `/results` directory once the script is finished. ##### Training accuracy: NVIDIA DGX A100 (8x A100 80G) The following table lists the average DICE score across 5-fold cross-validation. Our results were obtained by running the `scripts/unet3d_train_full{_TF-AMP}.sh` training script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX A100 (8x A100 80G) GPUs. | GPUs | Batch size / GPU | DICE - TF32 | DICE - mixed precision | Time to train - TF32 | Time to train - mixed precision | Time to train speedup (TF32 to mixed precision) | |---|---|--------|--------|--------|--------|------| | 8 | 2 | 0.8818 | 0.8819 | 8 min | 7 min | 1.14 | ##### Training accuracy: NVIDIA DGX-1 (8x V100 16G) The following table lists the average DICE score across 5-fold cross-validation. Our results were obtained by running the `scripts/unet3d_train_full{_TF-AMP}.sh` training script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX-1 (8x V100 16G) GPUs. | GPUs | Batch size / GPU | DICE - FP32 | DICE - mixed precision | Time to train - FP32 | Time to train - mixed precision | Time to train speedup (FP32 to mixed precision) | |---|---|--------|--------|--------|--------|------| | 8 | 2 | 0.8818 | 0.8819 | 33 min | 13 min | 2.54 | #### Training performance results ##### Training performance: NVIDIA DGX A100 (8x A100 80G) Our results were obtained by running the `scripts/unet3d_train_benchmark{_TF-AMP}.sh` training script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX A100 with (8x A100 80G) GPUs. Performance numbers (in volumes per second) were averaged over 80 iterations, excluding the first 40 warm-up steps. | GPUs | Batch size / GPU | Throughput - TF32 [img/s] | Throughput - mixed precision [img/s] | Throughput speedup (TF32 - mixed precision) | Weak scaling - TF32 | Weak scaling - mixed precision | |---|---|--------|--------|------|------|------| | 1 | 2 | 10.40 | 17.91 | 1.72 | N/A | N/A | | 1 | 4 | 10.66 | 19.88 | 1.86 | N/A | N/A | | 1 | 8 | 3.99 | 20.89 | 5.23 | N/A | N/A | | 8 | 2 | 81.71 | 100.24 | 1.23 | 7.85 | 5.60 | | 8 | 4 | 80.65 | 140.44 | 1.74 | 7.56 | 7.06 | | 8 | 8 | 29.79 | 137.61 | 4.62 | 7.47 | 6.59 | ##### Training performance: NVIDIA DGX-1 (8x V100 16G) Our results were obtained by running the `scripts/unet3d_train_benchmark{_TF-AMP}.sh` training script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX-1 with (8x V100 16G) GPUs.
Performance numbers (in volumes per second) were averaged over 80 iterations, excluding the first 40 warm-up steps. | GPUs | Batch size / GPU | Throughput - FP32 [img/s] | Throughput - mixed precision [img/s] | Throughput speedup (FP32 - mixed precision) | Weak scaling - FP32 | Weak scaling - mixed precision | |---|---|-------|-------|------|------|------| | 1 | 1 | 1.87 | 7.45 | 3.98 | N/A | N/A | | 1 | 2 | 2.32 | 8.79 | 3.79 | N/A | N/A | | 8 | 1 | 14.49 | 46.88 | 3.23 | 7.75 | 6.29 | | 8 | 2 | 18.06 | 58.30 | 3.23 | 7.78 | 6.63 | To achieve these same results, follow the steps in the [Training performance benchmark](#training-performance-benchmark) section. #### Inference performance results ##### Inference performance: NVIDIA DGX A100 (1x A100 80G) Our results were obtained by running the `scripts/unet3d_infer_benchmark{_TF-AMP}.sh` inference benchmarking script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX A100 with (1x A100 80G) GPU. Performance numbers (in volumes per second) were averaged over 40 iterations, excluding the first 20 warm-up steps. FP16 | Batch size | Resolution | Throughput Avg [img/s] | Latency Avg [ms] | Latency 90% [ms] | Latency 95% [ms] | Latency 99% [ms] | |---|---------------|-------|--------|--------|--------|--------| | 1 | 224x224x160x4 | 15.58 | 67.32 | 68.63 | 78.00 | 109.42 | | 2 | 224x224x160x4 | 15.81 | 129.06 | 129.93 | 135.31 | 166.62 | | 4 | 224x224x160x4 | 8.34 | 479.47 | 482.55 | 487.68 | 494.80 | TF32 | Batch size | Resolution | Throughput Avg [img/s] | Latency Avg [ms] | Latency 90% [ms] | Latency 95% [ms] | Latency 99% [ms] | |---|---------------|-------|---------|---------|---------|---------| | 1 | 224x224x160x4 | 9.42 | 106.22 | 106.68 | 107.67 | 122.73 | | 2 | 224x224x160x4 | 4.69 | 427.13 | 428.33 | 428.76 | 429.19 | | 4 | 224x224x160x4 | 2.32 | 1723.79 | 1725.77 | 1726.30 | 1728.23 | To achieve these same results, follow the steps in the [Inference performance benchmark](#inference-performance-benchmark) section. ##### Inference performance: NVIDIA DGX-1 (1x V100 16G) Our results were obtained by running the `scripts/unet3d_infer_benchmark{_TF-AMP}.sh` inference benchmarking script in the `tensorflow:21.10-tf1-py3` NGC container on NVIDIA DGX-1 with (1x V100 16G) GPU. Performance numbers (in volumes per second) were averaged over 40 iterations, excluding the first 20 warm-up steps. FP16 | Batch size | Resolution | Throughput Avg [img/s] | Latency Avg [ms] | Latency 90% [ms] | Latency 95% [ms] | Latency 99% [ms] | |---|---------------|------|--------|--------|--------|--------| | 1 | 224x224x160x4 | 7.64 | 136.81 | 138.94 | 143.59 | 152.74 | | 2 | 224x224x160x4 | 7.75 | 260.66 | 267.07 | 270.88 | 274.44 | | 4 | 224x224x160x4 | 4.78 | 838.52 | 842.88 | 843.30 | 844.62 | FP32 | Batch size | Resolution | Throughput Avg [img/s] | Latency Avg [ms] | Latency 90% [ms] | Latency 95% [ms] | Latency 99% [ms] | |---|---------------|------|--------|--------|--------|--------| | 1 | 224x224x160x4 | 2.30 | 434.95 | 436.82 | 437.40 | 438.48 | | 2 | 224x224x160x4 | 2.40 | 834.99 | 837.22 | 837.51 | 838.18 | | 4 | 224x224x160x4 | OOM | | | | | To achieve these same results, follow the steps in the [Inference performance benchmark](#inference-performance-benchmark) section. ## Release notes ### Changelog April 2023 * Ceased maintenance of this model in TensorFlow1 November 2021 * Updated README tables June 2020 * Initial release ### Known issues There are no known issues in this release.
PyTorch/SpeechRecognition/Jasper/triton/model_repo_configs/fp16/jasper-tensorrt-ensemble
jasper-tensorrt-ensemble
config
name: "jasper-tensorrt-ensemble" platform: "ensemble" max_batch_size: 8#MAX_BATCH input { name: "AUDIO_SIGNAL" data_type: TYPE_FP16 dims: -1#AUDIO_LENGTH } input { name: "NUM_SAMPLES" data_type: TYPE_INT32 dims: [ 1 ] } output { name: "TRANSCRIPT" data_type: TYPE_INT32 dims: [-1] } ensemble_scheduling { step { model_name: "feature-extractor-ts-trace" model_version: -1 input_map { key: "input__0" value: "AUDIO_SIGNAL" } input_map { key: "input__1" value: "NUM_SAMPLES" } output_map { key: "output__0" value: "AUDIO_FEATURES" } } step { model_name: "jasper-tensorrt" model_version: -1 input_map { key: "input__0" value: "AUDIO_FEATURES" } output_map { key: "output__0" value: "CHARACTER_PROBABILITIES" } } step { model_name: "decoder-ts-script" model_version: -1 input_map { key: "input__0" value: "CHARACTER_PROBABILITIES" } output_map { key: "output__0" value: "TRANSCRIPT" } } }
TensorFlow2/Recommendation/DLRM_and_DCNv2/deployment/hps
hps
__init__
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel (tgrel@nvidia.com)


from deployment.hps.constants import dense_model_name, hps_model_name
from deployment.hps.deploy_dense import deploy_dense
from deployment.hps.deploy_ensemble import deploy_ensemble
from deployment.hps.deploy_sparse import deploy_sparse
PyTorch/Translation/Transformer/fairseq/optim
optim
adagrad
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.

import torch.optim

from . import FairseqOptimizer, register_optimizer


@register_optimizer('adagrad')
class Adagrad(FairseqOptimizer):
    def __init__(self, args, params):
        super().__init__(args, params)
        self._optimizer = torch.optim.Adagrad(params, **self.optimizer_config)

    @property
    def optimizer_config(self):
        """
        Return a kwarg dictionary that will be used to override optimizer
        args stored in checkpoints. This allows us to load a checkpoint and
        resume training using a different set of optimizer args, e.g., with a
        different learning rate.
        """
        return {
            'lr': self.args.lr[0],
            'weight_decay': self.args.weight_decay,
        }
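A minimal usage sketch of the class above, assuming `FairseqOptimizer` exposes the usual `step()` pass-through to the wrapped optimizer; the `argparse.Namespace` values are hypothetical stand-ins for the command-line flags the registry normally provides:

```python
import argparse

import torch

# Hypothetical parsed args; `lr` is a list because fairseq supports
# epoch-indexed learning-rate schedules (only lr[0] is used here).
args = argparse.Namespace(lr=[0.1], weight_decay=0.0)
model = torch.nn.Linear(8, 2)

opt = Adagrad(args, list(model.parameters()))
loss = model(torch.randn(4, 8)).sum()
loss.backward()
opt.step()  # forwarded to the wrapped torch.optim.Adagrad
```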
TensorFlow/Detection/SSD/models/research/object_detection/meta_architectures
meta_architectures
faster_rcnn_meta_arch_test_lib
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for object_detection.meta_architectures.faster_rcnn_meta_arch."""
import functools
from absl.testing import parameterized

import numpy as np
import tensorflow as tf

from google.protobuf import text_format
from object_detection.anchor_generators import grid_anchor_generator
from object_detection.builders import box_predictor_builder
from object_detection.builders import hyperparams_builder
from object_detection.builders import post_processing_builder
from object_detection.core import balanced_positive_negative_sampler as sampler
from object_detection.core import losses
from object_detection.core import post_processing
from object_detection.core import target_assigner
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.protos import box_predictor_pb2
from object_detection.protos import hyperparams_pb2
from object_detection.protos import post_processing_pb2
from object_detection.utils import ops
from object_detection.utils import test_case
from object_detection.utils import test_utils

slim = tf.contrib.slim
BOX_CODE_SIZE = 4


class FakeFasterRCNNFeatureExtractor(
    faster_rcnn_meta_arch.FasterRCNNFeatureExtractor):
  """Fake feature extractor to use in tests."""

  def __init__(self):
    super(FakeFasterRCNNFeatureExtractor, self).__init__(
        is_training=False,
        first_stage_features_stride=32,
        reuse_weights=None,
        weight_decay=0.0)

  def preprocess(self, resized_inputs):
    return tf.identity(resized_inputs)

  def _extract_proposal_features(self, preprocessed_inputs, scope):
    with tf.variable_scope('mock_model'):
      proposal_features = 0 * slim.conv2d(
          preprocessed_inputs, num_outputs=3, kernel_size=1, scope='layer1')
      return proposal_features, {}

  def _extract_box_classifier_features(self, proposal_feature_maps, scope):
    with tf.variable_scope('mock_model'):
      return 0 * slim.conv2d(proposal_feature_maps,
                             num_outputs=3, kernel_size=1, scope='layer2')


class FasterRCNNMetaArchTestBase(test_case.TestCase, parameterized.TestCase):
  """Base class to test Faster R-CNN and R-FCN meta architectures."""

  def _build_arg_scope_with_hyperparams(self,
                                        hyperparams_text_proto,
                                        is_training):
    hyperparams = hyperparams_pb2.Hyperparams()
    text_format.Merge(hyperparams_text_proto, hyperparams)
    return hyperparams_builder.build(hyperparams, is_training=is_training)

  def _get_second_stage_box_predictor_text_proto(self):
    box_predictor_text_proto = """
      mask_rcnn_box_predictor {
        fc_hyperparams {
          op: FC
          activation: NONE
          regularizer {
            l2_regularizer {
              weight: 0.0005
            }
          }
          initializer {
            variance_scaling_initializer {
              factor: 1.0
              uniform: true
              mode: FAN_AVG
            }
          }
        }
      }
    """
    return box_predictor_text_proto

  def _add_mask_to_second_stage_box_predictor_text_proto(
      self, masks_are_class_agnostic=False):
    agnostic = 'true' if masks_are_class_agnostic else 'false'
    box_predictor_text_proto = """
      mask_rcnn_box_predictor {
        predict_instance_masks: true
        masks_are_class_agnostic: """ + agnostic + """
        mask_height: 14
        mask_width: 14
        conv_hyperparams {
          op: CONV
          regularizer {
            l2_regularizer {
              weight: 0.0
            }
          }
          initializer {
            truncated_normal_initializer {
              stddev: 0.01
            }
          }
        }
      }
    """
    return box_predictor_text_proto

  def _get_second_stage_box_predictor(self, num_classes, is_training,
                                      predict_masks, masks_are_class_agnostic):
    box_predictor_proto = box_predictor_pb2.BoxPredictor()
    text_format.Merge(self._get_second_stage_box_predictor_text_proto(),
                      box_predictor_proto)
    if predict_masks:
      text_format.Merge(
          self._add_mask_to_second_stage_box_predictor_text_proto(
              masks_are_class_agnostic),
          box_predictor_proto)

    return box_predictor_builder.build(
        hyperparams_builder.build,
        box_predictor_proto,
        num_classes=num_classes,
        is_training=is_training)

  def _get_model(self, box_predictor, **common_kwargs):
    return faster_rcnn_meta_arch.FasterRCNNMetaArch(
        initial_crop_size=3,
        maxpool_kernel_size=1,
        maxpool_stride=1,
        second_stage_mask_rcnn_box_predictor=box_predictor,
        **common_kwargs)

  def _build_model(self,
                   is_training,
                   number_of_stages,
                   second_stage_batch_size,
                   first_stage_max_proposals=8,
                   num_classes=2,
                   hard_mining=False,
                   softmax_second_stage_classification_loss=True,
                   predict_masks=False,
                   pad_to_max_dimension=None,
                   masks_are_class_agnostic=False,
                   use_matmul_crop_and_resize=False,
                   clip_anchors_to_image=False,
                   use_matmul_gather_in_matcher=False,
                   use_static_shapes=False):

    def image_resizer_fn(image, masks=None):
      """Fake image resizer function."""
      resized_inputs = []
      resized_image = tf.identity(image)
      if pad_to_max_dimension is not None:
        resized_image = tf.image.pad_to_bounding_box(image, 0, 0,
                                                     pad_to_max_dimension,
                                                     pad_to_max_dimension)
      resized_inputs.append(resized_image)
      if masks is not None:
        resized_masks = tf.identity(masks)
        if pad_to_max_dimension is not None:
          resized_masks = tf.image.pad_to_bounding_box(
              tf.transpose(masks, [1, 2, 0]), 0, 0,
              pad_to_max_dimension, pad_to_max_dimension)
          resized_masks = tf.transpose(resized_masks, [2, 0, 1])
        resized_inputs.append(resized_masks)
      resized_inputs.append(tf.shape(image))
      return resized_inputs

    # anchors in this test are designed so that a subset of anchors are inside
    # the image and a subset of anchors are outside.
    first_stage_anchor_scales = (0.001, 0.005, 0.1)
    first_stage_anchor_aspect_ratios = (0.5, 1.0, 2.0)
    first_stage_anchor_strides = (1, 1)
    first_stage_anchor_generator = grid_anchor_generator.GridAnchorGenerator(
        first_stage_anchor_scales,
        first_stage_anchor_aspect_ratios,
        anchor_stride=first_stage_anchor_strides)
    first_stage_target_assigner = target_assigner.create_target_assigner(
        'FasterRCNN',
        'proposal',
        use_matmul_gather=use_matmul_gather_in_matcher)

    fake_feature_extractor = FakeFasterRCNNFeatureExtractor()

    first_stage_box_predictor_hyperparams_text_proto = """
      op: CONV
      activation: RELU
      regularizer {
        l2_regularizer {
          weight: 0.00004
        }
      }
      initializer {
        truncated_normal_initializer {
          stddev: 0.03
        }
      }
    """
    first_stage_box_predictor_arg_scope_fn = (
        self._build_arg_scope_with_hyperparams(
            first_stage_box_predictor_hyperparams_text_proto, is_training))

    first_stage_box_predictor_kernel_size = 3
    first_stage_atrous_rate = 1
    first_stage_box_predictor_depth = 512
    first_stage_minibatch_size = 3
    first_stage_sampler = sampler.BalancedPositiveNegativeSampler(
        positive_fraction=0.5, is_static=use_static_shapes)

    first_stage_nms_score_threshold = -1.0
    first_stage_nms_iou_threshold = 1.0
    first_stage_max_proposals = first_stage_max_proposals
    first_stage_non_max_suppression_fn = functools.partial(
        post_processing.batch_multiclass_non_max_suppression,
        score_thresh=first_stage_nms_score_threshold,
        iou_thresh=first_stage_nms_iou_threshold,
        max_size_per_class=first_stage_max_proposals,
        max_total_size=first_stage_max_proposals,
        use_static_shapes=use_static_shapes)

    first_stage_localization_loss_weight = 1.0
    first_stage_objectness_loss_weight = 1.0

    post_processing_text_proto = """
      batch_non_max_suppression {
        score_threshold: -20.0
        iou_threshold: 1.0
        max_detections_per_class: 5
        max_total_detections: 5
        use_static_shapes: """ + '{}'.format(use_static_shapes) + """
      }
    """
    post_processing_config = post_processing_pb2.PostProcessing()
    text_format.Merge(post_processing_text_proto, post_processing_config)

    second_stage_target_assigner = target_assigner.create_target_assigner(
        'FasterRCNN',
        'detection',
        use_matmul_gather=use_matmul_gather_in_matcher)
    second_stage_non_max_suppression_fn, _ = post_processing_builder.build(
        post_processing_config)
    second_stage_sampler = sampler.BalancedPositiveNegativeSampler(
        positive_fraction=1.0, is_static=use_static_shapes)

    second_stage_score_conversion_fn = tf.identity
    second_stage_localization_loss_weight = 1.0
    second_stage_classification_loss_weight = 1.0
    if softmax_second_stage_classification_loss:
      second_stage_classification_loss = (
          losses.WeightedSoftmaxClassificationLoss())
    else:
      second_stage_classification_loss = (
          losses.WeightedSigmoidClassificationLoss())

    hard_example_miner = None
    if hard_mining:
      hard_example_miner = losses.HardExampleMiner(
          num_hard_examples=1,
          iou_threshold=0.99,
          loss_type='both',
          cls_loss_weight=second_stage_classification_loss_weight,
          loc_loss_weight=second_stage_localization_loss_weight,
          max_negatives_per_positive=None)

    crop_and_resize_fn = (
        ops.matmul_crop_and_resize
        if use_matmul_crop_and_resize else ops.native_crop_and_resize)
    common_kwargs = {
        'is_training': is_training,
        'num_classes': num_classes,
        'image_resizer_fn': image_resizer_fn,
        'feature_extractor': fake_feature_extractor,
        'number_of_stages': number_of_stages,
        'first_stage_anchor_generator': first_stage_anchor_generator,
        'first_stage_target_assigner': first_stage_target_assigner,
        'first_stage_atrous_rate': first_stage_atrous_rate,
        'first_stage_box_predictor_arg_scope_fn':
        first_stage_box_predictor_arg_scope_fn,
        'first_stage_box_predictor_kernel_size':
        first_stage_box_predictor_kernel_size,
        'first_stage_box_predictor_depth': first_stage_box_predictor_depth,
        'first_stage_minibatch_size': first_stage_minibatch_size,
        'first_stage_sampler': first_stage_sampler,
        'first_stage_non_max_suppression_fn':
        first_stage_non_max_suppression_fn,
        'first_stage_max_proposals': first_stage_max_proposals,
        'first_stage_localization_loss_weight':
        first_stage_localization_loss_weight,
        'first_stage_objectness_loss_weight':
        first_stage_objectness_loss_weight,
        'second_stage_target_assigner': second_stage_target_assigner,
        'second_stage_batch_size': second_stage_batch_size,
        'second_stage_sampler': second_stage_sampler,
        'second_stage_non_max_suppression_fn':
        second_stage_non_max_suppression_fn,
        'second_stage_score_conversion_fn': second_stage_score_conversion_fn,
        'second_stage_localization_loss_weight':
        second_stage_localization_loss_weight,
        'second_stage_classification_loss_weight':
        second_stage_classification_loss_weight,
        'second_stage_classification_loss':
        second_stage_classification_loss,
        'hard_example_miner': hard_example_miner,
        'crop_and_resize_fn': crop_and_resize_fn,
        'clip_anchors_to_image': clip_anchors_to_image,
        'use_static_shapes': use_static_shapes,
        'resize_masks': True,
    }

    return self._get_model(
        self._get_second_stage_box_predictor(
            num_classes=num_classes,
            is_training=is_training,
            predict_masks=predict_masks,
            masks_are_class_agnostic=masks_are_class_agnostic),
        **common_kwargs)

  def test_predict_gives_correct_shapes_in_inference_mode_first_stage_only(
      self, use_static_shapes=False):
    batch_size = 2
    height = 10
    width = 12
    input_image_shape = (batch_size, height, width, 3)

    def graph_fn(images):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=False,
          number_of_stages=1,
          second_stage_batch_size=2,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes)
      preprocessed_inputs, true_image_shapes = model.preprocess(images)
      prediction_dict = model.predict(preprocessed_inputs, true_image_shapes)
      return (prediction_dict['rpn_box_predictor_features'],
              prediction_dict['rpn_features_to_crop'],
              prediction_dict['image_shape'],
              prediction_dict['rpn_box_encodings'],
              prediction_dict['rpn_objectness_predictions_with_background'],
              prediction_dict['anchors'])

    images = np.zeros(input_image_shape, dtype=np.float32)

    # In inference mode, anchors are clipped to the image window, but not
    # pruned. Since MockFasterRCNN.extract_proposal_features returns a
    # tensor with the same shape as its input, the expected number of anchors
    # is height * width * the number of anchors per location (i.e. 3x3).
    expected_num_anchors = height * width * 3 * 3
    expected_output_shapes = {
        'rpn_box_predictor_features': (batch_size, height, width, 512),
        'rpn_features_to_crop': (batch_size, height, width, 3),
        'rpn_box_encodings': (batch_size, expected_num_anchors, 4),
        'rpn_objectness_predictions_with_background':
        (batch_size, expected_num_anchors, 2),
        'anchors': (expected_num_anchors, 4)
    }

    if use_static_shapes:
      results = self.execute(graph_fn, [images])
    else:
      results = self.execute_cpu(graph_fn, [images])

    self.assertAllEqual(results[0].shape,
                        expected_output_shapes['rpn_box_predictor_features'])
    self.assertAllEqual(results[1].shape,
                        expected_output_shapes['rpn_features_to_crop'])
    self.assertAllEqual(results[2], input_image_shape)
    self.assertAllEqual(results[3].shape,
                        expected_output_shapes['rpn_box_encodings'])
    self.assertAllEqual(
        results[4].shape,
        expected_output_shapes['rpn_objectness_predictions_with_background'])
    self.assertAllEqual(results[5].shape, expected_output_shapes['anchors'])

    # Check that anchors are clipped to window.
    anchors = results[5]
    self.assertTrue(np.all(np.greater_equal(anchors, 0)))
    self.assertTrue(np.all(np.less_equal(anchors[:, 0], height)))
    self.assertTrue(np.all(np.less_equal(anchors[:, 1], width)))
    self.assertTrue(np.all(np.less_equal(anchors[:, 2], height)))
    self.assertTrue(np.all(np.less_equal(anchors[:, 3], width)))

  def test_predict_gives_valid_anchors_in_training_mode_first_stage_only(self):
    test_graph = tf.Graph()
    with test_graph.as_default():
      model = self._build_model(
          is_training=True, number_of_stages=1, second_stage_batch_size=2)
      batch_size = 2
      height = 10
      width = 12
      input_image_shape = (batch_size, height, width, 3)
      _, true_image_shapes = model.preprocess(tf.zeros(input_image_shape))
      preprocessed_inputs = tf.placeholder(
          dtype=tf.float32, shape=(batch_size, None, None, 3))
      prediction_dict = model.predict(preprocessed_inputs, true_image_shapes)

      expected_output_keys = set([
          'rpn_box_predictor_features', 'rpn_features_to_crop', 'image_shape',
          'rpn_box_encodings', 'rpn_objectness_predictions_with_background',
          'anchors'])
      # At training time, anchors that exceed image bounds are pruned. Thus
      # the `expected_num_anchors` in the above inference mode test is now
      # a strict upper bound on the number of anchors.
      num_anchors_strict_upper_bound = height * width * 3 * 3

      init_op = tf.global_variables_initializer()
      with self.test_session(graph=test_graph) as sess:
        sess.run(init_op)
        prediction_out = sess.run(prediction_dict,
                                  feed_dict={
                                      preprocessed_inputs:
                                      np.zeros(input_image_shape)
                                  })

        self.assertEqual(set(prediction_out.keys()), expected_output_keys)
        self.assertAllEqual(prediction_out['image_shape'], input_image_shape)

        # Check that anchors have less than the upper bound and
        # are clipped to window.
        anchors = prediction_out['anchors']
        self.assertTrue(len(anchors.shape) == 2 and anchors.shape[1] == 4)
        num_anchors_out = anchors.shape[0]
        self.assertLess(num_anchors_out, num_anchors_strict_upper_bound)

        self.assertTrue(np.all(np.greater_equal(anchors, 0)))
        self.assertTrue(np.all(np.less_equal(anchors[:, 0], height)))
        self.assertTrue(np.all(np.less_equal(anchors[:, 1], width)))
        self.assertTrue(np.all(np.less_equal(anchors[:, 2], height)))
        self.assertTrue(np.all(np.less_equal(anchors[:, 3], width)))

        self.assertAllEqual(prediction_out['rpn_box_encodings'].shape,
                            (batch_size, num_anchors_out, 4))
        self.assertAllEqual(
            prediction_out['rpn_objectness_predictions_with_background'].shape,
            (batch_size, num_anchors_out, 2))

  def test_predict_correct_shapes_in_inference_mode_two_stages(
      self, use_static_shapes=False):

    def compare_results(results, expected_output_shapes):
      """Checks if the shape of the predictions are as expected."""
      self.assertAllEqual(results[0].shape,
                          expected_output_shapes['rpn_box_predictor_features'])
      self.assertAllEqual(results[1].shape,
                          expected_output_shapes['rpn_features_to_crop'])
      self.assertAllEqual(results[2].shape,
                          expected_output_shapes['image_shape'])
      self.assertAllEqual(results[3].shape,
                          expected_output_shapes['rpn_box_encodings'])
      self.assertAllEqual(
          results[4].shape,
          expected_output_shapes['rpn_objectness_predictions_with_background'])
      self.assertAllEqual(results[5].shape,
                          expected_output_shapes['anchors'])
      self.assertAllEqual(results[6].shape,
                          expected_output_shapes['refined_box_encodings'])
      self.assertAllEqual(
          results[7].shape,
          expected_output_shapes['class_predictions_with_background'])
      self.assertAllEqual(results[8].shape,
                          expected_output_shapes['num_proposals'])
      self.assertAllEqual(results[9].shape,
                          expected_output_shapes['proposal_boxes'])
      self.assertAllEqual(results[10].shape,
                          expected_output_shapes['proposal_boxes_normalized'])
      self.assertAllEqual(results[11].shape,
                          expected_output_shapes['box_classifier_features'])

    batch_size = 2
    image_size = 10
    max_num_proposals = 8
    initial_crop_size = 3
    maxpool_stride = 1

    input_shapes = [(batch_size, image_size, image_size, 3),
                    (None, image_size, image_size, 3),
                    (batch_size, None, None, 3),
                    (None, None, None, 3)]

    def graph_fn_tpu(images):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=False,
          number_of_stages=2,
          second_stage_batch_size=2,
          predict_masks=False,
          use_matmul_crop_and_resize=use_static_shapes,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes)
      preprocessed_inputs, true_image_shapes = model.preprocess(images)
      prediction_dict = model.predict(preprocessed_inputs, true_image_shapes)
      return (prediction_dict['rpn_box_predictor_features'],
              prediction_dict['rpn_features_to_crop'],
              prediction_dict['image_shape'],
              prediction_dict['rpn_box_encodings'],
              prediction_dict['rpn_objectness_predictions_with_background'],
              prediction_dict['anchors'],
              prediction_dict['refined_box_encodings'],
              prediction_dict['class_predictions_with_background'],
              prediction_dict['num_proposals'],
              prediction_dict['proposal_boxes'],
              prediction_dict['proposal_boxes_normalized'],
              prediction_dict['box_classifier_features'])

    expected_num_anchors = image_size * image_size * 3 * 3
    expected_shapes = {
        'rpn_box_predictor_features': (2, image_size, image_size, 512),
        'rpn_features_to_crop': (2, image_size, image_size, 3),
        'image_shape': (4,),
        'rpn_box_encodings': (2, expected_num_anchors, 4),
        'rpn_objectness_predictions_with_background':
        (2, expected_num_anchors, 2),
        'anchors': (expected_num_anchors, 4),
        'refined_box_encodings': (2 * max_num_proposals, 2, 4),
        'class_predictions_with_background': (2 * max_num_proposals, 2 + 1),
        'num_proposals': (2,),
        'proposal_boxes': (2, max_num_proposals, 4),
        'proposal_boxes_normalized': (2, max_num_proposals, 4),
        'box_classifier_features':
        self._get_box_classifier_features_shape(image_size,
                                                batch_size,
                                                max_num_proposals,
                                                initial_crop_size,
                                                maxpool_stride,
                                                3)
    }

    if use_static_shapes:
      input_shape = (batch_size, image_size, image_size, 3)
      images = np.zeros(input_shape, dtype=np.float32)
      results = self.execute(graph_fn_tpu, [images])
      compare_results(results, expected_shapes)
    else:
      for input_shape in input_shapes:
        test_graph = tf.Graph()
        with test_graph.as_default():
          model = self._build_model(
              is_training=False,
              number_of_stages=2,
              second_stage_batch_size=2,
              predict_masks=False)
          preprocessed_inputs = tf.placeholder(tf.float32, shape=input_shape)
          _, true_image_shapes = model.preprocess(preprocessed_inputs)
          result_tensor_dict = model.predict(
              preprocessed_inputs, true_image_shapes)
          init_op = tf.global_variables_initializer()
        with self.test_session(graph=test_graph) as sess:
          sess.run(init_op)
          tensor_dict_out = sess.run(result_tensor_dict, feed_dict={
              preprocessed_inputs:
              np.zeros((batch_size, image_size, image_size, 3))})
        self.assertEqual(set(tensor_dict_out.keys()),
                         set(expected_shapes.keys()))
        for key in expected_shapes:
          self.assertAllEqual(tensor_dict_out[key].shape, expected_shapes[key])

  def test_predict_gives_correct_shapes_in_train_mode_both_stages(
      self, use_static_shapes=False):
    batch_size = 2
    image_size = 10
    max_num_proposals = 7
    initial_crop_size = 3
    maxpool_stride = 1

    def graph_fn(images, gt_boxes, gt_classes, gt_weights):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=True,
          number_of_stages=2,
          second_stage_batch_size=7,
          predict_masks=False,
          use_matmul_crop_and_resize=use_static_shapes,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes)

      preprocessed_inputs, true_image_shapes = model.preprocess(images)
      model.provide_groundtruth(
          groundtruth_boxes_list=tf.unstack(gt_boxes),
          groundtruth_classes_list=tf.unstack(gt_classes),
          groundtruth_weights_list=tf.unstack(gt_weights))
      result_tensor_dict = model.predict(preprocessed_inputs,
                                         true_image_shapes)
      return (result_tensor_dict['refined_box_encodings'],
              result_tensor_dict['class_predictions_with_background'],
              result_tensor_dict['proposal_boxes'],
              result_tensor_dict['proposal_boxes_normalized'],
              result_tensor_dict['anchors'],
              result_tensor_dict['rpn_box_encodings'],
              result_tensor_dict['rpn_objectness_predictions_with_background'],
              result_tensor_dict['rpn_features_to_crop'],
              result_tensor_dict['rpn_box_predictor_features'],
             )

    image_shape = (batch_size, image_size, image_size, 3)
    images = np.zeros(image_shape, dtype=np.float32)
    gt_boxes = np.stack([
        np.array([[0, 0, .5, .5], [.5, .5, 1, 1]], dtype=np.float32),
        np.array([[0, .5, .5, 1], [.5, 0, 1, .5]], dtype=np.float32)
    ])
    gt_classes = np.stack([
        np.array([[1, 0], [0, 1]], dtype=np.float32),
        np.array([[1, 0], [1, 0]], dtype=np.float32)
    ])
    gt_weights = np.stack([
        np.array([1, 1], dtype=np.float32),
        np.array([1, 1], dtype=np.float32)
    ])

    if use_static_shapes:
      results = self.execute(graph_fn,
                             [images, gt_boxes, gt_classes, gt_weights])
    else:
      results = self.execute_cpu(graph_fn,
                                 [images, gt_boxes, gt_classes, gt_weights])

    expected_shapes = {
        'rpn_box_predictor_features': (2, image_size, image_size, 512),
        'rpn_features_to_crop': (2, image_size, image_size, 3),
        'refined_box_encodings': (2 * max_num_proposals, 2, 4),
        'class_predictions_with_background': (2 * max_num_proposals, 2 + 1),
        'proposal_boxes': (2, max_num_proposals, 4),
        'rpn_box_encodings': (2, image_size * image_size * 9, 4),
        'proposal_boxes_normalized': (2, max_num_proposals, 4),
        'box_classifier_features':
        self._get_box_classifier_features_shape(
            image_size, batch_size, max_num_proposals, initial_crop_size,
            maxpool_stride, 3),
        'rpn_objectness_predictions_with_background':
        (2, image_size * image_size * 9, 2)
    }
    # TODO(rathodv): Possibly change utils/test_case.py to accept dictionaries
    # and return dicionaries so don't have to rely on the order of tensors.
    self.assertAllEqual(results[0].shape,
                        expected_shapes['refined_box_encodings'])
    self.assertAllEqual(results[1].shape,
                        expected_shapes['class_predictions_with_background'])
    self.assertAllEqual(results[2].shape, expected_shapes['proposal_boxes'])
    self.assertAllEqual(results[3].shape,
                        expected_shapes['proposal_boxes_normalized'])
    anchors_shape = results[4].shape
    self.assertAllEqual(results[5].shape,
                        [batch_size, anchors_shape[0], 4])
    self.assertAllEqual(results[6].shape,
                        [batch_size, anchors_shape[0], 2])
    self.assertAllEqual(results[7].shape,
                        expected_shapes['rpn_features_to_crop'])
    self.assertAllEqual(results[8].shape,
                        expected_shapes['rpn_box_predictor_features'])

  def test_postprocess_first_stage_only_inference_mode(
      self, use_static_shapes=False, pad_to_max_dimension=None):
    batch_size = 2
    first_stage_max_proposals = 4 if use_static_shapes else 8

    def graph_fn(images,
                 rpn_box_encodings,
                 rpn_objectness_predictions_with_background,
                 rpn_features_to_crop,
                 anchors):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=False,
          number_of_stages=1,
          second_stage_batch_size=6,
          use_matmul_crop_and_resize=use_static_shapes,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes,
          use_matmul_gather_in_matcher=use_static_shapes,
          first_stage_max_proposals=first_stage_max_proposals,
          pad_to_max_dimension=pad_to_max_dimension)
      _, true_image_shapes = model.preprocess(images)
      proposals = model.postprocess({
          'rpn_box_encodings': rpn_box_encodings,
          'rpn_objectness_predictions_with_background':
          rpn_objectness_predictions_with_background,
          'rpn_features_to_crop': rpn_features_to_crop,
          'anchors': anchors}, true_image_shapes)
      return (proposals['num_detections'],
              proposals['detection_boxes'],
              proposals['detection_scores'])

    anchors = np.array(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=np.float32)
    rpn_box_encodings = np.zeros(
        (batch_size, anchors.shape[0], BOX_CODE_SIZE), dtype=np.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = np.array([
        [[-10, 13],
         [10, -10],
         [10, -11],
         [-10, 12]],
        [[10, -10],
         [-10, 13],
         [-10, 12],
         [10, -11]]], dtype=np.float32)
    rpn_features_to_crop = np.ones((batch_size, 8, 8, 10), dtype=np.float32)
    image_shape = (batch_size, 32, 32, 3)
    images = np.zeros(image_shape, dtype=np.float32)

    if use_static_shapes:
      results = self.execute(graph_fn,
                             [images, rpn_box_encodings,
                              rpn_objectness_predictions_with_background,
                              rpn_features_to_crop, anchors])
    else:
      results = self.execute_cpu(graph_fn,
                                 [images, rpn_box_encodings,
                                  rpn_objectness_predictions_with_background,
                                  rpn_features_to_crop, anchors])

    expected_proposal_boxes = [
        [[0, 0, .5, .5], [.5, .5, 1, 1], [0, .5, .5, 1], [.5, 0, 1.0, .5]]
        + 4 * [4 * [0]],
        [[0, .5, .5, 1], [.5, 0, 1.0, .5], [0, 0, .5, .5], [.5, .5, 1, 1]]
        + 4 * [4 * [0]]]
    expected_proposal_scores = [[1, 1, 0, 0, 0, 0, 0, 0],
                                [1, 1, 0, 0, 0, 0, 0, 0]]
    expected_num_proposals = [4, 4]

    self.assertAllClose(results[0], expected_num_proposals)
    for indx, num_proposals in enumerate(expected_num_proposals):
      self.assertAllClose(results[1][indx][0:num_proposals],
                          expected_proposal_boxes[indx][0:num_proposals])
      self.assertAllClose(results[2][indx][0:num_proposals],
                          expected_proposal_scores[indx][0:num_proposals])

  def _test_postprocess_first_stage_only_train_mode(self,
                                                    pad_to_max_dimension=None):
    model = self._build_model(
        is_training=True,
        number_of_stages=1,
        second_stage_batch_size=2,
        pad_to_max_dimension=pad_to_max_dimension)
    batch_size = 2
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)
    rpn_box_encodings = tf.zeros(
        [batch_size, anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant([
        [[-10, 13],
         [-10, 12],
         [-10, 11],
         [-10, 10]],
        [[-10, 13],
         [-10, 12],
         [-10, 11],
         [-10, 10]]], dtype=tf.float32)
    rpn_features_to_crop = tf.ones((batch_size, 8, 8, 10), dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)
    groundtruth_boxes_list = [
        tf.constant([[0, 0, .5, .5], [.5, .5, 1, 1]], dtype=tf.float32),
        tf.constant([[0, .5, .5, 1], [.5, 0, 1, .5]], dtype=tf.float32)]
    groundtruth_classes_list = [tf.constant([[1, 0], [0, 1]],
                                            dtype=tf.float32),
                                tf.constant([[1, 0], [1, 0]],
                                            dtype=tf.float32)]
    groundtruth_weights_list = [
        tf.constant([1, 1], dtype=tf.float32),
        tf.constant([1, 1], dtype=tf.float32)
    ]
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(
        groundtruth_boxes_list,
        groundtruth_classes_list,
        groundtruth_weights_list=groundtruth_weights_list)
    proposals = model.postprocess({
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'rpn_features_to_crop': rpn_features_to_crop,
        'anchors': anchors}, true_image_shapes)
    expected_proposal_boxes = [
        [[0, 0, .5, .5], [.5, .5, 1, 1]],
        [[0, .5, .5, 1], [.5, 0, 1, .5]]]
    expected_proposal_scores = [[1, 1],
                                [1, 1]]
    expected_num_proposals = [2, 2]

    expected_output_keys = set(['detection_boxes', 'detection_scores',
                                'num_detections'])
    self.assertEqual(set(proposals.keys()), expected_output_keys)
    with self.test_session() as sess:
      proposals_out = sess.run(proposals)
      for image_idx in range(batch_size):
        self.assertTrue(
            test_utils.first_rows_close_as_set(
                proposals_out['detection_boxes'][image_idx].tolist(),
                expected_proposal_boxes[image_idx]))
      self.assertAllClose(proposals_out['detection_scores'],
                          expected_proposal_scores)
      self.assertAllEqual(proposals_out['num_detections'],
                          expected_num_proposals)

  def test_postprocess_first_stage_only_train_mode(self):
    self._test_postprocess_first_stage_only_train_mode()

  def test_postprocess_first_stage_only_train_mode_padded_image(self):
    self._test_postprocess_first_stage_only_train_mode(pad_to_max_dimension=56)

  def test_postprocess_second_stage_only_inference_mode(
      self, use_static_shapes=False, pad_to_max_dimension=None):
    batch_size = 2
    num_classes = 2
    image_shape = np.array((2, 36, 48, 3), dtype=np.int32)
    first_stage_max_proposals = 8
    total_num_padded_proposals = batch_size * first_stage_max_proposals

    def graph_fn(images,
                 refined_box_encodings,
                 class_predictions_with_background,
                 num_proposals,
                 proposal_boxes):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=False,
          number_of_stages=2,
          second_stage_batch_size=6,
          use_matmul_crop_and_resize=use_static_shapes,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes,
          use_matmul_gather_in_matcher=use_static_shapes,
          pad_to_max_dimension=pad_to_max_dimension)
      _, true_image_shapes = model.preprocess(images)
      detections = model.postprocess({
          'refined_box_encodings': refined_box_encodings,
          'class_predictions_with_background':
          class_predictions_with_background,
          'num_proposals': num_proposals,
          'proposal_boxes': proposal_boxes,
      }, true_image_shapes)
      return (detections['num_detections'],
              detections['detection_boxes'],
              detections['detection_scores'],
              detections['detection_classes'])

    proposal_boxes = np.array(
        [[[1, 1, 2, 3],
          [0, 0, 1, 1],
          [.5, .5, .6, .6],
          4*[0], 4*[0], 4*[0], 4*[0], 4*[0]],
         [[2, 3, 6, 8],
          [1, 2, 5, 3],
          4*[0], 4*[0], 4*[0], 4*[0], 4*[0], 4*[0]]], dtype=np.float32)
    num_proposals = np.array([3, 2], dtype=np.int32)
    refined_box_encodings = np.zeros(
        [total_num_padded_proposals, num_classes, 4], dtype=np.float32)
    class_predictions_with_background = np.ones(
        [total_num_padded_proposals, num_classes+1], dtype=np.float32)
    images = np.zeros(image_shape, dtype=np.float32)

    if use_static_shapes:
      results = self.execute(graph_fn,
                             [images, refined_box_encodings,
                              class_predictions_with_background,
                              num_proposals, proposal_boxes])
    else:
      results = self.execute_cpu(graph_fn,
                                 [images, refined_box_encodings,
                                  class_predictions_with_background,
                                  num_proposals, proposal_boxes])

    expected_num_detections = [5, 4]
    expected_detection_classes = [[0, 0, 0, 1, 1], [0, 0, 1, 1, 0]]
    expected_detection_scores = [[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]]

    self.assertAllClose(results[0], expected_num_detections)
    for indx, num_proposals in enumerate(expected_num_detections):
      self.assertAllClose(results[2][indx][0:num_proposals],
                          expected_detection_scores[indx][0:num_proposals])
      self.assertAllClose(results[3][indx][0:num_proposals],
                          expected_detection_classes[indx][0:num_proposals])

    if not use_static_shapes:
      self.assertAllEqual(results[1].shape, [2, 5, 4])

  def test_preprocess_preserves_input_shapes(self):
    image_shapes = [(3, None, None, 3),
                    (None, 10, 10, 3),
                    (None, None, None, 3)]
    for image_shape in image_shapes:
      model = self._build_model(
          is_training=False, number_of_stages=2, second_stage_batch_size=6)
      image_placeholder = tf.placeholder(tf.float32, shape=image_shape)
      preprocessed_inputs, _ = model.preprocess(image_placeholder)
      self.assertAllEqual(preprocessed_inputs.shape.as_list(), image_shape)

  # TODO(rathodv): Split test into two - with and without masks.
  def test_loss_first_stage_only_mode(self):
    model = self._build_model(
        is_training=True, number_of_stages=1, second_stage_batch_size=6)
    batch_size = 2
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)

    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant([
        [[-10, 13],
         [10, -10],
         [10, -11],
         [-10, 12]],
        [[10, -10],
         [-10, 13],
         [-10, 12],
         [10, -11]]], dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    groundtruth_boxes_list = [
        tf.constant([[0, 0, .5, .5], [.5, .5, 1, 1]], dtype=tf.float32),
        tf.constant([[0, .5, .5, 1], [.5, 0, 1, .5]], dtype=tf.float32)]
    groundtruth_classes_list = [tf.constant([[1, 0], [0, 1]],
                                            dtype=tf.float32),
                                tf.constant([[1, 0], [1, 0]],
                                            dtype=tf.float32)]

    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(groundtruth_boxes_list,
                              groundtruth_classes_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/localization_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/objectness_loss'], 0)
      self.assertNotIn('Loss/BoxClassifierLoss/localization_loss',
                       loss_dict_out)
      self.assertNotIn('Loss/BoxClassifierLoss/classification_loss',
                       loss_dict_out)

  # TODO(rathodv): Split test into two - with and without masks.
  def test_loss_full(self):
    model = self._build_model(
        is_training=True, number_of_stages=2, second_stage_batch_size=6)
    batch_size = 3
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)
    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant(
        [[[-10, 13], [10, -10], [10, -11], [-10, 12]],
         [[10, -10], [-10, 13], [-10, 12], [10, -11]],
         [[10, -10], [-10, 13], [-10, 12], [10, -11]]],
        dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    num_proposals = tf.constant([6, 6, 6], dtype=tf.int32)
    proposal_boxes = tf.constant(
        3 * [[[0, 0, 16, 16],
              [0, 16, 16, 32],
              [16, 0, 32, 16],
              [16, 16, 32, 32],
              [0, 0, 16, 16],
              [0, 16, 16, 32]]], dtype=tf.float32)
    refined_box_encodings = tf.zeros(
        (batch_size * model.max_num_proposals,
         model.num_classes,
         BOX_CODE_SIZE), dtype=tf.float32)
    class_predictions_with_background = tf.constant(
        [
            [-10, 10, -10],  # first image
            [10, -10, -10],
            [10, -10, -10],
            [-10, -10, 10],
            [-10, 10, -10],
            [10, -10, -10],
            [10, -10, -10],  # second image
            [-10, 10, -10],
            [-10, 10, -10],
            [10, -10, -10],
            [10, -10, -10],
            [-10, 10, -10],
            [10, -10, -10],  # third image
            [-10, 10, -10],
            [-10, 10, -10],
            [10, -10, -10],
            [10, -10, -10],
            [-10, 10, -10]
        ],
        dtype=tf.float32)

    mask_predictions_logits = 20 * tf.ones((batch_size *
                                            model.max_num_proposals,
                                            model.num_classes,
                                            14,
                                            14),
                                           dtype=tf.float32)

    groundtruth_boxes_list = [
        tf.constant([[0, 0, .5, .5], [.5, .5, 1, 1]], dtype=tf.float32),
        tf.constant([[0, .5, .5, 1], [.5, 0, 1, .5]], dtype=tf.float32),
        tf.constant([[0, .5, .5, 1], [.5, 0, 1, 1]], dtype=tf.float32)
    ]
    groundtruth_classes_list = [
        tf.constant([[1, 0], [0, 1]], dtype=tf.float32),
        tf.constant([[1, 0], [1, 0]], dtype=tf.float32),
        tf.constant([[1, 0], [0, 1]], dtype=tf.float32)
    ]

    # Set all elements of groundtruth mask to 1.0. In this case all proposal
    # crops of the groundtruth masks should return a mask that covers the
    # entire proposal. Thus, if mask_predictions_logits element values are all
    # greater than 20, the loss should be zero.
    groundtruth_masks_list = [
        tf.convert_to_tensor(np.ones((2, 32, 32)), dtype=tf.float32),
        tf.convert_to_tensor(np.ones((2, 32, 32)), dtype=tf.float32),
        tf.convert_to_tensor(np.ones((2, 32, 32)), dtype=tf.float32)
    ]
    groundtruth_weights_list = [
        tf.constant([1, 1], dtype=tf.float32),
        tf.constant([1, 1], dtype=tf.float32),
        tf.constant([1, 0], dtype=tf.float32)
    ]
    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors,
        'refined_box_encodings': refined_box_encodings,
        'class_predictions_with_background':
        class_predictions_with_background,
        'proposal_boxes': proposal_boxes,
        'num_proposals': num_proposals,
        'mask_predictions': mask_predictions_logits
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(
        groundtruth_boxes_list,
        groundtruth_classes_list,
        groundtruth_masks_list,
        groundtruth_weights_list=groundtruth_weights_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/localization_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/objectness_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/localization_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/classification_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/BoxClassifierLoss/mask_loss'],
                          0)

  def test_loss_full_zero_padded_proposals(self):
    model = self._build_model(
        is_training=True, number_of_stages=2, second_stage_batch_size=6)
    batch_size = 1
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)

    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant([
        [[-10, 13],
         [10, -10],
         [10, -11],
         [10, -12]],], dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    # box_classifier_batch_size is 6, but here we assume that the number of
    # actual proposals (not counting zero paddings) is fewer (3).
    num_proposals = tf.constant([3], dtype=tf.int32)
    proposal_boxes = tf.constant(
        [[[0, 0, 16, 16],
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]]], dtype=tf.float32)

    refined_box_encodings = tf.zeros(
        (batch_size * model.max_num_proposals,
         model.num_classes,
         BOX_CODE_SIZE), dtype=tf.float32)
    class_predictions_with_background = tf.constant(
        [[-10, 10, -10],
         [10, -10, -10],
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0]], dtype=tf.float32)

    mask_predictions_logits = 20 * tf.ones((batch_size *
                                            model.max_num_proposals,
                                            model.num_classes,
                                            14,
                                            14),
                                           dtype=tf.float32)

    groundtruth_boxes_list = [
        tf.constant([[0, 0, .5, .5]], dtype=tf.float32)]
    groundtruth_classes_list = [tf.constant([[1, 0]], dtype=tf.float32)]

    # Set all elements of groundtruth mask to 1.0. In this case all proposal
    # crops of the groundtruth masks should return a mask that covers the
    # entire proposal. Thus, if mask_predictions_logits element values are all
    # greater than 20, the loss should be zero.
    groundtruth_masks_list = [tf.convert_to_tensor(np.ones((1, 32, 32)),
                                                   dtype=tf.float32)]
    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors,
        'refined_box_encodings': refined_box_encodings,
        'class_predictions_with_background':
        class_predictions_with_background,
        'proposal_boxes': proposal_boxes,
        'num_proposals': num_proposals,
        'mask_predictions': mask_predictions_logits
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(groundtruth_boxes_list,
                              groundtruth_classes_list,
                              groundtruth_masks_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/localization_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/objectness_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/localization_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/classification_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/BoxClassifierLoss/mask_loss'],
                          0)

  def test_loss_full_multiple_label_groundtruth(self):
    model = self._build_model(
        is_training=True,
        number_of_stages=2,
        second_stage_batch_size=6,
        softmax_second_stage_classification_loss=False)
    batch_size = 1
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)

    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant([
        [[-10, 13],
         [10, -10],
         [10, -11],
         [10, -12]],], dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    # box_classifier_batch_size is 6, but here we assume that the number of
    # actual proposals (not counting zero paddings) is fewer (3).
    num_proposals = tf.constant([3], dtype=tf.int32)
    proposal_boxes = tf.constant(
        [[[0, 0, 16, 16],
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]]], dtype=tf.float32)

    # second_stage_localization_loss should only be computed for predictions
    # that match groundtruth. For multiple label groundtruth boxes, the loss
    # should only be computed once for the label with the smaller index.
    refined_box_encodings = tf.constant(
        [[[0, 0, 0, 0], [1, 1, -1, -1]],
         [[1, 1, -1, -1], [1, 1, 1, 1]],
         [[1, 1, -1, -1], [1, 1, 1, 1]],
         [[1, 1, -1, -1], [1, 1, 1, 1]],
         [[1, 1, -1, -1], [1, 1, 1, 1]],
         [[1, 1, -1, -1], [1, 1, 1, 1]]], dtype=tf.float32)
    class_predictions_with_background = tf.constant(
        [[-100, 100, 100],
         [100, -100, -100],
         [100, -100, -100],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0]], dtype=tf.float32)

    mask_predictions_logits = 20 * tf.ones((batch_size *
                                            model.max_num_proposals,
                                            model.num_classes,
                                            14,
                                            14),
                                           dtype=tf.float32)

    groundtruth_boxes_list = [
        tf.constant([[0, 0, .5, .5]], dtype=tf.float32)]
    # Box contains two ground truth labels.
    groundtruth_classes_list = [tf.constant([[1, 1]], dtype=tf.float32)]

    # Set all elements of groundtruth mask to 1.0. In this case all proposal
    # crops of the groundtruth masks should return a mask that covers the
    # entire proposal. Thus, if mask_predictions_logits element values are all
    # greater than 20, the loss should be zero.
    groundtruth_masks_list = [tf.convert_to_tensor(np.ones((1, 32, 32)),
                                                   dtype=tf.float32)]
    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors,
        'refined_box_encodings': refined_box_encodings,
        'class_predictions_with_background':
        class_predictions_with_background,
        'proposal_boxes': proposal_boxes,
        'num_proposals': num_proposals,
        'mask_predictions': mask_predictions_logits
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(groundtruth_boxes_list,
                              groundtruth_classes_list,
                              groundtruth_masks_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/localization_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/RPNLoss/objectness_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/localization_loss'], 0)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/classification_loss'], 0)
      self.assertAllClose(loss_dict_out['Loss/BoxClassifierLoss/mask_loss'],
                          0)

  def test_loss_full_zero_padded_proposals_nonzero_loss_with_two_images(
      self, use_static_shapes=False, shared_boxes=False):
    batch_size = 2
    first_stage_max_proposals = 8
    second_stage_batch_size = 6
    num_classes = 2

    def graph_fn(anchors, rpn_box_encodings,
                 rpn_objectness_predictions_with_background, images,
                 num_proposals, proposal_boxes, refined_box_encodings,
                 class_predictions_with_background, groundtruth_boxes,
                 groundtruth_classes):
      """Function to construct tf graph for the test."""
      model = self._build_model(
          is_training=True,
          number_of_stages=2,
          second_stage_batch_size=second_stage_batch_size,
          first_stage_max_proposals=first_stage_max_proposals,
          num_classes=num_classes,
          use_matmul_crop_and_resize=use_static_shapes,
          clip_anchors_to_image=use_static_shapes,
          use_static_shapes=use_static_shapes)

      prediction_dict = {
          'rpn_box_encodings': rpn_box_encodings,
          'rpn_objectness_predictions_with_background':
          rpn_objectness_predictions_with_background,
          'image_shape': tf.shape(images),
          'anchors': anchors,
          'refined_box_encodings': refined_box_encodings,
          'class_predictions_with_background':
          class_predictions_with_background,
          'proposal_boxes': proposal_boxes,
          'num_proposals': num_proposals
      }
      _, true_image_shapes = model.preprocess(images)
      model.provide_groundtruth(tf.unstack(groundtruth_boxes),
                                tf.unstack(groundtruth_classes))
      loss_dict = model.loss(prediction_dict, true_image_shapes)
      return (loss_dict['Loss/RPNLoss/localization_loss'],
              loss_dict['Loss/RPNLoss/objectness_loss'],
              loss_dict['Loss/BoxClassifierLoss/localization_loss'],
              loss_dict['Loss/BoxClassifierLoss/classification_loss'])

    anchors = np.array(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=np.float32)
    rpn_box_encodings = np.zeros(
        [batch_size, anchors.shape[1], BOX_CODE_SIZE], dtype=np.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = np.array(
        [[[-10, 13],
          [10, -10],
          [10, -11],
          [10, -12]],
         [[-10, 13],
          [10, -10],
          [10, -11],
          [10, -12]]], dtype=np.float32)
    images = np.zeros([batch_size, 32, 32, 3], dtype=np.float32)

    # box_classifier_batch_size is 6, but here we assume that the number of
    # actual proposals (not counting zero paddings) is fewer.
    num_proposals = np.array([3, 2], dtype=np.int32)
    proposal_boxes = np.array(
        [[[0, 0, 16, 16],
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]],
         [[0, 0, 16, 16],
          [0, 16, 16, 32],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0]]], dtype=np.float32)

    refined_box_encodings = np.zeros(
        (batch_size * second_stage_batch_size,
         1 if shared_boxes else num_classes,
         BOX_CODE_SIZE), dtype=np.float32)
    class_predictions_with_background = np.array(
        [[-10, 10, -10],  # first image
         [10, -10, -10],
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0],
         [-10, -10, 10],  # second image
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0],
         [0, 0, 0],], dtype=np.float32)

    # The first groundtruth box is 4/5 of the anchor size in both directions,
    # experiencing a loss of:
    # 2 * SmoothL1(5 * log(4/5)) / num_proposals
    #   = 2 * (abs(5 * log(4/5)) - .5) / 3
    # The second groundtruth box is identical to the prediction and thus
    # experiences zero loss.
    # Total average loss is (abs(5 * log(4/5)) - .5) / 3.
    groundtruth_boxes = np.stack([
        np.array([[0.05, 0.05, 0.45, 0.45]], dtype=np.float32),
        np.array([[0.0, 0.0, 0.5, 0.5]], dtype=np.float32)])
    groundtruth_classes = np.stack([np.array([[1, 0]], dtype=np.float32),
                                    np.array([[0, 1]], dtype=np.float32)])

    execute_fn = self.execute_cpu
    if use_static_shapes:
      execute_fn = self.execute

    results = execute_fn(graph_fn, [
        anchors, rpn_box_encodings,
        rpn_objectness_predictions_with_background, images, num_proposals,
        proposal_boxes, refined_box_encodings,
        class_predictions_with_background, groundtruth_boxes,
        groundtruth_classes
    ])

    exp_loc_loss = (-5 * np.log(.8) - 0.5) / 3.0

    self.assertAllClose(results[0], exp_loc_loss, rtol=1e-4, atol=1e-4)
    self.assertAllClose(results[1], 0.0)
    self.assertAllClose(results[2], exp_loc_loss, rtol=1e-4, atol=1e-4)
    self.assertAllClose(results[3], 0.0)

  def test_loss_with_hard_mining(self):
    model = self._build_model(is_training=True,
                              number_of_stages=2,
                              second_stage_batch_size=None,
                              first_stage_max_proposals=6,
                              hard_mining=True)
    batch_size = 1
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)
    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant(
        [[[-10, 13],
          [-10, 12],
          [10, -11],
          [10, -12]]], dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    # box_classifier_batch_size is 6, but here we assume that the number of
    # actual proposals (not counting zero paddings) is fewer (3).
    num_proposals = tf.constant([3], dtype=tf.int32)
    proposal_boxes = tf.constant(
        [[[0, 0, 16, 16],
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]]], dtype=tf.float32)

    refined_box_encodings = tf.zeros(
        (batch_size * model.max_num_proposals,
         model.num_classes,
         BOX_CODE_SIZE), dtype=tf.float32)
    class_predictions_with_background = tf.constant(
        [[-10, 10, -10],  # first image
         [-10, -10, 10],
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0]], dtype=tf.float32)

    # The first groundtruth box is 4/5 of the anchor size in both directions,
    # experiencing a loss of:
    # 2 * SmoothL1(5 * log(4/5)) / num_proposals
    #   = 2 * (abs(5 * log(4/5)) - .5) / 3
    # The second groundtruth box is 46/50 of the anchor size in both
    # directions, experiencing a loss of:
    # 2 * SmoothL1(5 * log(46/50)) / num_proposals
    #   = 2 * (.5 * (5 * log(.92))^2) / 3.
    # Since the first groundtruth box experiences greater loss, and we have
    # set num_hard_examples=1 in the HardMiner, the final localization loss
    # corresponds to that of the first groundtruth box.
    groundtruth_boxes_list = [
        tf.constant([[0.05, 0.05, 0.45, 0.45],
                     [0.02, 0.52, 0.48, 0.98],], dtype=tf.float32)]
    groundtruth_classes_list = [tf.constant([[1, 0], [0, 1]],
                                            dtype=tf.float32)]
    exp_loc_loss = 2 * (-5 * np.log(.8) - 0.5) / 3.0

    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors,
        'refined_box_encodings': refined_box_encodings,
        'class_predictions_with_background':
        class_predictions_with_background,
        'proposal_boxes': proposal_boxes,
        'num_proposals': num_proposals
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(groundtruth_boxes_list,
                              groundtruth_classes_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/localization_loss'],
          exp_loc_loss)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/classification_loss'], 0)

  def test_loss_with_hard_mining_and_losses_mask(self):
    model = self._build_model(is_training=True,
                              number_of_stages=2,
                              second_stage_batch_size=None,
                              first_stage_max_proposals=6,
                              hard_mining=True)
    batch_size = 2
    number_of_proposals = 3
    anchors = tf.constant(
        [[0, 0, 16, 16],
         [0, 16, 16, 32],
         [16, 0, 32, 16],
         [16, 16, 32, 32]], dtype=tf.float32)
    rpn_box_encodings = tf.zeros(
        [batch_size,
         anchors.get_shape().as_list()[0],
         BOX_CODE_SIZE], dtype=tf.float32)
    # use different numbers for the objectness category to break ties in
    # order of boxes returned by NMS
    rpn_objectness_predictions_with_background = tf.constant(
        [[[-10, 13],
          [-10, 12],
          [10, -11],
          [10, -12]],
         [[-10, 13],
          [-10, 12],
          [10, -11],
          [10, -12]]], dtype=tf.float32)
    image_shape = tf.constant([batch_size, 32, 32, 3], dtype=tf.int32)

    # box_classifier_batch_size is 6, but here we assume that the number of
    # actual proposals (not counting zero paddings) is fewer (3).
    num_proposals = tf.constant([number_of_proposals, number_of_proposals],
                                dtype=tf.int32)
    proposal_boxes = tf.constant(
        [[[0, 0, 16, 16],  # first image
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]],
         [[0, 0, 16, 16],  # second image
          [0, 16, 16, 32],
          [16, 0, 32, 16],
          [0, 0, 0, 0],  # begin paddings
          [0, 0, 0, 0],
          [0, 0, 0, 0]]], dtype=tf.float32)

    refined_box_encodings = tf.zeros(
        (batch_size * model.max_num_proposals,
         model.num_classes,
         BOX_CODE_SIZE), dtype=tf.float32)
    class_predictions_with_background = tf.constant(
        [[-10, 10, -10],  # first image
         [-10, -10, 10],
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0],
         [-10, 10, -10],  # second image
         [-10, -10, 10],
         [10, -10, -10],
         [0, 0, 0],  # begin paddings
         [0, 0, 0],
         [0, 0, 0]], dtype=tf.float32)

    # The first groundtruth box is 4/5 of the anchor size in both directions,
    # experiencing a loss of:
    # 2 * SmoothL1(5 * log(4/5)) / (num_proposals * batch_size)
    #   = 2 * (abs(5 * log(4/5)) - .5) / 6
    # The second groundtruth box is 46/50 of the anchor size in both
    # directions, experiencing a loss of:
    # 2 * SmoothL1(5 * log(46/50)) / (num_proposals * batch_size)
    #   = 2 * (.5 * (5 * log(.92))^2) / 6.
    # Since the first groundtruth box experiences greater loss, and we have
    # set num_hard_examples=1 in the HardMiner, the final localization loss
    # corresponds to that of the first groundtruth box.
    groundtruth_boxes_list = [
        tf.constant([[0.05, 0.05, 0.45, 0.45],
                     [0.02, 0.52, 0.48, 0.98]], dtype=tf.float32),
        tf.constant([[0.05, 0.05, 0.45, 0.45],
                     [0.02, 0.52, 0.48, 0.98]], dtype=tf.float32)]
    groundtruth_classes_list = [
        tf.constant([[1, 0], [0, 1]], dtype=tf.float32),
        tf.constant([[1, 0], [0, 1]], dtype=tf.float32)]
    is_annotated_list = [tf.constant(True, dtype=tf.bool),
                         tf.constant(False, dtype=tf.bool)]
    exp_loc_loss = (2 * (-5 * np.log(.8) - 0.5) /
                    (number_of_proposals * batch_size))

    prediction_dict = {
        'rpn_box_encodings': rpn_box_encodings,
        'rpn_objectness_predictions_with_background':
        rpn_objectness_predictions_with_background,
        'image_shape': image_shape,
        'anchors': anchors,
        'refined_box_encodings': refined_box_encodings,
        'class_predictions_with_background':
        class_predictions_with_background,
        'proposal_boxes': proposal_boxes,
        'num_proposals': num_proposals
    }
    _, true_image_shapes = model.preprocess(tf.zeros(image_shape))
    model.provide_groundtruth(groundtruth_boxes_list,
                              groundtruth_classes_list,
                              is_annotated_list=is_annotated_list)
    loss_dict = model.loss(prediction_dict, true_image_shapes)

    with self.test_session() as sess:
      loss_dict_out = sess.run(loss_dict)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/localization_loss'],
          exp_loc_loss)
      self.assertAllClose(
          loss_dict_out['Loss/BoxClassifierLoss/classification_loss'], 0)

  def test_restore_map_for_classification_ckpt(self):
    # Define mock tensorflow classification graph and save variables.
    test_graph_classification = tf.Graph()
    with test_graph_classification.as_default():
      image = tf.placeholder(dtype=tf.float32, shape=[1, 20, 20, 3])
      with tf.variable_scope('mock_model'):
        net = slim.conv2d(image, num_outputs=3, kernel_size=1, scope='layer1')
        slim.conv2d(net, num_outputs=3, kernel_size=1, scope='layer2')

      init_op = tf.global_variables_initializer()
      saver = tf.train.Saver()
      save_path = self.get_temp_dir()
      with self.test_session(graph=test_graph_classification) as sess:
        sess.run(init_op)
        saved_model_path = saver.save(sess, save_path)

    # Create tensorflow detection graph and load variables from
    # classification checkpoint.
test_graph_detection = tf.Graph() with test_graph_detection.as_default(): model = self._build_model( is_training=False, number_of_stages=2, second_stage_batch_size=6) inputs_shape = (2, 20, 20, 3) inputs = tf.to_float(tf.random_uniform( inputs_shape, minval=0, maxval=255, dtype=tf.int32)) preprocessed_inputs, true_image_shapes = model.preprocess(inputs) prediction_dict = model.predict(preprocessed_inputs, true_image_shapes) model.postprocess(prediction_dict, true_image_shapes) var_map = model.restore_map(fine_tune_checkpoint_type='classification') self.assertIsInstance(var_map, dict) saver = tf.train.Saver(var_map) with self.test_session(graph=test_graph_classification) as sess: saver.restore(sess, saved_model_path) for var in sess.run(tf.report_uninitialized_variables()): self.assertNotIn(model.first_stage_feature_extractor_scope, var) self.assertNotIn(model.second_stage_feature_extractor_scope, var) def test_restore_map_for_detection_ckpt(self): # Define first detection graph and save variables. test_graph_detection1 = tf.Graph() with test_graph_detection1.as_default(): model = self._build_model( is_training=False, number_of_stages=2, second_stage_batch_size=6) inputs_shape = (2, 20, 20, 3) inputs = tf.to_float(tf.random_uniform( inputs_shape, minval=0, maxval=255, dtype=tf.int32)) preprocessed_inputs, true_image_shapes = model.preprocess(inputs) prediction_dict = model.predict(preprocessed_inputs, true_image_shapes) model.postprocess(prediction_dict, true_image_shapes) another_variable = tf.Variable([17.0], name='another_variable') # pylint: disable=unused-variable init_op = tf.global_variables_initializer() saver = tf.train.Saver() save_path = self.get_temp_dir() with self.test_session(graph=test_graph_detection1) as sess: sess.run(init_op) saved_model_path = saver.save(sess, save_path) # Define second detection graph and restore variables. 
test_graph_detection2 = tf.Graph() with test_graph_detection2.as_default(): model2 = self._build_model(is_training=False, number_of_stages=2, second_stage_batch_size=6, num_classes=42) inputs_shape2 = (2, 20, 20, 3) inputs2 = tf.to_float(tf.random_uniform( inputs_shape2, minval=0, maxval=255, dtype=tf.int32)) preprocessed_inputs2, true_image_shapes = model2.preprocess(inputs2) prediction_dict2 = model2.predict(preprocessed_inputs2, true_image_shapes) model2.postprocess(prediction_dict2, true_image_shapes) another_variable = tf.Variable([17.0], name='another_variable') # pylint: disable=unused-variable var_map = model2.restore_map(fine_tune_checkpoint_type='detection') self.assertIsInstance(var_map, dict) saver = tf.train.Saver(var_map) with self.test_session(graph=test_graph_detection2) as sess: saver.restore(sess, saved_model_path) uninitialized_vars_list = sess.run(tf.report_uninitialized_variables()) self.assertIn('another_variable', uninitialized_vars_list) for var in uninitialized_vars_list: self.assertNotIn(model2.first_stage_feature_extractor_scope, var) self.assertNotIn(model2.second_stage_feature_extractor_scope, var) def test_load_all_det_checkpoint_vars(self): test_graph_detection = tf.Graph() with test_graph_detection.as_default(): model = self._build_model( is_training=False, number_of_stages=2, second_stage_batch_size=6, num_classes=42) inputs_shape = (2, 20, 20, 3) inputs = tf.to_float( tf.random_uniform(inputs_shape, minval=0, maxval=255, dtype=tf.int32)) preprocessed_inputs, true_image_shapes = model.preprocess(inputs) prediction_dict = model.predict(preprocessed_inputs, true_image_shapes) model.postprocess(prediction_dict, true_image_shapes) another_variable = tf.Variable([17.0], name='another_variable') # pylint: disable=unused-variable var_map = model.restore_map( fine_tune_checkpoint_type='detection', load_all_detection_checkpoint_vars=True) self.assertIsInstance(var_map, dict) self.assertIn('another_variable', var_map) if __name__ == '__main__': tf.test.main()
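As a quick numeric sanity check of the expected localization loss derived in the hard-mining comments above — a standalone sketch, not part of the Object Detection test suite:

import numpy as np

def smooth_l1(x):
    # SmoothL1/Huber with delta = 1, as used by the second-stage localization loss
    return 0.5 * x ** 2 if abs(x) < 1.0 else abs(x) - 0.5

t = 5 * np.log(4.0 / 5.0)       # encoded height/width offset of the first box
# two offending coordinates (height and width), three proposals:
print(2 * smooth_l1(t) / 3.0)   # equals 2 * (-5 * np.log(.8) - 0.5) / 3.0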
TensorFlow/Detection/SSD/models/research/slim/scripts
scripts
finetune_inception_resnet_v2_on_flowers
#!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script performs the following operations:
# 1. Downloads the Flowers dataset.
# 2. Fine-tunes an Inception Resnet V2 model on the Flowers training set.
# 3. Evaluates the model on the Flowers validation set.
#
# Usage:
# cd slim
# ./scripts/finetune_inception_resnet_v2_on_flowers.sh
set -e

# Where the pre-trained Inception Resnet V2 checkpoint is saved to.
PRETRAINED_CHECKPOINT_DIR=/tmp/checkpoints

# Name of the model being fine-tuned.
MODEL_NAME=inception_resnet_v2

# Where the training (fine-tuned) checkpoint and logs will be saved to.
TRAIN_DIR=/tmp/flowers-models/${MODEL_NAME}

# Where the dataset is saved to.
DATASET_DIR=/tmp/flowers

# Download the pre-trained checkpoint.
if [ ! -d "$PRETRAINED_CHECKPOINT_DIR" ]; then
  mkdir ${PRETRAINED_CHECKPOINT_DIR}
fi
if [ ! -f ${PRETRAINED_CHECKPOINT_DIR}/${MODEL_NAME}.ckpt ]; then
  wget http://download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
  tar -xvf inception_resnet_v2_2016_08_30.tar.gz
  mv inception_resnet_v2.ckpt ${PRETRAINED_CHECKPOINT_DIR}/${MODEL_NAME}.ckpt
  rm inception_resnet_v2_2016_08_30.tar.gz
fi

# Download the dataset.
python download_and_convert_data.py \
  --dataset_name=flowers \
  --dataset_dir=${DATASET_DIR}

# Fine-tune only the new layers for 1000 steps.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR} \
  --dataset_name=flowers \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --model_name=${MODEL_NAME} \
  --checkpoint_path=${PRETRAINED_CHECKPOINT_DIR}/${MODEL_NAME}.ckpt \
  --checkpoint_exclude_scopes=InceptionResnetV2/Logits,InceptionResnetV2/AuxLogits \
  --trainable_scopes=InceptionResnetV2/Logits,InceptionResnetV2/AuxLogits \
  --max_number_of_steps=1000 \
  --batch_size=32 \
  --learning_rate=0.01 \
  --learning_rate_decay_type=fixed \
  --save_interval_secs=60 \
  --save_summaries_secs=60 \
  --log_every_n_steps=10 \
  --optimizer=rmsprop \
  --weight_decay=0.00004

# Run evaluation.
python eval_image_classifier.py \
  --checkpoint_path=${TRAIN_DIR} \
  --eval_dir=${TRAIN_DIR} \
  --dataset_name=flowers \
  --dataset_split_name=validation \
  --dataset_dir=${DATASET_DIR} \
  --model_name=${MODEL_NAME}

# Fine-tune all layers for 500 steps.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR}/all \
  --dataset_name=flowers \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --model_name=${MODEL_NAME} \
  --checkpoint_path=${TRAIN_DIR} \
  --max_number_of_steps=500 \
  --batch_size=32 \
  --learning_rate=0.0001 \
  --learning_rate_decay_type=fixed \
  --save_interval_secs=60 \
  --save_summaries_secs=60 \
  --log_every_n_steps=10 \
  --optimizer=rmsprop \
  --weight_decay=0.00004

# Run evaluation.
python eval_image_classifier.py \ --checkpoint_path=${TRAIN_DIR}/all \ --eval_dir=${TRAIN_DIR}/all \ --dataset_name=flowers \ --dataset_split_name=validation \ --dataset_dir=${DATASET_DIR} \ --model_name=${MODEL_NAME}
TensorFlow/Classification/ConvNets
ConvNets
.gitignore
# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg *.whl MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: local_settings.py db.sqlite3 # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Python IDEs .idea/ .vscode/ # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ models/ # Testing tests/ pytest.ini requirements_test.txt # Inference related workspace/ navigator_workspace/
PyTorch/Detection/Efficientdet/scripts/D0
D0
inference_AMP_V100-32G
#!/bin/bash rm -rf *.json python -u -m bind_launch --nproc_per_node=${NUM_PROC:-1} validate.py '/workspace/object_detection/datasets/coco/' --model efficientdet_d0 -b ${BATCH_SIZE:-8} --torchscript --use-ema --amp --checkpoint ${CKPT_PATH:-/checkpoints/Effdet_B0.pth} --inference
PyTorch/SpeechSynthesis/Tacotron2/phrases
phrases
phrase_4_256
The forms of printed letters should be beautiful, and that their arrangement on the page should be reasonable and a help to the shapeliness of the letters themselves and the form of printed letters should be beautiful, and that their arrangement on pages. The forms of printed letters should be beautiful, and that their arrangement on the page should be reasonable and a help to the shapeliness of the letters themselves and the form of printed letters should be beautiful, and that their arrangement on pages. The forms of printed letters should be beautiful, and that their arrangement on the page should be reasonable and a help to the shapeliness of the letters themselves and the form of printed letters should be beautiful, and that their arrangement on pages. The forms of printed letters should be beautiful, and that their arrangement on the page should be reasonable and a help to the shapeliness of the letters themselves and the form of printed letters should be beautiful, and that their arrangement on pages.
TensorFlow/Classification/ConvNets/se-resnext101-32x4d/training
training
DGX2_SE-RNxt101-32x4d_FP32_90E
#!/bin/bash # Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. WORKSPACE=${1:-"/workspace/rn50v15_tf"} DATA_DIR=${2:-"/data"} OTHER=${@:3} if [[ ! -z "${BIND_TO_SOCKET}" ]]; then BIND_TO_SOCKET="--bind-to socket" fi mpiexec --allow-run-as-root ${BIND_TO_SOCKET} -np 16 python3 main.py --arch=se-resnext101-32x4d \ --mode=train_and_evaluate --iter_unit=epoch --num_iter=90 \ --batch_size=64 --warmup_steps=100 --cosine_lr --label_smoothing 0.1 \ --lr_init=0.256 --lr_warmup_epochs=8 --momentum=0.875 --weight_decay=6.103515625e-05 \ --data_dir=${DATA_DIR}/tfrecords --data_idx_dir=${DATA_DIR}/dali_idx \ --results_dir=${WORKSPACE}/results --weight_init=fan_in ${OTHER}
PyTorch/SpeechSynthesis/HiFiGAN/scripts
scripts
extract_fine_tune_mels
#!/usr/bin/env bash

# Runs a process which resembles FastPitch training and extracts mel-scale
# spectrograms generated with FastPitch for HiFi-GAN fine-tuning.

export OMP_NUM_THREADS=1

: ${NUM_GPUS:=8}
: ${BATCH_SIZE:=16}
: ${AMP:=false}
: ${DATASET_PATH:="data/LJSpeech-1.1"}
: ${OUTPUT_DIR:="data/mels-fastpitch-ljs22khz"}
: ${DATASET_FILELIST:=data/filelists/ljs_audio_pitch_text.txt}  # train + val + test
: ${LOAD_PITCH_FROM_DISK:=true}
: ${LOAD_MEL_FROM_DISK:=false}  # mel-spec of the original data
: ${SAMPLING_RATE:=22050}
: ${FASTPITCH:="pretrained_models/fastpitch/nvidia_fastpitch_210824.pt"}

mkdir -p "$OUTPUT_DIR"

# Pre-calculate pitch values and write them to disk.
# This step requires only the CPU.
if [[ "$LOAD_PITCH_FROM_DISK" = true && ! -d "$DATASET_PATH/pitch" ]]; then
    echo "Pitch values needed by FastPitch were not found in $DATASET_PATH/pitch."
    echo "Calculating..."
    python prepare_dataset.py \
        --wav-text-filelists data/filelists/ljs_audio_text.txt \
        --n-workers 16 \
        --batch-size 1 \
        --dataset-path $DATASET_PATH \
        --extract-pitch
fi

ARGS+=" --cuda"
ARGS+=" -o $OUTPUT_DIR"
ARGS+=" --dataset-path $DATASET_PATH"
ARGS+=" --dataset-files $DATASET_FILELIST"
ARGS+=" -bs $BATCH_SIZE"
[ -n "$FASTPITCH" ]                && ARGS+=" --checkpoint-path $FASTPITCH"
[ -z "$FASTPITCH" ]                && ARGS+=" --resume"
[ "$AMP" = "true" ]                && ARGS+=" --amp"
[ "$LOAD_MEL_FROM_DISK" = true ]   && ARGS+=" --load-mel-from-disk"
[ "$LOAD_PITCH_FROM_DISK" = true ] && ARGS+=" --load-pitch-from-disk"
[ "$PITCH_ONLINE_DIR" != "" ]      && ARGS+=" --pitch-online-dir $PITCH_ONLINE_DIR"  # e.g., /dev/shm/pitch

if [ "$SAMPLING_RATE" == "44100" ]; then
    ARGS+=" --sampling-rate 44100"
    ARGS+=" --filter-length 2048"
    ARGS+=" --hop-length 512"
    ARGS+=" --win-length 2048"
    ARGS+=" --mel-fmin 0.0"
    ARGS+=" --mel-fmax 22050.0"

elif [ "$SAMPLING_RATE" != "22050" ]; then
    echo "Sampling rate $SAMPLING_RATE not supported. Edit $0 manually."
    exit 1
fi

: ${DISTRIBUTED:="-m torch.distributed.launch --nproc_per_node $NUM_GPUS"}
python $DISTRIBUTED fastpitch/extract_mels.py $ARGS "$@"
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/cli/commands
commands
mimic_dataset
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import argparse
import json
import logging

from collections import defaultdict

from syngen.cli.commands.base_command import BaseCommand
from syngen.configuration import SynGenDatasetFeatureSpec, SynGenConfiguration
from syngen.generator.tabular import tabular_generators_classes
from syngen.utils.types import MetaData

logger = logging.getLogger(__name__)
log = logger


class MimicDatasetCommand(BaseCommand):

    def init_parser(self, base_parser):
        mimic_parser = base_parser.add_parser(
            "mimic-dataset",
            help="Quickly creates a SynGen Configuration for the given dataset",
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        )
        mimic_parser.set_defaults(action=self.run)

        mimic_parser.add_argument(
            "-dp", "--dataset-path", type=str, required=True,
            help="Path to the dataset in SynGen format"
        )
        mimic_parser.add_argument(
            "-of", "--output-file", type=str, required=True,
            help="Path to the generated SynGen Configuration"
        )
        mimic_parser.add_argument(
            "-tg", "--tab-gen", type=str, choices=list(tabular_generators_classes.keys()), default='kde',
            help="Tabular Generator to mimic all tabular features"
        )
        mimic_parser.add_argument(
            "-rsg", "--random-struct-gen", action='store_true',
            help="Generates random structure based on Erdos-Renyi model"
        )
        mimic_parser.add_argument(
            "-es", "--edge-scale", type=float, default=None,
            help="Multiplies the number of edges to generate by the provided number"
        )
        mimic_parser.add_argument(
            "-en", "--node-scale", type=float, default=None,
            help="Multiplies the number of nodes to generate by the provided number"
        )
        mimic_parser.add_argument(
            "-gdp", "--gen-dump-path", type=str, default=None,
            help="Path to store the fitted generators"
        )

    def run(self, args):
        dict_args = vars(args)

        feature_spec = SynGenDatasetFeatureSpec.instantiate_from_preprocessed(dict_args['dataset_path'])

        scales = {
            MetaData.EDGES: dict_args['edge_scale'],
            MetaData.NODES: dict_args['node_scale'],
        }

        for part in [MetaData.NODES, MetaData.EDGES]:
            for part_info in feature_spec[part]:

                if scales[part]:
                    part_info[MetaData.COUNT] = int(part_info[MetaData.COUNT] * scales[part])

                if MetaData.FEATURES in part_info and len(part_info[MetaData.FEATURES]) > 0:
                    feature_files_content = defaultdict(list)

                    for feature in part_info[MetaData.FEATURES]:
                        if MetaData.FEATURE_FILE in feature:
                            feature_files_content[feature[MetaData.FEATURE_FILE]].append(feature[MetaData.NAME])

                    if feature_files_content:
                        part_info[MetaData.TABULAR_GENERATORS] = [
                            {
                                MetaData.TYPE: dict_args['tab_gen'],
                                MetaData.FEATURES_LIST: feats_list,
                                MetaData.FEATURE_FILE: ff,
                                MetaData.DATA_SOURCE: {
                                    MetaData.TYPE: 'rnd',
                                } if dict_args['tab_gen'] == 'random' else {
                                    MetaData.TYPE: 'cfg',
                                    MetaData.PATH: dict_args['dataset_path'],
                                    MetaData.NAME: part_info[MetaData.NAME],
                                },
                                MetaData.PARAMS: {},
                                MetaData.DUMP_PATH: os.path.join(
                                    dict_args['gen_dump_path'],
                                    f"{part}_{part_info[MetaData.NAME]}_tab_gen_{idx}.pkl")
                                if dict_args['gen_dump_path'] else None
                            }
                            for idx, (ff, feats_list) in
enumerate(feature_files_content.items()) ] else: part_info[MetaData.TABULAR_GENERATORS] = [ { MetaData.TYPE: dict_args['tab_gen'], MetaData.FEATURES_LIST: -1, MetaData.DATA_SOURCE: { MetaData.TYPE: 'rnd', } if dict_args['tab_gen'] == 'random' else { MetaData.TYPE: 'cfg', MetaData.PATH: dict_args['dataset_path'], MetaData.NAME: part_info[MetaData.NAME], }, MetaData.PARAMS: {}, MetaData.DUMP_PATH: os.path.join(dict_args['gen_dump_path'], f"{part}_{part_info[MetaData.NAME]}_tab_gen_{0}.pkl") if dict_args['gen_dump_path'] else None } ] if part == MetaData.EDGES: part_info[MetaData.STRUCTURE_GENERATOR] = { MetaData.TYPE: 'RMAT', MetaData.DATA_SOURCE: { MetaData.TYPE: 'rnd', } if dict_args['random_struct_gen'] else { MetaData.TYPE: 'cfg', MetaData.PATH: dict_args['dataset_path'], MetaData.NAME: part_info[MetaData.NAME], }, MetaData.PARAMS: {}, MetaData.DUMP_PATH: os.path.join(dict_args['gen_dump_path'], f"{part_info[MetaData.NAME]}_struct_gen.pkl") if dict_args['gen_dump_path'] else None } config = SynGenConfiguration(feature_spec) with open(dict_args['output_file'], 'w') as f: json.dump(config, f, indent=4) log.info(f"SynGen Configuration saved into {dict_args['output_file']}")
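A hedged sketch of driving this command programmatically — the supported entry point is the syngen CLI itself; the paths below are placeholders, and we assume BaseCommand takes no constructor arguments:

from argparse import Namespace

from syngen.cli.commands.mimic_dataset import MimicDatasetCommand

args = Namespace(
    dataset_path='/data/my_graph',         # placeholder: dataset in SynGen format
    output_file='/tmp/syngen_config.json', # placeholder output path
    tab_gen='kde',
    random_struct_gen=False,
    edge_scale=None,
    node_scale=None,
    gen_dump_path=None,
)
MimicDatasetCommand().run(args)  # writes the mimicking configuration to output_file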
PyTorch/Detection/Efficientdet/effdet
effdet
model
""" PyTorch EfficientDet model Based on official Tensorflow version at: https://github.com/google/automl/tree/master/efficientdet Paper: https://arxiv.org/abs/1911.09070 Hacked together by Ross Wightman """ # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import torch import torch.nn as nn import logging import math from collections import OrderedDict from typing import List, Callable from .layers import create_conv2d, drop_path, create_pool2d, Swish, get_act_layer from .config import get_fpn_config, get_backbone_config from .efficientnet import EfficientNet, efficientnet_configs _DEBUG = False _ACT_LAYER = Swish class SequentialAppend(nn.Sequential): def __init__(self, *args): super(SequentialAppend, self).__init__(*args) def forward(self, x: List[torch.Tensor]): for module in self: x.append(module(x)) return x class SequentialAppendLast(nn.Sequential): def __init__(self, *args): super(SequentialAppendLast, self).__init__(*args) def forward(self, x: List[torch.Tensor]): for module in self: x.append(module(x[-1])) return x class ConvBnAct2d(nn.Module): def __init__(self, in_channels, out_channels, kernel_size, stride=1, dilation=1, padding='', bias=False, norm_layer=nn.BatchNorm2d, norm_kwargs=None, act_layer=_ACT_LAYER): super(ConvBnAct2d, self).__init__() norm_kwargs = norm_kwargs or {} self.conv = create_conv2d( in_channels, out_channels, kernel_size, stride=stride, dilation=dilation, padding=padding, bias=bias) self.bn = None if norm_layer is None else norm_layer(out_channels, **norm_kwargs) # here self.act = None if act_layer is None else act_layer(inplace=True) def forward(self, x): x = self.conv(x) if self.bn is not None: x = self.bn(x) if self.act is not None: x = self.act(x) return x class SeparableConv2d(nn.Module): """ Separable Conv """ def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, padding='', bias=False, channel_multiplier=1.0, pw_kernel_size=1, act_layer=_ACT_LAYER, norm_layer=nn.BatchNorm2d, norm_kwargs=None): super(SeparableConv2d, self).__init__() norm_kwargs = norm_kwargs or {} self.conv_dw = create_conv2d( in_channels, int(in_channels * channel_multiplier), kernel_size, stride=stride, dilation=dilation, padding=padding, depthwise=True) self.conv_pw = create_conv2d( int(in_channels * channel_multiplier), out_channels, pw_kernel_size, padding=padding, bias=bias) self.bn = None if norm_layer is None else norm_layer(out_channels, **norm_kwargs) # Here self.act = None if act_layer is None else act_layer(inplace=True) def forward(self, x): x = self.conv_dw(x) x = self.conv_pw(x) if self.bn is not None: x = self.bn(x) if self.act is not None: x = self.act(x) return x class ResampleFeatureMap(nn.Sequential): def __init__(self, in_channels, out_channels, reduction_ratio=1., pad_type='', pooling_type='max', norm_layer=nn.BatchNorm2d, norm_kwargs=None, apply_bn=False, conv_after_downsample=False, redundant_bias=False): super(ResampleFeatureMap, self).__init__() pooling_type = pooling_type or 'max' 
self.in_channels = in_channels self.out_channels = out_channels self.reduction_ratio = reduction_ratio self.conv_after_downsample = conv_after_downsample conv = None if in_channels != out_channels: conv = ConvBnAct2d( in_channels, out_channels, kernel_size=1, padding=pad_type, norm_layer=norm_layer if apply_bn else None, norm_kwargs=norm_kwargs, bias=not apply_bn or redundant_bias, act_layer=None) if reduction_ratio > 1: stride_size = int(reduction_ratio) if conv is not None and not self.conv_after_downsample: self.add_module('conv', conv) self.add_module( 'downsample', create_pool2d( pooling_type, kernel_size=stride_size + 1, stride=stride_size, padding=pad_type)) if conv is not None and self.conv_after_downsample: self.add_module('conv', conv) else: if conv is not None: self.add_module('conv', conv) if reduction_ratio < 1: scale = int(1 // reduction_ratio) self.add_module('upsample', nn.UpsamplingNearest2d(scale_factor=scale)) # def forward(self, x): # # here for debugging only # assert x.shape[1] == self.in_channels # if self.reduction_ratio > 1: # if hasattr(self, 'conv') and not self.conv_after_downsample: # x = self.conv(x) # x = self.downsample(x) # if hasattr(self, 'conv') and self.conv_after_downsample: # x = self.conv(x) # else: # if hasattr(self, 'conv'): # x = self.conv(x) # if self.reduction_ratio < 1: # x = self.upsample(x) # return x class FpnCombine(nn.Module): def __init__(self, feature_info, fpn_config, fpn_channels, inputs_offsets, target_reduction, pad_type='', pooling_type='max', norm_layer=nn.BatchNorm2d, norm_kwargs=None, apply_bn_for_resampling=False, conv_after_downsample=False, redundant_bias=False, weight_method='attn'): super(FpnCombine, self).__init__() self.inputs_offsets = inputs_offsets self.weight_method = weight_method self.resample = nn.ModuleDict() for idx, offset in enumerate(inputs_offsets): in_channels = fpn_channels if offset < len(feature_info): in_channels = feature_info[offset]['num_chs'] input_reduction = feature_info[offset]['reduction'] else: node_idx = offset - len(feature_info) input_reduction = fpn_config.nodes[node_idx]['reduction'] reduction_ratio = target_reduction / input_reduction self.resample[str(offset)] = ResampleFeatureMap( in_channels, fpn_channels, reduction_ratio=reduction_ratio, pad_type=pad_type, pooling_type=pooling_type, norm_layer=norm_layer, norm_kwargs=norm_kwargs, apply_bn=apply_bn_for_resampling, conv_after_downsample=conv_after_downsample, redundant_bias=redundant_bias) if weight_method == 'attn' or weight_method == 'fastattn': # WSM self.edge_weights = nn.Parameter(torch.ones(len(inputs_offsets)), requires_grad=True) else: self.edge_weights = None def forward(self, x): dtype = x[0].dtype nodes = [] for offset in self.inputs_offsets: input_node = x[offset] input_node = self.resample[str(offset)](input_node) nodes.append(input_node) if self.weight_method == 'attn': normalized_weights = torch.softmax(self.edge_weights.type(dtype), dim=0) x = torch.stack(nodes, dim=-1) * normalized_weights elif self.weight_method == 'fastattn': edge_weights = nn.functional.relu(self.edge_weights.type(dtype)) weights_sum = torch.sum(edge_weights) x = torch.stack( [(nodes[i] * edge_weights[i]) / (weights_sum + 0.0001) for i in range(len(nodes))], dim=-1) elif self.weight_method == 'sum': x = torch.stack(nodes, dim=-1) else: raise ValueError('unknown weight_method {}'.format(self.weight_method)) x = torch.sum(x, dim=-1) return x class BiFpnLayer(nn.Module): def __init__(self, feature_info, fpn_config, fpn_channels, num_levels=5, pad_type='', 
pooling_type='max', norm_layer=nn.BatchNorm2d, norm_kwargs=None, act_layer=_ACT_LAYER, apply_bn_for_resampling=False, conv_after_downsample=True, conv_bn_relu_pattern=False, separable_conv=True, redundant_bias=False): super(BiFpnLayer, self).__init__() self.fpn_config = fpn_config self.num_levels = num_levels self.conv_bn_relu_pattern = False self.feature_info = [] self.fnode = SequentialAppend() for i, fnode_cfg in enumerate(fpn_config.nodes): logging.debug('fnode {} : {}'.format(i, fnode_cfg)) fnode_layers = OrderedDict() # combine features reduction = fnode_cfg['reduction'] fnode_layers['combine'] = FpnCombine( feature_info, fpn_config, fpn_channels, fnode_cfg['inputs_offsets'], target_reduction=reduction, pad_type=pad_type, pooling_type=pooling_type, norm_layer=norm_layer, norm_kwargs=norm_kwargs, apply_bn_for_resampling=apply_bn_for_resampling, conv_after_downsample=conv_after_downsample, redundant_bias=redundant_bias, weight_method=fpn_config.weight_method) self.feature_info.append(dict(num_chs=fpn_channels, reduction=reduction)) # after combine ops after_combine = OrderedDict() if not conv_bn_relu_pattern: after_combine['act'] = act_layer(inplace=True) conv_bias = redundant_bias conv_act = None else: conv_bias = False conv_act = act_layer conv_kwargs = dict( in_channels=fpn_channels, out_channels=fpn_channels, kernel_size=3, padding=pad_type, bias=conv_bias, norm_layer=norm_layer, norm_kwargs=norm_kwargs, act_layer=conv_act) after_combine['conv'] = SeparableConv2d(**conv_kwargs) if separable_conv else ConvBnAct2d(**conv_kwargs) fnode_layers['after_combine'] = nn.Sequential(after_combine) self.fnode.add_module(str(i), nn.Sequential(fnode_layers)) self.feature_info = self.feature_info[-num_levels::] def forward(self, x): x = self.fnode(x) return x[-self.num_levels::] class BiFpn(nn.Module): def __init__(self, config, feature_info, norm_layer=nn.BatchNorm2d, norm_kwargs=None, act_layer=_ACT_LAYER): super(BiFpn, self).__init__() self.config = config fpn_config = config.fpn_config or get_fpn_config(config.fpn_name) self.resample = SequentialAppendLast() for level in range(config.num_levels): if level < len(feature_info): in_chs = feature_info[level]['num_chs'] reduction = feature_info[level]['reduction'] else: # Adds a coarser level by downsampling the last feature map reduction_ratio = 2 self.resample.add_module(str(level), ResampleFeatureMap( in_channels=in_chs, out_channels=config.fpn_channels, pad_type=config.pad_type, pooling_type=config.pooling_type, norm_layer=norm_layer, norm_kwargs=norm_kwargs, reduction_ratio=reduction_ratio, apply_bn=config.apply_bn_for_resampling, conv_after_downsample=config.conv_after_downsample, redundant_bias=config.redundant_bias, )) in_chs = config.fpn_channels reduction = int(reduction * reduction_ratio) feature_info.append(dict(num_chs=in_chs, reduction=reduction)) self.cell = nn.Sequential() for rep in range(config.fpn_cell_repeats): logging.debug('building cell {}'.format(rep)) fpn_layer = BiFpnLayer( feature_info=feature_info, fpn_config=fpn_config, fpn_channels=config.fpn_channels, num_levels=config.num_levels, pad_type=config.pad_type, pooling_type=config.pooling_type, norm_layer=norm_layer, norm_kwargs=norm_kwargs, act_layer=act_layer, separable_conv=config.separable_conv, apply_bn_for_resampling=config.apply_bn_for_resampling, conv_after_downsample=config.conv_after_downsample, conv_bn_relu_pattern=config.conv_bn_relu_pattern, redundant_bias=config.redundant_bias, ) self.cell.add_module(str(rep), fpn_layer) feature_info = fpn_layer.feature_info 
def forward(self, x): assert len(self.resample) == self.config.num_levels - len(x) x = self.resample(x) x = self.cell(x) return x class HeadNet(nn.Module): def __init__(self, config, num_outputs, norm_layer=nn.BatchNorm2d, norm_kwargs=None, act_layer=_ACT_LAYER, predict_nhwc=False): super(HeadNet, self).__init__() norm_kwargs = norm_kwargs or {} self.config = config self.predict_nhwc = predict_nhwc num_anchors = len(config.aspect_ratios) * config.num_scales self.conv_rep = nn.ModuleList() self.bn_rep = nn.ModuleList() conv_kwargs = dict( in_channels=config.fpn_channels, out_channels=config.fpn_channels, kernel_size=3, padding=self.config.pad_type, bias=config.redundant_bias, act_layer=None, norm_layer=None) for i in range(config.box_class_repeats): conv = SeparableConv2d(**conv_kwargs) if config.separable_conv else ConvBnAct2d(**conv_kwargs) self.conv_rep.append(conv) bn_levels = [] for _ in range(config.num_levels): bn_seq = nn.Sequential() bn_seq.add_module('bn', norm_layer(config.fpn_channels, **norm_kwargs)) # Here bn_levels.append(bn_seq) self.bn_rep.append(nn.ModuleList(bn_levels)) self.act = act_layer(inplace=True) predict_kwargs = dict( in_channels=config.fpn_channels, out_channels=num_outputs * num_anchors, kernel_size=3, padding=self.config.pad_type, bias=True, norm_layer=None, act_layer=None) if config.separable_conv: self.predict = SeparableConv2d(**predict_kwargs) else: self.predict = ConvBnAct2d(**predict_kwargs) if self.predict_nhwc: self.predict = self.predict.to(memory_format=torch.channels_last) def forward(self, x): outputs = [] for level in range(self.config.num_levels): x_level = x[level] for i in range(self.config.box_class_repeats): x_level_ident = x_level x_level = self.conv_rep[i](x_level) x_level = self.bn_rep[i][level](x_level) x_level = self.act(x_level) if i > 0 and self.config.fpn_drop_path_rate: x_level = drop_path(x_level, self.config.fpn_drop_path_rate, self.training) x_level += x_level_ident if self.predict_nhwc: x_level = x_level.contiguous(memory_format=torch.channels_last) outputs.append(self.predict(x_level)) return outputs def _init_weight(m, n='', ): """ Weight initialization as per Tensorflow official implementations. """ def _fan_in_out(w, groups=1): dimensions = w.dim() if dimensions < 2: raise ValueError("Fan in and fan out can not be computed for tensor with fewer than 2 dimensions") num_input_fmaps = w.size(1) num_output_fmaps = w.size(0) receptive_field_size = 1 if w.dim() > 2: receptive_field_size = w[0][0].numel() fan_in = num_input_fmaps * receptive_field_size fan_out = num_output_fmaps * receptive_field_size fan_out //= groups return fan_in, fan_out def _glorot_uniform(w, gain=1, groups=1): fan_in, fan_out = _fan_in_out(w, groups) gain /= max(1., (fan_in + fan_out) / 2.) # fan avg limit = math.sqrt(3.0 * gain) w.data.uniform_(-limit, limit) def _variance_scaling(w, gain=1, groups=1): fan_in, fan_out = _fan_in_out(w, groups) # gain /= max(1., fan_in) # fan in gain /= max(1., (fan_in + fan_out) / 2.) # fan # should it be normal or trunc normal? using normal for now since no good trunc in PT # constant taken from scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.) 
# std = math.sqrt(gain) / .87962566103423978
        # w.data.trunc_normal(std=std)
        std = math.sqrt(gain)
        w.data.normal_(std=std)

    if isinstance(m, SeparableConv2d):
        if 'box_net' in n or 'class_net' in n:
            _variance_scaling(m.conv_dw.weight, groups=m.conv_dw.groups)
            _variance_scaling(m.conv_pw.weight)
            if m.conv_pw.bias is not None:
                if 'class_net.predict' in n:
                    m.conv_pw.bias.data.fill_(-math.log((1 - 0.01) / 0.01))
                else:
                    m.conv_pw.bias.data.zero_()
        else:
            _glorot_uniform(m.conv_dw.weight, groups=m.conv_dw.groups)
            _glorot_uniform(m.conv_pw.weight)
            if m.conv_pw.bias is not None:
                m.conv_pw.bias.data.zero_()
    elif isinstance(m, ConvBnAct2d):
        if 'box_net' in n or 'class_net' in n:
            m.conv.weight.data.normal_(std=.01)
            if m.conv.bias is not None:
                if 'class_net.predict' in n:
                    m.conv.bias.data.fill_(-math.log((1 - 0.01) / 0.01))
                else:
                    m.conv.bias.data.zero_()
        else:
            _glorot_uniform(m.conv.weight)
            if m.conv.bias is not None:
                m.conv.bias.data.zero_()
    elif isinstance(m, nn.BatchNorm2d):
        # looks like all bn init the same?
        m.weight.data.fill_(1.0)
        m.bias.data.zero_()


def _init_weight_alt(m, n='', ):
    """ Weight initialization alternative, based on EfficientNet backbone init w/ class bias addition
    NOTE: this will likely be removed after some experimentation
    """
    if isinstance(m, nn.Conv2d):
        fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
        fan_out //= m.groups
        m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
        if m.bias is not None:
            if 'class_net.predict' in n:
                m.bias.data.fill_(-math.log((1 - 0.01) / 0.01))
            else:
                m.bias.data.zero_()
    elif isinstance(m, nn.BatchNorm2d):
        m.weight.data.fill_(1.0)
        m.bias.data.zero_()


class EfficientDet(nn.Module):

    def __init__(self, config, norm_kwargs=None, pretrained_backbone_path='', alternate_init=False):
        super(EfficientDet, self).__init__()
        norm_kwargs = norm_kwargs or dict(eps=.001, momentum=.01)
        ## Replacing backbone
        global_config = efficientnet_configs['fanout']
        backbone_config = get_backbone_config(config.backbone_name)
        self.backbone = EfficientNet(
            width_coeff=backbone_config['width_coeff'], depth_coeff=backbone_config['depth_coeff'],
            dropout=backbone_config['dropout'], num_classes=1000, global_config=global_config,
            features_only=True, out_indices=[2, 3, 4])
        feature_info = self.backbone.feature_info
        if pretrained_backbone_path != '':
            ckpt_state_dict = torch.load(pretrained_backbone_path, map_location=lambda storage, loc: storage)
            print("Backbone being loaded from checkpoint {}".format(pretrained_backbone_path))
            self.backbone.load_state_dict(ckpt_state_dict, strict=False)
            del ckpt_state_dict
        # Pad to multiple of 8 for better performance
        if config.fused_focal_loss:
            num_classes = (config.num_classes + 7) // 8 * 8
        else:
            num_classes = config.num_classes
        # TODO: predict_nhwc=config.fused_focal_loss for class_net
        act_layer = get_act_layer(config.act_type)
        self.fpn = BiFpn(config, feature_info, norm_kwargs=norm_kwargs, act_layer=act_layer)
        self.class_net = HeadNet(config, num_outputs=num_classes, norm_kwargs=norm_kwargs, act_layer=act_layer)
        self.box_net = HeadNet(config, num_outputs=4, norm_kwargs=norm_kwargs, act_layer=act_layer)

        for n, m in self.named_modules():
            if 'backbone' not in n:
                if alternate_init:
                    _init_weight_alt(m, n)
                else:
                    _init_weight(m, n)

    def forward(self, x):
        _, x = self.backbone(x)
        x = self.fpn(x)
        x_class = self.class_net(x)
        x_box = self.box_net(x)
        return x_class, x_box
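A minimal forward-pass sketch, under the assumption that `config` is a populated EfficientDet config object exposing the fields referenced in this file (backbone_name, num_classes, act_type, the fpn_* settings, and so on) and that the module is importable as effdet.model:

import torch
from effdet.model import EfficientDet  # assumed import path

def smoke_test(config):
    # `config` must be supplied by the caller, e.g. built from effdet's config helpers
    model = EfficientDet(config).eval()
    x = torch.randn(1, 3, 512, 512)
    with torch.no_grad():
        class_outs, box_outs = model(x)  # per-level class and box predictions
    for c, b in zip(class_outs, box_outs):
        print(tuple(c.shape), tuple(b.shape))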
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/scripts
scripts
run_traffic_DGX1-16G
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. : ${SEED:=1} : ${LR:=1e-3} : ${NGPU:=8} : ${BATCH_SIZE:=1024} : ${EPOCHS:=20} python -m torch.distributed.run --nproc_per_node=${NGPU} train.py \ --dataset traffic \ --data_path /data/processed/traffic_bin \ --batch_size=${BATCH_SIZE} \ --sample 450000 50000 \ --lr ${LR} \ --epochs ${EPOCHS} \ --seed ${SEED} \ --use_amp \ --results /results/TFT_traffic_bs${NGPU}x${BATCH_SIZE}_lr${LR}/seed_${SEED}
PyTorch/Classification/GPUNet/triton/runner
runner
stages
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pathlib from typing import List, Optional, Tuple, Union # method from PEP-366 to support relative import in executed modules if __name__ == "__main__" and __package__ is None: __package__ = pathlib.Path(__file__).parent.name from .core import Command class ResultsType: """ Results types generated by runner """ TRITON_PERFORMANCE_OFFLINE = "triton_performance_offline" TRITON_PERFORMANCE_ONLINE = "triton_performance_online" class Stage: """ Stage definition """ label: str commands: List[Command] result_path: Optional[str] result_type: Optional[str] def __init__( self, commands: Union[Tuple[str, ...], List[str]], result_path: Optional[str] = None, result_type: Optional[str] = None, ): """ Args: commands: List or Tuple of commands provided as raw string result_path: Path to results file generated by stage result_type: Type of results generated by stage """ if type(commands) not in [tuple, list]: raise ValueError("""Incorrect type of commands list. Please, provide list of commands as tuple.""") self.commands = list(map(lambda command: Command(data=command), commands)) self.result_path = result_path self.result_type = result_type class ExportStage(Stage): label = "Export Model" class ConversionStage(Stage): label = "Convert Model" class DeployStage(Stage): label = "Deploy Model" class CorrectnessStage(Stage): label = "Model Correctness" class TritonPreparePerformanceProfilingDataStage(Stage): label = "Prepare Triton Profiling Data" class TritonPerformanceOfflineStage(Stage): label = "Triton Performance Offline" class TritonPerformanceOnlineStage(Stage): label = "Triton Performance Online"
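A small usage sketch (the module path and the command string are hypothetical; any raw shell command string is accepted):

from runner.stages import ResultsType, TritonPerformanceOfflineStage  # assumed import path

stage = TritonPerformanceOfflineStage(
    commands=("perf_analyzer -m my_model --concurrency-range 1",),  # placeholder command
    result_path="results/triton_performance_offline.csv",
    result_type=ResultsType.TRITON_PERFORMANCE_OFFLINE,
)
print(stage.label, len(stage.commands))  # "Triton Performance Offline", 1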
PyTorch/LanguageModeling/Transformer-XL
Transformer-XL
requirements
pytorch-transformers==1.1.0 sacremoses==0.0.35 pynvml==8.0.4
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/cli/commands
commands
pretrain
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import logging from syngen.cli.commands.base_command import BaseCommand from syngen.benchmark.tasks import train_ec from syngen.configuration import SynGenDatasetFeatureSpec, SynGenConfiguration from syngen.generator.tabular import tabular_generators_classes from syngen.utils.types import MetaData from syngen.benchmark.models import MODELS logging.basicConfig() logging.root.setLevel(logging.NOTSET) logger = logging.getLogger(__name__) log = logger class PretrainCommand(BaseCommand): def init_parser(self, base_parser): pretrain_parser = base_parser.add_parser( "pretrain", help="Run Synthetic Graph Data Pre-training Tool", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) pretrain_parser.set_defaults(action=self.run) # global pretrain_parser.add_argument( "--task", type=str, default="ec", help=f"now the only available option is ec (edge-classification)", ) pretrain_parser.add_argument( "--seed", type=int, default=777, help="Set a seed globally" ) pretrain_parser.add_argument( "--timeit", action="store_true", help="Measures average training time", ) pretrain_parser.add_argument( "--data-path", type=str, required=True, help="Path to dataset in SynGen format to train/finetune on", ) pretrain_parser.add_argument( "--edge-name", type=str, required=True, help="Name of the edge to be used during train/finetune", ) pretrain_parser.add_argument( "--pretraining-data-path", type=str, default=None, help="Path to dataset in SynGen format to pretrain on", ) pretrain_parser.add_argument( "--pretraining-edge-name", type=str, default=None, help="Name of the edge to be used during pretraining", ) # model pretrain_parser.add_argument( "--model", type=str, default="gat_ec", help=f"List of available models: {list(MODELS.keys())}", ) pretrain_parser.add_argument( "--hidden-dim", type=int, default=128, help="Hidden feature dimension" ) pretrain_parser.add_argument( "--out-dim", type=int, default=32, help="Output feature dimension", ) pretrain_parser.add_argument( "--num-classes", type=int, required=True, help="Number of classes in the target column", ) pretrain_parser.add_argument( "--n-layers", type=int, default=1, help="Multi-layer full neighborhood sampler layers", ) for key in MODELS.keys(): MODELS[key].add_args(pretrain_parser) # dataset pretrain_parser.add_argument( "--target-col", type=str, required=True, help="Target column for downstream prediction", ) pretrain_parser.add_argument( "--train-ratio", type=float, default=0.8, help="Ratio of data to use as train", ) pretrain_parser.add_argument( "--val-ratio", type=float, default=0.1, help="Ratio of data to use as val", ) pretrain_parser.add_argument( "--test-ratio", type=float, default=0.1, help="Ratio of data to use as test", ) # training pretrain_parser.add_argument( "--learning-rate", "--lr", dest="learning_rate", type=float, default=1e-3, help=f"Initial learning rate for optimizer", ) pretrain_parser.add_argument( "--weight-decay", 
type=float, default=0.1, help=f"Weight decay for optimizer", ) pretrain_parser.add_argument( "--batch-size", type=int, default=128, help="Pre-training and Fine-tuning dataloader batch size", ) pretrain_parser.add_argument( "--num-workers", type=int, default=8, help="Number of dataloading workers", ) pretrain_parser.add_argument( "--shuffle", action="store_true", default=False, help="Shuffles data each epoch" ) pretrain_parser.add_argument( "--pretrain-epochs", type=int, default=0, help="Number of pre-training epochs", ) pretrain_parser.add_argument( "--finetune-epochs", type=int, default=1, help="Number of finetuning epochs", ) pretrain_parser.add_argument( "--log-interval", type=int, default=1, help="logging interval" ) def run(self, args): dict_args = vars(args) finetune_feature_spec = SynGenDatasetFeatureSpec.instantiate_from_preprocessed( dict_args['data_path'] ) pretrain_feature_spec = None if dict_args['pretraining_data_path']: pretrain_feature_spec = SynGenDatasetFeatureSpec.instantiate_from_preprocessed( dict_args['pretraining_data_path'] ) if args.task == "ec": out = train_ec( args, finetune_feature_spec=finetune_feature_spec, pretrain_feature_spec=pretrain_feature_spec, ) else: raise ValueError("benchmark not supported") log.info(out) return out
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/util
util
layerData
/* * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the NVIDIA CORPORATION nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "layerData.h" #include <sstream> #include <stdexcept> using namespace nvinfer1; namespace tts { /****************************************************************************** * CONSTRUCTORS / DESTRUCTOR ************************************************** *****************************************************************************/ LayerData::LayerData() : mKeys() , mPrefix{0} , mData{} { } /****************************************************************************** * PUBLIC METHODS ************************************************************* *****************************************************************************/ Weights LayerData::get(const std::string& name) const { auto pos = mKeys.find(name); if (pos == mKeys.end()) { std::ostringstream ss; ss << "Unable to find '" << name << "' in : {"; for (auto pair : mKeys) { ss << "'" << pair.first << "', "; } ss << "}"; throw std::runtime_error(ss.str()); } const size_t idx = pos->second; return Weights{DataType::kFLOAT, (const void*) (mData.data() + mPrefix[idx]), static_cast<int64_t>(mPrefix[idx + 1] - mPrefix[idx])}; } bool LayerData::has(const std::string& name) const { return mKeys.count(name) > 0; } /****************************************************************************** * OUTPUT FUNCTIONS *********************************************************** *****************************************************************************/ std::ostream& operator<<(std::ostream& stream, const LayerData& data) { stream << "LayerData: {"; for (auto pair : data.mKeys) { stream << pair.first << ":" << (data.mPrefix[pair.second + 1] - data.mPrefix[pair.second]) << ", "; } stream << "}"; return stream; } } // namespace tts
DGLPyTorch/DrugDiscovery/SE3Transformer/se3_transformer/model
model
basis
# Copyright (c) 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. # # SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES # SPDX-License-Identifier: MIT from functools import lru_cache from typing import Dict, List import e3nn.o3 as o3 import torch import torch.nn.functional as F from torch import Tensor from torch.cuda.nvtx import range as nvtx_range from se3_transformer.runtime.utils import degree_to_dim torch._C._jit_set_profiling_executor(False) torch._C._jit_set_profiling_mode(False) @lru_cache(maxsize=None) def get_clebsch_gordon(J: int, d_in: int, d_out: int, device) -> Tensor: """ Get the (cached) Q^{d_out,d_in}_J matrices from equation (8) """ return o3.wigner_3j(J, d_in, d_out, dtype=torch.float64, device=device).permute(2, 1, 0) @lru_cache(maxsize=None) def get_all_clebsch_gordon(max_degree: int, device) -> List[List[Tensor]]: all_cb = [] for d_in in range(max_degree + 1): for d_out in range(max_degree + 1): K_Js = [] for J in range(abs(d_in - d_out), d_in + d_out + 1): K_Js.append(get_clebsch_gordon(J, d_in, d_out, device)) all_cb.append(K_Js) return all_cb def get_spherical_harmonics(relative_pos: Tensor, max_degree: int) -> List[Tensor]: all_degrees = list(range(2 * max_degree + 1)) sh = o3.spherical_harmonics(all_degrees, relative_pos, normalize=True) return torch.split(sh, [degree_to_dim(d) for d in all_degrees], dim=1) @torch.jit.script def get_basis_script(max_degree: int, use_pad_trick: bool, spherical_harmonics: List[Tensor], clebsch_gordon: List[List[Tensor]], amp: bool) -> Dict[str, Tensor]: """ Compute pairwise bases matrices for degrees up to max_degree :param max_degree: Maximum input or output degree :param use_pad_trick: Pad some of the odd dimensions for a better use of Tensor Cores :param spherical_harmonics: List of computed spherical harmonics :param clebsch_gordon: List of computed CB-coefficients :param amp: When true, return bases in FP16 precision """ basis = {} idx = 0 # Double for loop instead of product() because of JIT script for d_in in range(max_degree + 1): for d_out in range(max_degree + 1): key = f'{d_in},{d_out}' K_Js = [] for freq_idx, J in enumerate(range(abs(d_in - d_out), d_in + d_out + 1)): Q_J = clebsch_gordon[idx][freq_idx] K_Js.append(torch.einsum('n f, k l f -> n l k', spherical_harmonics[J].float(), Q_J.float())) basis[key] = torch.stack(K_Js, 2) # Stack on second dim so order is n l f k if amp: basis[key] = basis[key].half() if use_pad_trick: basis[key] = 
F.pad(basis[key], (0, 1)) # Pad the k dimension, that can be sliced later idx += 1 return basis @torch.jit.script def update_basis_with_fused(basis: Dict[str, Tensor], max_degree: int, use_pad_trick: bool, fully_fused: bool) -> Dict[str, Tensor]: """ Update the basis dict with partially and optionally fully fused bases """ num_edges = basis['0,0'].shape[0] device = basis['0,0'].device dtype = basis['0,0'].dtype sum_dim = sum([degree_to_dim(d) for d in range(max_degree + 1)]) # Fused per output degree for d_out in range(max_degree + 1): sum_freq = sum([degree_to_dim(min(d, d_out)) for d in range(max_degree + 1)]) basis_fused = torch.zeros(num_edges, sum_dim, sum_freq, degree_to_dim(d_out) + int(use_pad_trick), device=device, dtype=dtype) acc_d, acc_f = 0, 0 for d_in in range(max_degree + 1): basis_fused[:, acc_d:acc_d + degree_to_dim(d_in), acc_f:acc_f + degree_to_dim(min(d_out, d_in)), :degree_to_dim(d_out)] = basis[f'{d_in},{d_out}'][:, :, :, :degree_to_dim(d_out)] acc_d += degree_to_dim(d_in) acc_f += degree_to_dim(min(d_out, d_in)) basis[f'out{d_out}_fused'] = basis_fused # Fused per input degree for d_in in range(max_degree + 1): sum_freq = sum([degree_to_dim(min(d, d_in)) for d in range(max_degree + 1)]) basis_fused = torch.zeros(num_edges, degree_to_dim(d_in), sum_freq, sum_dim, device=device, dtype=dtype) acc_d, acc_f = 0, 0 for d_out in range(max_degree + 1): basis_fused[:, :, acc_f:acc_f + degree_to_dim(min(d_out, d_in)), acc_d:acc_d + degree_to_dim(d_out)] \ = basis[f'{d_in},{d_out}'][:, :, :, :degree_to_dim(d_out)] acc_d += degree_to_dim(d_out) acc_f += degree_to_dim(min(d_out, d_in)) basis[f'in{d_in}_fused'] = basis_fused if fully_fused: # Fully fused # Double sum this way because of JIT script sum_freq = sum([ sum([degree_to_dim(min(d_in, d_out)) for d_in in range(max_degree + 1)]) for d_out in range(max_degree + 1) ]) basis_fused = torch.zeros(num_edges, sum_dim, sum_freq, sum_dim, device=device, dtype=dtype) acc_d, acc_f = 0, 0 for d_out in range(max_degree + 1): b = basis[f'out{d_out}_fused'] basis_fused[:, :, acc_f:acc_f + b.shape[2], acc_d:acc_d + degree_to_dim(d_out)] = b[:, :, :, :degree_to_dim(d_out)] acc_f += b.shape[2] acc_d += degree_to_dim(d_out) basis['fully_fused'] = basis_fused del basis['0,0'] # We know that the basis for l = k = 0 is filled with a constant return basis def get_basis(relative_pos: Tensor, max_degree: int = 4, compute_gradients: bool = False, use_pad_trick: bool = False, amp: bool = False) -> Dict[str, Tensor]: with nvtx_range('spherical harmonics'): spherical_harmonics = get_spherical_harmonics(relative_pos, max_degree) with nvtx_range('CB coefficients'): clebsch_gordon = get_all_clebsch_gordon(max_degree, relative_pos.device) with torch.autograd.set_grad_enabled(compute_gradients): with nvtx_range('bases'): basis = get_basis_script(max_degree=max_degree, use_pad_trick=use_pad_trick, spherical_harmonics=spherical_harmonics, clebsch_gordon=clebsch_gordon, amp=amp) return basis
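A minimal usage sketch for `get_basis` (the random offsets stand in for real edge vectors; the import path is assumed from this file's location):

import torch
from se3_transformer.model.basis import get_basis  # assumed import path

rel_pos = torch.randn(128, 3)              # 128 edges, 3-D relative positions
basis = get_basis(rel_pos, max_degree=2)   # dict keyed by 'd_in,d_out' pairs
for key, tensor in basis.items():
    # each value has shape (num_edges, 2*d_in+1, num_freq, 2*d_out+1)
    print(key, tuple(tensor.shape))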
PyTorch/Classification/ConvNets
ConvNets
README
# Convolutional Network for Image Classification in PyTorch

In this repository you will find implementations of various image classification models.

Detailed information on each model can be found below:

## Table Of Contents

* [Models](#models)
* [Validation accuracy results](#validation-accuracy-results)
* [Training performance results](#training-performance-results)
  * [Training performance: NVIDIA DGX A100 (8x A100 80GB)](#training-performance-nvidia-dgx-a100-8x-a100-80gb)
  * [Training performance: NVIDIA DGX-1 16GB (8x V100 16GB)](#training-performance-nvidia-dgx-1-16gb-8x-v100-16gb)
* [Model comparison](#model-comparison)
  * [Accuracy vs FLOPS](#accuracy-vs-flops)
  * [Latency vs Throughput on different batch sizes](#latency-vs-throughput-on-different-batch-sizes)

## Models

The following table provides links to where you can find additional information on each model:

| **Model** | **Link**|
|:-:|:-:|
| resnet50 | [README](./resnet50v1.5/README.md) |
| resnext101-32x4d | [README](./resnext101-32x4d/README.md) |
| se-resnext101-32x4d | [README](./se-resnext101-32x4d/README.md) |
| EfficientNet | [README](./efficientnet/README.md) |

## Validation accuracy results

Our results were obtained by running the applicable training scripts in the 20.12 PyTorch NGC container on NVIDIA DGX-1 with (8x V100 16GB) GPUs. The specific training script that was run is documented in the corresponding model's README.

The following table shows the validation accuracy results of all the classification models side-by-side.

| **Model** | **Mixed Precision Top1** | **Mixed Precision Top5** | **32 bit Top1** | **32 bit Top5** |
|:----------------------:|:------------------------:|:------------------------:|:---------------:|:---------------:|
| efficientnet-b0 | 77.63 | 93.82 | 77.31 | 93.76 |
| efficientnet-b4 | 82.98 | 96.44 | 82.92 | 96.43 |
| efficientnet-widese-b0 | 77.89 | 94.00 | 77.97 | 94.05 |
| efficientnet-widese-b4 | 83.28 | 96.45 | 83.30 | 96.47 |
| resnet50 | 78.60 | 94.19 | 78.69 | 94.16 |
| resnext101-32x4d | 80.43 | 95.06 | 80.40 | 95.04 |
| se-resnext101-32x4d | 81.00 | 95.48 | 81.09 | 95.45 |

## Training performance results

### Training performance: NVIDIA DGX A100 (8x A100 80GB)

Our results were obtained by running the applicable training scripts in the 21.03 PyTorch NGC container on NVIDIA DGX A100 with (8x A100 80GB) GPUs. Performance numbers (in images per second) were averaged over an entire training epoch. The specific training script that was run is documented in the corresponding model's README.

The following table shows the training performance results of all the classification models side-by-side.

| **Model** | **Mixed Precision** | **TF32** | **Mixed Precision Speedup** |
|:----------------------:|:-------------------:|:----------:|:---------------------------:|
| efficientnet-b0 | 16652 img/s | 8193 img/s | 2.03 x |
| efficientnet-b4 | 2570 img/s | 1223 img/s | 2.1 x |
| efficientnet-widese-b0 | 16368 img/s | 8244 img/s | 1.98 x |
| efficientnet-widese-b4 | 2585 img/s | 1223 img/s | 2.11 x |
| resnet50 | 16621 img/s | 7248 img/s | 2.29 x |
| resnext101-32x4d | 7925 img/s | 3471 img/s | 2.28 x |
| se-resnext101-32x4d | 5779 img/s | 2991 img/s | 1.93 x |

### Training performance: NVIDIA DGX-1 16GB (8x V100 16GB)

Our results were obtained by running the applicable training scripts in the 21.03 PyTorch NGC container on NVIDIA DGX-1 with (8x V100 16GB) GPUs.
Performance numbers (in images per second) were averaged over an entire training epoch. The specific training script that was run is documented in the corresponding model's README. The following table shows the training accuracy results of all the classification models side-by-side. | **Model** | **Mixed Precision** | **FP32** | **Mixed Precision Speedup** | |:----------------------:|:-------------------:|:----------:|:---------------------------:| | efficientnet-b0 | 7789 img/s | 4672 img/s | 1.66 x | | efficientnet-b4 | 1366 img/s | 616 img/s | 2.21 x | | efficientnet-widese-b0 | 7875 img/s | 4592 img/s | 1.71 x | | efficientnet-widese-b4 | 1356 img/s | 612 img/s | 2.21 x | | resnet50 | 8322 img/s | 2855 img/s | 2.91 x | | resnext101-32x4d | 4065 img/s | 1133 img/s | 3.58 x | | se-resnext101-32x4d | 2971 img/s | 1004 img/s | 2.95 x | ## Model Comparison ### Accuracy vs FLOPS ![ACCvsFLOPS](./img/ACCvsFLOPS.png) Plot describes relationship between floating point operations needed for computing forward pass on a 224px x 224px image, for the implemented models. Dot size indicates number of trainable parameters. ### Latency vs Throughput on different batch sizes ![LATvsTHR](./img/LATvsTHR.png) Plot describes relationship between inference latency, throughput and batch size for the implemented models.
TensorFlow/Segmentation/UNet_Industrial/model/layers
layers
conv2d
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# ==============================================================================
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================

import tensorflow as tf

from model.layers.utils import _log_hparams

__all__ = ['conv2d']


def conv2d(
    inputs,
    n_channels=8,
    kernel_size=(3, 3),
    strides=(1, 1),
    padding='VALID',
    data_format='NHWC',
    dilation_rate=(1, 1),
    use_bias=True,
    kernel_initializer=tf.variance_scaling_initializer(),
    bias_initializer=tf.zeros_initializer(),
    trainable=True
):

    if data_format not in ['NHWC', 'NCHW']:
        raise ValueError("Unknown data format: `%s` (accepted: ['NHWC', 'NCHW'])" % data_format)

    if padding.upper() not in ['SAME', 'VALID']:
        raise ValueError("Unknown padding: `%s` (accepted: ['SAME', 'VALID'])" % padding.upper())

    net = tf.layers.conv2d(
        inputs,
        filters=n_channels,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        dilation_rate=dilation_rate,
        data_format='channels_last' if data_format == 'NHWC' else 'channels_first',
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        trainable=trainable,
        activation=None
    )

    _log_hparams(
        classname='Conv2D',
        layername=net.name,
        n_channels=n_channels,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        use_bias=use_bias,
        trainable=trainable,
        out_shape=str(net.get_shape()),
        out_dtype=net.dtype
    )

    return net
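A minimal usage sketch for the helper above (TF1 graph mode; the import path, placeholder shape, and channel count are illustrative assumptions, not part of the repository):

import tensorflow as tf

from model.layers.conv2d import conv2d  # import path assumed from this repo layout

# Illustrative input: a batch of 512x512 grayscale images in NHWC layout.
images = tf.placeholder(tf.float32, shape=[None, 512, 512, 1])

# 3x3 convolution producing 16 feature maps; SAME padding preserves spatial size.
features = conv2d(images, n_channels=16, kernel_size=(3, 3), padding='SAME')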
TensorFlow/Detection/SSD/models
models
README
# TensorFlow Models

This repository contains a number of different models implemented in [TensorFlow](https://www.tensorflow.org):

The [official models](https://github.com/tensorflow/models/tree/master/official) are a collection of example models that use TensorFlow's high-level APIs. They are intended to be well-maintained, tested, and kept up to date with the latest stable TensorFlow API. They should also be reasonably optimized for fast performance while still being easy to read. We especially recommend that newer TensorFlow users start here.

The [research models](https://github.com/tensorflow/models/tree/master/research) are a large collection of models implemented in TensorFlow by researchers. They are not officially supported or available in release branches; it is up to the individual researchers to maintain the models and/or provide support on issues and pull requests.

The [samples folder](https://github.com/tensorflow/models/tree/master/samples) contains code snippets and smaller models that demonstrate features of TensorFlow, including code presented in various blog posts.

The [tutorials folder](https://github.com/tensorflow/models/tree/master/tutorials) is a collection of models described in the [TensorFlow tutorials](https://www.tensorflow.org/tutorials/).

## Contribution guidelines

If you want to contribute to models, be sure to review the [contribution guidelines](CONTRIBUTING.md).

## License

[Apache License 2.0](LICENSE)
TensorFlow/Detection/SSD/models/research/object_detection/core
core
standard_fields
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Contains classes specifying naming conventions used for object detection.

Specifies:
  InputDataFields: standard fields used by reader/preprocessor/batcher.
  DetectionResultFields: standard fields returned by object detector.
  BoxListFields: standard field used by BoxList.
  TfExampleFields: standard fields for tf-example data format (go/tf-example).
"""


class InputDataFields(object):
  """Names for the input tensors.

  Holds the standard data field names to use for identifying input tensors.
  This should be used by the decoder to identify keys for the returned
  tensor_dict containing input tensors. And it should be used by the model to
  identify the tensors it needs.

  Attributes:
    image: image.
    image_additional_channels: additional channels.
    original_image: image in the original input size.
    original_image_spatial_shape: spatial shape of the image in the original
      input size.
    key: unique key corresponding to image.
    source_id: source of the original image.
    filename: original filename of the dataset (without common path).
    groundtruth_image_classes: image-level class labels.
    groundtruth_image_confidences: image-level class confidences.
    groundtruth_boxes: coordinates of the ground truth boxes in the image.
    groundtruth_classes: box-level class labels.
    groundtruth_confidences: box-level class confidences. The shape should be
      the same as the shape of groundtruth_classes.
    groundtruth_label_types: box-level label types (e.g. explicit negative).
    groundtruth_is_crowd: [DEPRECATED, use groundtruth_group_of instead] is the
      groundtruth a single object or a crowd.
    groundtruth_area: area of a groundtruth segment.
    groundtruth_difficult: is a `difficult` object.
    groundtruth_group_of: is a `group_of` objects, e.g. multiple objects of the
      same class, forming a connected group, where instances are heavily
      occluding each other.
    proposal_boxes: coordinates of object proposal boxes.
    proposal_objectness: objectness score of each proposal.
    groundtruth_instance_masks: ground truth instance masks.
    groundtruth_instance_boundaries: ground truth instance boundaries.
    groundtruth_instance_classes: instance mask-level class labels.
    groundtruth_keypoints: ground truth keypoints.
    groundtruth_keypoint_visibilities: ground truth keypoint visibilities.
    groundtruth_label_weights: groundtruth label weights.
    groundtruth_weights: groundtruth weight factor for bounding boxes.
    num_groundtruth_boxes: number of groundtruth boxes.
    is_annotated: whether an image has been labeled or not.
    true_image_shape: true shapes of images in the resized images, as resized
      images can be padded with zeros.
    multiclass_scores: the label score per class for each box.
  """
  image = 'image'
  image_additional_channels = 'image_additional_channels'
  original_image = 'original_image'
  original_image_spatial_shape = 'original_image_spatial_shape'
  key = 'key'
  source_id = 'source_id'
  filename = 'filename'
  groundtruth_image_classes = 'groundtruth_image_classes'
  groundtruth_image_confidences = 'groundtruth_image_confidences'
  groundtruth_boxes = 'groundtruth_boxes'
  groundtruth_classes = 'groundtruth_classes'
  groundtruth_confidences = 'groundtruth_confidences'
  groundtruth_label_types = 'groundtruth_label_types'
  groundtruth_is_crowd = 'groundtruth_is_crowd'
  groundtruth_area = 'groundtruth_area'
  groundtruth_difficult = 'groundtruth_difficult'
  groundtruth_group_of = 'groundtruth_group_of'
  proposal_boxes = 'proposal_boxes'
  proposal_objectness = 'proposal_objectness'
  groundtruth_instance_masks = 'groundtruth_instance_masks'
  groundtruth_instance_boundaries = 'groundtruth_instance_boundaries'
  groundtruth_instance_classes = 'groundtruth_instance_classes'
  groundtruth_keypoints = 'groundtruth_keypoints'
  groundtruth_keypoint_visibilities = 'groundtruth_keypoint_visibilities'
  groundtruth_label_weights = 'groundtruth_label_weights'
  groundtruth_weights = 'groundtruth_weights'
  num_groundtruth_boxes = 'num_groundtruth_boxes'
  is_annotated = 'is_annotated'
  true_image_shape = 'true_image_shape'
  multiclass_scores = 'multiclass_scores'


class DetectionResultFields(object):
  """Naming conventions for storing the output of the detector.

  Attributes:
    source_id: source of the original image.
    key: unique key corresponding to image.
    detection_boxes: coordinates of the detection boxes in the image.
    detection_scores: detection scores for the detection boxes in the image.
    detection_classes: detection-level class labels.
    detection_masks: contains a segmentation mask for each detection box.
    detection_boundaries: contains an object boundary for each detection box.
    detection_keypoints: contains detection keypoints for each detection box.
    num_detections: number of detections in the batch.
  """
  source_id = 'source_id'
  key = 'key'
  detection_boxes = 'detection_boxes'
  detection_scores = 'detection_scores'
  detection_classes = 'detection_classes'
  detection_masks = 'detection_masks'
  detection_boundaries = 'detection_boundaries'
  detection_keypoints = 'detection_keypoints'
  num_detections = 'num_detections'


class BoxListFields(object):
  """Naming conventions for BoxLists.

  Attributes:
    boxes: bounding box coordinates.
    classes: classes per bounding box.
    scores: scores per bounding box.
    weights: sample weights per bounding box.
    confidences: box-level class confidences.
    objectness: objectness score per bounding box.
    masks: masks per bounding box.
    boundaries: boundaries per bounding box.
    keypoints: keypoints per bounding box.
    keypoint_heatmaps: keypoint heatmaps per bounding box.
    is_crowd: is_crowd annotation per bounding box.
  """
  boxes = 'boxes'
  classes = 'classes'
  scores = 'scores'
  weights = 'weights'
  confidences = 'confidences'
  objectness = 'objectness'
  masks = 'masks'
  boundaries = 'boundaries'
  keypoints = 'keypoints'
  keypoint_heatmaps = 'keypoint_heatmaps'
  is_crowd = 'is_crowd'


class TfExampleFields(object):
  """TF-example proto feature names for object detection.

  Holds the standard feature names to load from an Example proto for object
  detection.

  Attributes:
    image_encoded: JPEG encoded string
    image_format: image format, e.g. "JPEG"
    filename: filename
    channels: number of channels of image
    colorspace: colorspace, e.g. "RGB"
    height: height of image in pixels, e.g. 462
    width: width of image in pixels, e.g. 581
    source_id: original source of the image
    image_class_text: image-level label in text format
    image_class_label: image-level label in numerical format
    object_class_text: labels in text format, e.g. ["person", "cat"]
    object_class_label: labels in numbers, e.g. [16, 8]
    object_bbox_xmin: xmin coordinates of groundtruth box, e.g. 10, 30
    object_bbox_xmax: xmax coordinates of groundtruth box, e.g. 50, 40
    object_bbox_ymin: ymin coordinates of groundtruth box, e.g. 40, 50
    object_bbox_ymax: ymax coordinates of groundtruth box, e.g. 80, 70
    object_view: viewpoint of object, e.g. ["frontal", "left"]
    object_truncated: is object truncated, e.g. [true, false]
    object_occluded: is object occluded, e.g. [true, false]
    object_difficult: is object difficult, e.g. [true, false]
    object_group_of: is object a single object or a group of objects
    object_depiction: is object a depiction
    object_is_crowd: [DEPRECATED, use object_group_of instead] is the object a
      single object or a crowd
    object_segment_area: the area of the segment.
    object_weight: a weight factor for the object's bounding box.
    instance_masks: instance segmentation masks.
    instance_boundaries: instance boundaries.
    instance_classes: Classes for each instance segmentation mask.
    detection_class_label: class label in numbers.
    detection_bbox_ymin: ymin coordinates of a detection box.
    detection_bbox_xmin: xmin coordinates of a detection box.
    detection_bbox_ymax: ymax coordinates of a detection box.
    detection_bbox_xmax: xmax coordinates of a detection box.
    detection_score: detection score for the class label and box.
  """
  image_encoded = 'image/encoded'
  image_format = 'image/format'  # format is reserved keyword
  filename = 'image/filename'
  channels = 'image/channels'
  colorspace = 'image/colorspace'
  height = 'image/height'
  width = 'image/width'
  source_id = 'image/source_id'
  image_class_text = 'image/class/text'
  image_class_label = 'image/class/label'
  object_class_text = 'image/object/class/text'
  object_class_label = 'image/object/class/label'
  object_bbox_ymin = 'image/object/bbox/ymin'
  object_bbox_xmin = 'image/object/bbox/xmin'
  object_bbox_ymax = 'image/object/bbox/ymax'
  object_bbox_xmax = 'image/object/bbox/xmax'
  object_view = 'image/object/view'
  object_truncated = 'image/object/truncated'
  object_occluded = 'image/object/occluded'
  object_difficult = 'image/object/difficult'
  object_group_of = 'image/object/group_of'
  object_depiction = 'image/object/depiction'
  object_is_crowd = 'image/object/is_crowd'
  object_segment_area = 'image/object/segment/area'
  object_weight = 'image/object/weight'
  instance_masks = 'image/segmentation/object'
  instance_boundaries = 'image/boundaries/object'
  instance_classes = 'image/segmentation/object/class'
  detection_class_label = 'image/detection/label'
  detection_bbox_ymin = 'image/detection/bbox/ymin'
  detection_bbox_xmin = 'image/detection/bbox/xmin'
  detection_bbox_ymax = 'image/detection/bbox/ymax'
  detection_bbox_xmax = 'image/detection/bbox/xmax'
  detection_score = 'image/detection/score'
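A short sketch of how these constants are meant to be used, so decoders and models agree on key names without hard-coded strings (the NumPy arrays stand in for tensors and are illustrative):

import numpy as np

from object_detection.core import standard_fields as fields

# A decoder would populate a tensor_dict keyed by InputDataFields.
tensor_dict = {
    fields.InputDataFields.image: np.zeros((300, 300, 3), dtype=np.uint8),
    fields.InputDataFields.groundtruth_boxes: np.array([[0.1, 0.1, 0.5, 0.5]]),
    fields.InputDataFields.groundtruth_classes: np.array([16]),
}

# A model then looks the same tensors up by the same names.
boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]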
TensorFlow2/Recommendation/DLRM_and_DCNv2/tensorflow-dot-based-interact/tensorflow_dot_based_interact/python/ops
ops
__init__
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
TensorFlow/Segmentation/UNet_Industrial/model/layers
layers
array_ops
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# ==============================================================================
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================

import tensorflow as tf

from model.layers.utils import _log_hparams

__all__ = ['concat', 'flatten', 'reshape', 'squeeze', 'upscale_2d']


def concat(values, axis, name='concat'):

    net = tf.concat(values=values, axis=axis, name=name)

    _log_hparams(classname='Concat', layername=net.name, axis=axis,
                 out_shape=str(net.get_shape()), out_dtype=net.dtype)

    return net


def flatten(inputs, name='flatten'):

    net = tf.layers.flatten(inputs, name=name)

    _log_hparams(classname='Flatten', layername=net.name,
                 out_shape=str(net.get_shape()), out_dtype=net.dtype)

    return net


def reshape(tensor, shape, name='reshape'):

    net = tf.reshape(tensor, shape=shape, name=name)

    _log_hparams(
        classname='Reshape',
        layername=net.name,
        shape=shape,
        out_shape=str(net.get_shape()),
        out_dtype=net.dtype
    )

    return net


def squeeze(tensor, axis, name='squeeze'):

    net = tf.squeeze(tensor, axis=axis, name=name)

    _log_hparams(
        classname='Squeeze',
        layername=net.name,
        axis=axis,
        out_shape=str(net.get_shape()),
        out_dtype=net.dtype
    )

    return net


def upscale_2d(inputs, size, is_scale=True, method=0, align_corners=True, data_format='NHWC', name='upsample2d_layer'):

    if not (isinstance(size, (list, tuple)) and len(size) == 2):
        raise AssertionError("`size` must be a list or tuple of length 2")

    if data_format not in ['NHWC', 'NCHW']:
        raise ValueError("Unknown data format received: `%s` (allowed: `NHWC`, `NCHW`)" % data_format)

    input_shape = inputs.get_shape()

    if len(inputs.get_shape()) == 3:
        if is_scale:
            size_h = size[0] * int(inputs.get_shape()[0])
            size_w = size[1] * int(inputs.get_shape()[1])
            _size = [size_h, size_w]
        else:
            _size = size

    elif len(inputs.get_shape()) == 4:
        if data_format == 'NCHW':
            inputs = tf.transpose(inputs, [0, 2, 3, 1])  # NCHW => NHWC

        if is_scale:
            size_h = size[0] * int(inputs.get_shape()[1])
            size_w = size[1] * int(inputs.get_shape()[2])
            _size = [size_h, size_w]
        else:
            _size = size

    else:
        raise Exception("Unsupported input shape: %s" % str(inputs.get_shape()))

    with tf.variable_scope(name):
        net = tf.image.resize_images(inputs, size=_size, method=method, align_corners=align_corners)

    if data_format == 'NCHW' and len(inputs.get_shape()) == 4:
        net = tf.transpose(net, [0, 3, 1, 2])  # NHWC => NCHW

    _log_hparams(
        classname='Upscale2D',
        layername=net.name,
        size=size,
        is_scale=is_scale,
        method=method,
        align_corners=align_corners,
        data_format=data_format,
        input_shape=str(input_shape),
        out_shape=str(net.get_shape()),
        out_dtype=net.dtype
    )

    return net
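A brief usage sketch for upscale_2d (TF1 graph mode; the import path and tensor shape are illustrative assumptions):

import tensorflow as tf

from model.layers.array_ops import upscale_2d  # import path assumed from this repo layout

feature_map = tf.placeholder(tf.float32, shape=[None, 64, 64, 32])

# With is_scale=True, `size` is a per-axis scale factor rather than an absolute
# output size, so this doubles the spatial resolution: -> [None, 128, 128, 32].
upsampled = upscale_2d(feature_map, size=(2, 2), is_scale=True)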
DGLPyTorch/DrugDiscovery/SE3Transformer/se3_transformer/data_loading
data_loading
data_module
# Copyright (c) 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES
# SPDX-License-Identifier: MIT

import torch.distributed as dist
from abc import ABC
from torch.utils.data import DataLoader, DistributedSampler, Dataset

from se3_transformer.runtime.utils import get_local_rank


def _get_dataloader(dataset: Dataset, shuffle: bool, **kwargs) -> DataLoader:
    # Classic or distributed dataloader depending on the context
    sampler = DistributedSampler(dataset, shuffle=shuffle) if dist.is_initialized() else None
    return DataLoader(dataset, shuffle=(shuffle and sampler is None), sampler=sampler, **kwargs)


class DataModule(ABC):
    """ Abstract DataModule. Children must define self.ds_{train | val | test}. """

    def __init__(self, **dataloader_kwargs):
        super().__init__()
        if get_local_rank() == 0:
            self.prepare_data()

        # Wait until rank zero has prepared the data (download, preprocessing, ...)
        if dist.is_initialized():
            dist.barrier(device_ids=[get_local_rank()])

        self.dataloader_kwargs = {'pin_memory': True,
                                  'persistent_workers': dataloader_kwargs.get('num_workers', 0) > 0,
                                  **dataloader_kwargs}
        self.ds_train, self.ds_val, self.ds_test = None, None, None

    def prepare_data(self):
        """ Method called only once per node. Put here any downloading or preprocessing """
        pass

    def train_dataloader(self) -> DataLoader:
        return _get_dataloader(self.ds_train, shuffle=True, **self.dataloader_kwargs)

    def val_dataloader(self) -> DataLoader:
        return _get_dataloader(self.ds_val, shuffle=False, **self.dataloader_kwargs)

    def test_dataloader(self) -> DataLoader:
        return _get_dataloader(self.ds_test, shuffle=False, **self.dataloader_kwargs)
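A minimal sketch of a concrete subclass, assuming a single-process run (so get_local_rank() returns 0 and no distributed sampler is created); the random tensors are placeholders, and the only contract is to populate ds_train / ds_val / ds_test:

import torch
from torch.utils.data import TensorDataset

from se3_transformer.data_loading.data_module import DataModule


class RandomDataModule(DataModule):
    """Toy DataModule backed by random tensors (illustrative only)."""

    def __init__(self, **dataloader_kwargs):
        super().__init__(**dataloader_kwargs)
        x, y = torch.randn(1000, 16), torch.randn(1000, 1)
        self.ds_train = TensorDataset(x[:800], y[:800])
        self.ds_val = TensorDataset(x[800:900], y[800:900])
        self.ds_test = TensorDataset(x[900:], y[900:])


dm = RandomDataModule(batch_size=32, num_workers=2)
for batch in dm.train_dataloader():
    pass  # training loop would go here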
TensorFlow2/Segmentation/MaskRCNN/mrcnn_tf2/runtime
runtime
run
import logging
import os

import tensorflow as tf

import dllogger
from mrcnn_tf2.model.mask_rcnn import MaskRCNN
from mrcnn_tf2.runtime.callbacks import DLLoggerMetricsCallback, DLLoggerPerfCallback, PretrainedWeightsLoadingCallback
from mrcnn_tf2.runtime.evaluation import evaluate
from mrcnn_tf2.runtime.learning_rate import PiecewiseConstantWithWarmupSchedule
from mrcnn_tf2.runtime.weights_mapping import WEIGHTS_MAPPING


def run_training(dataset, params):
    setup(params)

    strategy = tf.distribute.MirroredStrategy()
    params.replicas = strategy.num_replicas_in_sync
    params.global_train_batch_size = params.train_batch_size * params.replicas
    logging.info(f'Distributed Strategy is activated for {params.replicas} device(s)')

    with strategy.scope():

        learning_rate = PiecewiseConstantWithWarmupSchedule(
            init_value=params.init_learning_rate,
            # scale boundaries from epochs to steps
            boundaries=[
                int(b * dataset.train_size / params.global_train_batch_size)
                for b in params.learning_rate_boundaries
            ],
            values=params.learning_rate_values,
            # scale only by local BS as distributed strategy later scales it by number of replicas
            scale=params.train_batch_size
        )

        optimizer = tf.keras.optimizers.SGD(
            learning_rate=learning_rate,
            momentum=params.momentum
        )

        mask_rcnn_model = create_model(params)

        mask_rcnn_model.compile(
            optimizer=optimizer
        )

    # distributed strategy splits data between instances so we need global BS
    train_data = dataset.train_fn(batch_size=params.global_train_batch_size)

    if params.eagerly:
        mask_rcnn_model.run_eagerly = True
        logging.warning('Model is running in eager mode which might reduce performance')

    mask_rcnn_model.fit(
        x=train_data,
        epochs=params.epochs,
        steps_per_epoch=params.steps_per_epoch or (dataset.train_size // params.global_train_batch_size),
        callbacks=list(create_callbacks(params)),
        verbose=0
    )


def run_evaluation(dataset, params):
    setup(params)

    mask_rcnn_model = create_model(params)

    if params.eagerly:
        mask_rcnn_model.run_eagerly = True
        logging.warning('Model is running in eager mode which might reduce performance')

    predictions = mask_rcnn_model.predict(
        x=dataset.eval_fn(params.eval_batch_size),
        callbacks=list(create_callbacks(params))
    )

    eval_results = evaluate(
        predictions=predictions,
        eval_file=params.eval_file,
        include_mask=params.include_mask
    )

    dllogger.log(
        step=tuple(),
        data={k: float(v) for k, v in eval_results.items()}
    )


def run_inference(dataset, params):
    setup(params)

    mask_rcnn_model = create_model(params)

    if params.eagerly:
        mask_rcnn_model.run_eagerly = True
        logging.warning('Model is running in eager mode which might reduce performance')

    mask_rcnn_model.predict(
        x=dataset.eval_fn(params.eval_batch_size),
        callbacks=list(create_callbacks(params))
    )


def setup(params):
    # enforces that AMP is enabled using --amp and not env var
    # mainly for NGC where it is enabled by default
    os.environ['TF_ENABLE_AUTO_MIXED_PRECISION'] = '0'

    if params.xla:
        tf.config.optimizer.set_jit(True)
        logging.info('XLA is activated')

    if params.amp:
        policy = tf.keras.mixed_precision.experimental.Policy("mixed_float16", loss_scale="dynamic")
        tf.keras.mixed_precision.experimental.set_policy(policy)
        logging.info('AMP is activated')


def create_model(params):
    model = MaskRCNN(
        params=params,
        trainable='train' in params.mode
    )

    checkpoint_path = tf.train.latest_checkpoint(params.model_dir)

    # if there is no checkpoint we are done
    if checkpoint_path is None:
        logging.info(f"No checkpoint was found in: {params.model_dir}")
        return model

    model.load_weights(checkpoint_path).expect_partial()
    logging.info(f"Loaded weights from checkpoint: {checkpoint_path}")

    # don't load backbone weights so they do not override the checkpoint
    if params.backbone_checkpoint:
        params.backbone_checkpoint = None
        logging.info("Pretrained backbone weights will not be loaded")

    return model


def create_callbacks(params):
    yield DLLoggerMetricsCallback(
        dllogger=dllogger,
        log_every=params.log_every
    )

    yield DLLoggerPerfCallback(
        dllogger=dllogger,
        batch_sizes={
            'train': params.train_batch_size * getattr(params, 'replicas', 1),
            'test': params.eval_batch_size * getattr(params, 'replicas', 1),
            'predict': params.eval_batch_size * getattr(params, 'replicas', 1)
        },
        warmup_steps=params.log_warmup_steps,
        log_every=params.log_every
    )

    if params.backbone_checkpoint:
        yield PretrainedWeightsLoadingCallback(
            checkpoint_path=params.backbone_checkpoint,
            mapping=lambda name: WEIGHTS_MAPPING.get(name.replace(':0', ''), name)
        )

    yield tf.keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(params.model_dir, params.checkpoint_name_format),
        verbose=1
    )

    if params.log_tensorboard:
        yield tf.keras.callbacks.TensorBoard(
            log_dir=params.log_tensorboard,
            update_freq='batch'
        )
Tools/PyTorch/TimeSeriesPredictionPlatform/distributed_launcher
distributed_launcher
setup
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved

from setuptools import find_namespace_packages, setup

setup(
    name="hydra-torch-dist-launcher",
    version="0.1",
    author="Jan Baczek",
    author_email="jbaczek@nvidia.com",
    description="Torch distributed launcher plugin",
    packages=find_namespace_packages(include=["hydra_plugins.*"]),
    classifiers=[
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
    install_requires=["hydra-core==1.1.1"],
    include_package_data=True,
)
TensorFlow2/Recommendation/DLRM_and_DCNv2
DLRM_and_DCNv2
main
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel (tgrel@nvidia.com)

from absl import app, flags
import os
import sys
import json

from distributed_embeddings.python.layers import dist_model_parallel as dmp


# Define the flags first before importing TensorFlow.
# Otherwise, enabling XLA-Lite would be impossible with a command-line flag
def define_common_flags():
    flags.DEFINE_enum("mode", default="train", enum_values=['inference', 'eval', 'train'],
                      help='Choose "train" to train the model, "inference" to benchmark inference'
                           ' and "eval" to run validation')

    # Debug parameters
    flags.DEFINE_bool("run_eagerly", default=False,
                      help="Disable all tf.function decorators for debugging")

    flags.DEFINE_bool("tfdata_debug", default=False,
                      help="Run tf.data operations eagerly (experimental)")

    flags.DEFINE_integer("seed", default=None, help="Random seed")

    flags.DEFINE_bool("embedding_zeros_initializer", default=False,
                      help="Initialize the embeddings to zeros. This takes much less time so it's useful"
                           " for benchmarking and debugging.")

    flags.DEFINE_bool("embedding_trainable", default=True,
                      help="If True the embeddings will be trainable, otherwise frozen")

    # Hardware and performance features
    flags.DEFINE_bool("amp", default=False, help="Enable automatic mixed precision")

    flags.DEFINE_bool("use_mde_embeddings", default=True,
                      help="Use the embedding implementation from the TensorFlow Distributed Embeddings package")

    flags.DEFINE_bool("concat_embedding", default=False,
                      help="Concatenate embeddings with the same dimension. Only supported for singleGPU.")

    flags.DEFINE_string("dist_strategy", default='memory_balanced',
                        help="Strategy for the Distributed Embeddings to use. Supported options are"
                             " 'memory_balanced', 'basic' and 'memory_optimized'")

    flags.DEFINE_integer("column_slice_threshold", default=5 * 1000 * 1000 * 1000,
                         help='Number of elements above which a distributed embedding will be sliced across'
                              ' multiple devices')

    flags.DEFINE_integer("row_slice_threshold", default=10 * 1000 * 1000 * 1000,
                         help='Number of elements above which a distributed embedding will be sliced across'
                              ' multiple devices')

    flags.DEFINE_integer("data_parallel_threshold", default=None,
                         help='Number of elements above which a distributed embedding will be sliced across'
                              ' multiple devices')

    flags.DEFINE_integer("cpu_offloading_threshold_gb", default=75,
                         help='Size of the embedding tables in GB above which '
                              'offloading to CPU memory should be employed. '
                              'Applies only to singleGPU at the moment.')

    flags.DEFINE_bool('cpu', default=False, help='Place the entire model on CPU')

    flags.DEFINE_bool("xla", default=False, help="Enable XLA")

    flags.DEFINE_integer("loss_scale", default=65536,
                         help="Static loss scale to use with mixed precision training")

    flags.DEFINE_integer("inter_op_parallelism", default=None, help='Number of inter op threads')
    flags.DEFINE_integer("intra_op_parallelism", default=None, help='Number of intra op threads')

    # Checkpointing
    flags.DEFINE_string("save_checkpoint_path", default=None,
                        help="Path to which to save a checkpoint file at the end of the training")
    flags.DEFINE_string("restore_checkpoint_path", default=None,
                        help="Path from which to restore a checkpoint before training")

    # Evaluation, logging, profiling
    flags.DEFINE_integer("auc_thresholds", default=8000,
                         help="Number of thresholds for the AUC computation")

    flags.DEFINE_integer("epochs", default=1, help="Number of epochs to train for")
    flags.DEFINE_integer("max_steps", default=-1,
                         help="Stop the training/inference after this many optimization steps")

    flags.DEFINE_integer("evals_per_epoch", default=1, help='Number of evaluations per epoch')
    flags.DEFINE_float("print_freq", default=100, help='Number of steps between debug prints')

    flags.DEFINE_integer("profiler_start_step", default=None, help='Step at which to start profiling')
    flags.DEFINE_integer("profiled_rank", default=1, help='Rank to profile')

    flags.DEFINE_string("log_path", default='dlrm_tf_log.json',
                        help="Path to JSON file for storing benchmark results")

    # dataset and dataloading settings
    flags.DEFINE_string("dataset_path", default=None, help="Path to dataset directory")
    flags.DEFINE_string("feature_spec", default="feature_spec.yaml",
                        help="Name of the feature spec file in the dataset directory")
    flags.DEFINE_enum("dataset_type", default="tf_raw",
                      enum_values=['tf_raw', 'synthetic', 'split_tfrecords'],
                      help='The type of the dataset to use')
    flags.DEFINE_boolean("data_parallel_input", default=False,
                         help="Use a data-parallel dataloader,"
                              " i.e., load a local batch of data for all input features")

    # Synthetic dataset settings
    flags.DEFINE_boolean("synthetic_dataset_use_feature_spec", default=False,
                         help="Create a temporary synthetic dataset based on a real one. "
                              "Uses --dataset_path and --feature_spec. "
                              "Overrides synthetic dataset dimension flags, except the number of batches")
    flags.DEFINE_integer('synthetic_dataset_train_batches', default=64008,
                         help='Number of training batches in the synthetic dataset')
    flags.DEFINE_integer('synthetic_dataset_valid_batches', default=1350,
                         help='Number of validation batches in the synthetic dataset')
    flags.DEFINE_list('synthetic_dataset_cardinalities', default=26 * [1000],
                      help='Number of categories for each embedding table of the synthetic dataset')
    flags.DEFINE_list('synthetic_dataset_hotness', default=26 * [20],
                      help='Hotness of each embedding table of the synthetic dataset')
    flags.DEFINE_integer('synthetic_dataset_num_numerical_features', default=13,
                         help='Number of numerical features of the synthetic dataset')


define_common_flags()

FLAGS = flags.FLAGS
app.define_help_flags()
app.parse_flags_with_usage(sys.argv)

if FLAGS.xla:
    if FLAGS.cpu:
        os.environ['TF_XLA_FLAGS'] = '--tf_xla_auto_jit=fusible --tf_xla_cpu_global_jit'
    else:
        os.environ['TF_XLA_FLAGS'] = '--tf_xla_auto_jit=fusible'

import time

import tensorflow as tf
import tensorflow_addons as tfa
import numpy as np
import horovod.tensorflow as hvd
from tensorflow.keras.mixed_precision import LossScaleOptimizer

import dllogger

from utils.logging import IterTimer, init_logging
from utils.distributed import dist_print
from dataloading.dataloader import create_input_pipelines, get_dataset_metadata
from nn.lr_scheduler import LearningRateScheduler
from nn.model import Model
from nn.evaluator import Evaluator
from nn.trainer import Trainer


def init_tf(FLAGS):
    """
    Set global options for TensorFlow
    """
    gpus = tf.config.experimental.list_physical_devices('GPU')
    for gpu in gpus:
        tf.config.experimental.set_memory_growth(gpu, True)

    visible_gpus = []
    if gpus and not FLAGS.cpu:
        visible_gpus = gpus[hvd.local_rank()]
    tf.config.experimental.set_visible_devices(visible_gpus, 'GPU')

    if FLAGS.amp:
        policy = tf.keras.mixed_precision.Policy("mixed_float16")
        tf.keras.mixed_precision.set_global_policy(policy)

    tf.config.run_functions_eagerly(FLAGS.run_eagerly)

    if FLAGS.tfdata_debug:
        tf.data.experimental.enable_debug_mode()

    if FLAGS.inter_op_parallelism:
        tf.config.threading.set_inter_op_parallelism_threads(FLAGS.inter_op_parallelism)

    if FLAGS.intra_op_parallelism:
        tf.config.threading.set_intra_op_parallelism_threads(FLAGS.intra_op_parallelism)

    tf.random.set_seed(hash((FLAGS.seed, hvd.rank())))


def parse_embedding_dimension(embedding_dim, num_embeddings):
    try:
        embedding_dim = int(embedding_dim)
        embedding_dim = [embedding_dim] * num_embeddings
        return embedding_dim
    except (TypeError, ValueError):
        pass

    if not isinstance(embedding_dim, str):
        raise ValueError(f'Unsupported embedding_dimension type: {type(embedding_dim)}')

    if os.path.exists(embedding_dim):
        # json file with a list of dimensions for each feature
        with open(embedding_dim) as f:
            edim = json.load(f)
    else:
        edim = embedding_dim.split(',')

    edim = [int(d) for d in edim]

    if len(edim) != num_embeddings:
        raise ValueError(f'Length of specified embedding dimensions ({len(edim)}) does not match'
                         f' the number of embedding layers in the neural network ({num_embeddings})')

    return edim


def compute_eval_points(train_batches, evals_per_epoch):
    eval_points = np.linspace(0, train_batches - 1, evals_per_epoch + 1)[1:]
    eval_points = np.round(eval_points).tolist()
    return eval_points


def inference_benchmark(validation_pipeline, dlrm, timer, FLAGS):
    if FLAGS.max_steps == -1:
        FLAGS.max_steps = 1000

    evaluator = Evaluator(model=dlrm, timer=timer, auc_thresholds=FLAGS.auc_thresholds,
                          max_steps=FLAGS.max_steps, cast_dtype=None)

    auc, test_loss, latencies = evaluator(validation_pipeline)

    # don't benchmark the first few warmup steps
    latencies = latencies[10:]
    result_data = {
        'mean_inference_throughput': FLAGS.valid_batch_size / np.mean(latencies),
        'mean_inference_latency': np.mean(latencies)
    }

    for percentile in [90, 95, 99]:
        result_data[f'p{percentile}_inference_latency'] = np.percentile(latencies, percentile)
    result_data['auc'] = auc

    if hvd.rank() == 0:
        dllogger.log(data=result_data, step=tuple())


def validate_cmd_line_flags():
    if FLAGS.cpu and hvd.size() > 1:
        raise ValueError('MultiGPU mode is not supported when training on CPU')

    if FLAGS.cpu and FLAGS.interaction == 'custom_cuda':
        raise ValueError('"custom_cuda" dot interaction not supported for CPU. '
                         'Please specify "--dot_interaction tensorflow" if you want to run on CPU')

    if FLAGS.concat_embedding and hvd.size() != 1:
        raise ValueError('Concat embedding is currently unsupported in multiGPU mode.')

    if FLAGS.concat_embedding and FLAGS.dataset_type != 'tf_raw':
        raise ValueError('Concat embedding is only supported for dataset_type="tf_raw",'
                         f' got dataset_type={FLAGS.dataset_type}')

    all_embedding_dims_equal = all(dim == FLAGS.embedding_dim[0] for dim in FLAGS.embedding_dim)
    if FLAGS.concat_embedding and not all_embedding_dims_equal:
        raise ValueError('Concat embedding is only supported when all embeddings have the same output dimension,'
                         f' got embedding_dim={FLAGS.embedding_dim}')


def create_optimizers(flags):
    if flags.optimizer == 'sgd':
        embedding_optimizer = tf.keras.optimizers.legacy.SGD(learning_rate=flags.learning_rate, momentum=0)
        if flags.amp:
            embedding_optimizer = LossScaleOptimizer(embedding_optimizer,
                                                     initial_scale=flags.loss_scale, dynamic=False)
        mlp_optimizer = embedding_optimizer

    elif flags.optimizer == 'adam':
        embedding_optimizer = tfa.optimizers.LazyAdam(learning_rate=flags.learning_rate,
                                                      beta_1=flags.beta1, beta_2=flags.beta2)
        mlp_optimizer = tf.keras.optimizers.legacy.Adam(learning_rate=flags.learning_rate,
                                                        beta_1=flags.beta1, beta_2=flags.beta2)
        if flags.amp:
            # only wrap the mlp optimizer and not the embedding optimizer
            # because the embeddings are not run in FP16
            mlp_optimizer = LossScaleOptimizer(mlp_optimizer, initial_scale=flags.loss_scale, dynamic=False)

    return mlp_optimizer, embedding_optimizer


def main():
    hvd.init()
    init_logging(log_path=FLAGS.log_path, params_dict=FLAGS.flag_values_dict(), enabled=hvd.rank() == 0)
    init_tf(FLAGS)

    dataset_metadata = get_dataset_metadata(FLAGS.dataset_path, FLAGS.feature_spec)

    FLAGS.embedding_dim = parse_embedding_dimension(
        FLAGS.embedding_dim,
        num_embeddings=len(dataset_metadata.categorical_cardinalities))

    validate_cmd_line_flags()

    if FLAGS.restore_checkpoint_path is not None:
        model = Model.create_from_checkpoint(FLAGS.restore_checkpoint_path)
    else:
        model = Model(**FLAGS.flag_values_dict(),
                      num_numerical_features=dataset_metadata.num_numerical_features,
                      categorical_cardinalities=dataset_metadata.categorical_cardinalities,
                      transpose=False)

    table_ids = model.sparse_model.get_local_table_ids(hvd.rank())
    print(f'local feature ids={table_ids}')

    train_pipeline, validation_pipeline = create_input_pipelines(
        dataset_type=FLAGS.dataset_type,
        dataset_path=FLAGS.dataset_path,
        train_batch_size=FLAGS.batch_size,
        test_batch_size=FLAGS.valid_batch_size,
        table_ids=table_ids,
        feature_spec=FLAGS.feature_spec,
        rank=hvd.rank(), world_size=hvd.size(),
        concat_features=FLAGS.concat_embedding,
        data_parallel_input=FLAGS.data_parallel_input)

    mlp_optimizer, embedding_optimizer = create_optimizers(FLAGS)

    scheduler = LearningRateScheduler([mlp_optimizer, embedding_optimizer],
                                      warmup_steps=FLAGS.warmup_steps,
                                      base_lr=FLAGS.learning_rate,
                                      decay_start_step=FLAGS.decay_start_step,
                                      decay_steps=FLAGS.decay_steps)

    timer = IterTimer(train_batch_size=FLAGS.batch_size, test_batch_size=FLAGS.batch_size,
                      optimizer=embedding_optimizer, print_freq=FLAGS.print_freq,
                      enabled=hvd.rank() == 0)

    if FLAGS.mode == 'inference':
        inference_benchmark(validation_pipeline, model, timer, FLAGS)
        return

    elif FLAGS.mode == 'eval':
        evaluator = Evaluator(model=model, timer=timer, auc_thresholds=FLAGS.auc_thresholds,
                              max_steps=FLAGS.max_steps)
        test_auc, test_loss, _ = evaluator(validation_pipeline)
        if hvd.rank() == 0:
            dllogger.log(data=dict(auc=test_auc, test_loss=test_loss), step=tuple())
        return

    eval_points = compute_eval_points(train_batches=len(train_pipeline),
                                      evals_per_epoch=FLAGS.evals_per_epoch)

    trainer = Trainer(model, embedding_optimizer=embedding_optimizer, mlp_optimizer=mlp_optimizer,
                      amp=FLAGS.amp, lr_scheduler=scheduler, tf_dataset_op=train_pipeline.op,
                      cpu=FLAGS.cpu)

    evaluator = Evaluator(model=model, timer=timer, auc_thresholds=FLAGS.auc_thresholds,
                          distributed=hvd.size() > 1)

    best_auc = 0
    best_loss = 1e6
    train_begin = time.time()
    for epoch in range(FLAGS.epochs):
        print('Starting epoch: ', epoch)
        for step in range(len(train_pipeline)):
            if step == FLAGS.profiler_start_step and hvd.rank() == FLAGS.profiled_rank:
                tf.profiler.experimental.start('logdir')

            if FLAGS.profiler_start_step and step == FLAGS.profiler_start_step + 100 and hvd.rank() == FLAGS.profiled_rank:
                tf.profiler.experimental.stop()

            loss = trainer.train_step()

            if step == 0 and hvd.size() > 1:
                dmp.broadcast_variables(model.variables, root_rank=0)

            if step % FLAGS.print_freq == 0:
                if tf.math.is_nan(loss):
                    print('NaN loss encountered in training. Aborting.')
                    break
                timer.step_train(loss=loss)

            if FLAGS.max_steps != -1 and step > FLAGS.max_steps:
                dist_print(f'Max steps of {FLAGS.max_steps} reached, exiting')
                break

            if step in eval_points:
                test_auc, test_loss, _ = evaluator(validation_pipeline)
                dist_print(f'Evaluation completed, AUC: {test_auc:.6f}, test_loss: {test_loss:.6f}')
                timer.test_idx = 0
                best_auc = max(best_auc, test_auc)
                best_loss = min(best_loss, test_loss)

    elapsed = time.time() - train_begin

    if FLAGS.save_checkpoint_path is not None:
        model.save_checkpoint(FLAGS.save_checkpoint_path)

    if hvd.rank() == 0:
        dist_print(f'Training run completed, elapsed: {elapsed:.0f} [s]')
        results = {
            'throughput': FLAGS.batch_size / timer.mean_train_time(),
            'mean_step_time_ms': timer.mean_train_time() * 1000,
            'auc': best_auc,
            'validation_loss': best_loss
        }
        dllogger.log(data=results, step=tuple())
PyTorch/Translation/Transformer
Transformer
preprocess
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
#

import argparse
from itertools import zip_longest
import os
import shutil

from fairseq.data import indexed_dataset, dictionary
from fairseq.tokenizer import Tokenizer, tokenize_line


def get_parser():
    parser = argparse.ArgumentParser(
        description='Data pre-processing: Create dictionary and store data in binary format')
    parser.add_argument('-s', '--source-lang', default=None, metavar='SRC', help='source language')
    parser.add_argument('-t', '--target-lang', default=None, metavar='TARGET', help='target language')
    parser.add_argument('--trainpref', metavar='FP', default=None, help='train file prefix')
    parser.add_argument('--validpref', metavar='FP', default=None, help='comma separated, valid file prefixes')
    parser.add_argument('--testpref', metavar='FP', default=None, help='comma separated, test file prefixes')
    parser.add_argument('--destdir', metavar='DIR', default='data-bin', help='destination dir')
    parser.add_argument('--thresholdtgt', metavar='N', default=0, type=int,
                        help='map words appearing less than threshold times to unknown')
    parser.add_argument('--thresholdsrc', metavar='N', default=0, type=int,
                        help='map words appearing less than threshold times to unknown')
    parser.add_argument('--tgtdict', metavar='FP', help='reuse given target dictionary')
    parser.add_argument('--srcdict', metavar='FP', help='reuse given source dictionary')
    parser.add_argument('--nwordstgt', metavar='N', default=-1, type=int,
                        help='number of target words to retain')
    parser.add_argument('--nwordssrc', metavar='N', default=-1, type=int,
                        help='number of source words to retain')
    parser.add_argument('--alignfile', metavar='ALIGN', default=None, help='an alignment file (optional)')
    parser.add_argument('--output-format', metavar='FORMAT', default='binary',
                        choices=['binary', 'raw'], help='output format (optional)')
    parser.add_argument('--joined-dictionary', action='store_true', help='Generate joined dictionary')
    parser.add_argument('--only-source', action='store_true', help='Only process the source language')
    parser.add_argument('--padding-factor', metavar='N', default=8, type=int,
                        help='Pad dictionary size to be multiple of N')
    return parser


def main(args):
    print(args)
    os.makedirs(args.destdir, exist_ok=True)
    target = not args.only_source

    def build_dictionary(filenames):
        d = dictionary.Dictionary()
        for filename in filenames:
            Tokenizer.add_file_to_dictionary(filename, d, tokenize_line)
        return d

    def train_path(lang):
        return '{}{}'.format(args.trainpref, ('.' + lang) if lang else '')

    def file_name(prefix, lang):
        fname = prefix
        if lang is not None:
            fname += f'.{lang}'
        return fname

    def dest_path(prefix, lang):
        return os.path.join(args.destdir, file_name(prefix, lang))

    def dict_path(lang):
        return dest_path('dict', lang) + '.txt'

    def dataset_dest_path(output_prefix, lang, extension):
        base = f'{args.destdir}/{output_prefix}'
        lang_part = f'.{args.source_lang}-{args.target_lang}.{lang}' if lang is not None else ''
        return f'{base}{lang_part}.{extension}'

    if args.joined_dictionary:
        assert not args.srcdict, 'cannot combine --srcdict and --joined-dictionary'
        assert not args.tgtdict, 'cannot combine --tgtdict and --joined-dictionary'
        src_dict = build_dictionary({
            train_path(lang) for lang in [args.source_lang, args.target_lang]
        })
        tgt_dict = src_dict
    else:
        if args.srcdict:
            src_dict = dictionary.Dictionary.load(args.srcdict)
        else:
            assert args.trainpref, "--trainpref must be set if --srcdict is not specified"
            src_dict = build_dictionary([train_path(args.source_lang)])
        if target:
            if args.tgtdict:
                tgt_dict = dictionary.Dictionary.load(args.tgtdict)
            else:
                assert args.trainpref, "--trainpref must be set if --tgtdict is not specified"
                tgt_dict = build_dictionary([train_path(args.target_lang)])

    src_dict.finalize(
        threshold=args.thresholdsrc,
        nwords=args.nwordssrc,
        padding_factor=args.padding_factor,
    )
    src_dict.save(dict_path(args.source_lang))
    if target:
        if not args.joined_dictionary:
            tgt_dict.finalize(
                threshold=args.thresholdtgt,
                nwords=args.nwordstgt,
                padding_factor=args.padding_factor,
            )
        tgt_dict.save(dict_path(args.target_lang))

    def make_binary_dataset(input_prefix, output_prefix, lang):
        _dict = dictionary.Dictionary.load(dict_path(lang))
        print('| [{}] Dictionary: {} types'.format(lang, len(_dict) - 1))

        ds = indexed_dataset.IndexedDatasetBuilder(dataset_dest_path(output_prefix, lang, 'bin'))

        def consumer(tensor):
            ds.add_item(tensor)

        input_file = '{}{}'.format(input_prefix, ('.' + lang) if lang is not None else '')
        res = Tokenizer.binarize(input_file, _dict, consumer)
        print('| [{}] {}: {} sents, {} tokens, {:.3}% replaced by {}'.format(
            lang, input_file, res['nseq'], res['ntok'],
            100 * res['nunk'] / res['ntok'], _dict.unk_word))
        ds.finalize(dataset_dest_path(output_prefix, lang, 'idx'))

    def make_dataset(input_prefix, output_prefix, lang):
        if args.output_format == 'binary':
            make_binary_dataset(input_prefix, output_prefix, lang)
        elif args.output_format == 'raw':
            # Copy original text file to destination folder
            output_text_file = dest_path(
                output_prefix + '.{}-{}'.format(args.source_lang, args.target_lang),
                lang,
            )
            shutil.copyfile(file_name(input_prefix, lang), output_text_file)

    def make_all(lang):
        if args.trainpref:
            make_dataset(args.trainpref, 'train', lang)
        if args.validpref:
            for k, validpref in enumerate(args.validpref.split(',')):
                outprefix = 'valid{}'.format(k) if k > 0 else 'valid'
                make_dataset(validpref, outprefix, lang)
        if args.testpref:
            for k, testpref in enumerate(args.testpref.split(',')):
                outprefix = 'test{}'.format(k) if k > 0 else 'test'
                make_dataset(testpref, outprefix, lang)

    make_all(args.source_lang)
    if target:
        make_all(args.target_lang)

    print('| Wrote preprocessed data to {}'.format(args.destdir))

    if args.alignfile:
        assert args.trainpref, "--trainpref must be set if --alignfile is specified"
        src_file_name = train_path(args.source_lang)
        tgt_file_name = train_path(args.target_lang)
        src_dict = dictionary.Dictionary.load(dict_path(args.source_lang))
        tgt_dict = dictionary.Dictionary.load(dict_path(args.target_lang))
        freq_map = {}
        with open(args.alignfile, 'r') as align_file:
            with open(src_file_name, 'r') as src_file:
                with open(tgt_file_name, 'r') as tgt_file:
                    for a, s, t in zip_longest(align_file, src_file, tgt_file):
                        si = Tokenizer.tokenize(s, src_dict, add_if_not_exist=False)
                        ti = Tokenizer.tokenize(t, tgt_dict, add_if_not_exist=False)
                        ai = list(map(lambda x: tuple(x.split('-')), a.split()))
                        for sai, tai in ai:
                            srcidx = si[int(sai)]
                            tgtidx = ti[int(tai)]
                            if srcidx != src_dict.unk() and tgtidx != tgt_dict.unk():
                                assert srcidx != src_dict.pad()
                                assert srcidx != src_dict.eos()
                                assert tgtidx != tgt_dict.pad()
                                assert tgtidx != tgt_dict.eos()
                                if srcidx not in freq_map:
                                    freq_map[srcidx] = {}
                                if tgtidx not in freq_map[srcidx]:
                                    freq_map[srcidx][tgtidx] = 1
                                else:
                                    freq_map[srcidx][tgtidx] += 1

        align_dict = {}
        for srcidx in freq_map:
            align_dict[srcidx] = max(freq_map[srcidx], key=freq_map[srcidx].get)

        with open(os.path.join(args.destdir, 'alignment.{}-{}.txt'.format(
                args.source_lang, args.target_lang)), 'w') as f:
            for k, v in align_dict.items():
                print('{} {}'.format(src_dict[k], tgt_dict[v]), file=f)


if __name__ == '__main__':
    parser = get_parser()
    ARGS = parser.parse_args()
    main(ARGS)
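A hedged sketch of driving this script programmatically rather than from the shell; the import (the script as a module) and the file prefixes are illustrative assumptions:

# Assumes tokenized files wmt14/train.en and wmt14/train.de exist.
from preprocess import get_parser, main  # import path assumed

args = get_parser().parse_args([
    '--source-lang', 'en',
    '--target-lang', 'de',
    '--trainpref', 'wmt14/train',
    '--destdir', 'data-bin/wmt14_en_de',
    '--joined-dictionary',
])
main(args)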
PyTorch/Forecasting/TFT/triton/deployment_toolkit
deployment_toolkit
core
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc
import importlib
import logging
import os
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union

import numpy as np

LOGGER = logging.getLogger(__name__)

DATALOADER_FN_NAME = "get_dataloader_fn"
GET_MODEL_FN_NAME = "get_model"
GET_SERVING_INPUT_RECEIVER_FN = "get_serving_input_receiver_fn"
GET_ARGPARSER_FN_NAME = "update_argparser"


class TensorSpec(NamedTuple):
    name: str
    dtype: str
    shape: Tuple


class Parameter(Enum):
    def __lt__(self, other: "Parameter") -> bool:
        return self.value < other.value

    def __str__(self):
        return self.value


class Accelerator(Parameter):
    NONE = "none"
    AMP = "amp"
    TRT = "trt"
    CUDA = NONE  # backward compatibility


class Precision(Parameter):
    INT8 = "int8"
    FP16 = "fp16"
    FP32 = "fp32"
    TF32 = "tf32"  # Deprecated


class Format(Parameter):
    TF_GRAPHDEF = "tf-graphdef"
    TF_SAVEDMODEL = "tf-savedmodel"
    TF_TRT = "tf-trt"
    TF_ESTIMATOR = "tf-estimator"
    TF_KERAS = "tf-keras"
    ONNX = "onnx"
    TRT = "trt"
    TS_SCRIPT = "ts-script"
    TS_TRACE = "ts-trace"
    PYT = "pyt"
    FASTERTRANSFORMER = "fastertransformer"


class Model(NamedTuple):
    handle: object
    # TODO: precision should be removed
    precision: Optional[Precision]
    inputs: Dict[str, TensorSpec]
    outputs: Dict[str, TensorSpec]


def load_from_file(file_path, label, target):
    spec = importlib.util.spec_from_file_location(name=label, location=file_path)
    my_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(my_module)  # pytype: disable=attribute-error
    return getattr(my_module, target, None)


class BaseLoader(abc.ABC):
    required_fn_name_for_signature_parsing: Optional[str] = None

    @abc.abstractmethod
    def load(self, model_path: Union[str, Path], **kwargs) -> Model:
        """
        Loads and process model from file based on given set of args
        """
        pass


class BaseSaver(abc.ABC):
    required_fn_name_for_signature_parsing: Optional[str] = None

    @abc.abstractmethod
    def save(self, model: Model, model_path: Union[str, Path], dataloader_fn) -> None:
        """
        Save model to file
        """
        pass


class BaseRunner(abc.ABC):
    required_fn_name_for_signature_parsing: Optional[str] = None

    @abc.abstractmethod
    def init_inference(self, model: Model):
        raise NotImplementedError


class BaseRunnerSession(abc.ABC):
    def __init__(self, model: Model):
        self._model = model

    @abc.abstractmethod
    def __enter__(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def __exit__(self, exc_type, exc_value, traceback):
        raise NotImplementedError()

    @abc.abstractmethod
    def __call__(self, x: Dict[str, object]):
        raise NotImplementedError()

    def _set_env_variables(self) -> Dict[str, object]:
        """This method does not remove values; fix it if needed."""
        to_set = {}
        old_values = {k: os.environ.pop(k, None) for k in to_set}
        os.environ.update(to_set)
        return old_values

    def _recover_env_variables(self, old_envs: Dict[str, object]):
        for name, value in old_envs.items():
            if value is None:
                del os.environ[name]
            else:
                os.environ[name] = str(value)


class BaseConverter(abc.ABC):
    required_fn_name_for_signature_parsing: Optional[str] = None

    @abc.abstractmethod
    def convert(self, model: Model, dataloader_fn) -> Model:
        raise NotImplementedError()

    @staticmethod
    def required_source_model_precision(requested_model_precision: Precision) -> Precision:
        return requested_model_precision


class BaseMetricsCalculator(abc.ABC):
    required_fn_name_for_signature_parsing: Optional[str] = None

    def calc(
        self,
        *,
        ids: List[Any],
        y_pred: Dict[str, np.ndarray],
        x: Optional[Dict[str, np.ndarray]],
        y_real: Optional[Dict[str, np.ndarray]],
    ) -> Dict[str, float]:
        """
        Calculates error/accuracy metrics

        Args:
            ids: List of ids identifying each sample in the batch
            y_pred: model output as dict where key is output name and value is output value
            x: model input as dict where key is input name and value is input value
            y_real: input ground truth as dict where key is output name and value is output value

        Returns:
            dictionary where key is metric name and value is its value
        """
        pass

    @abc.abstractmethod
    def update(
        self,
        ids: List[Any],
        y_pred: Dict[str, np.ndarray],
        x: Optional[Dict[str, np.ndarray]],
        y_real: Optional[Dict[str, np.ndarray]],
    ):
        pass

    @property
    @abc.abstractmethod
    def metrics(self) -> Dict[str, Any]:
        pass


class ShapeSpec(NamedTuple):
    min: Tuple
    opt: Tuple
    max: Tuple


class MeasurementMode(Enum):
    COUNT_WINDOWS = "count_windows"
    TIME_WINDOWS = "time_windows"


class PerformanceTool(Enum):
    """
    Available performance evaluation tools
    """

    MODEL_ANALYZER = "model_analyzer"
    PERF_ANALYZER = "perf_analyzer"


class BatchingMode(Enum):
    """
    Available batching modes
    """

    STATIC = "static"
    DYNAMIC = "dynamic"


class EvaluationMode(Enum):
    """
    Available evaluation modes
    """

    OFFLINE = "offline"
    ONLINE = "online"


class OfflineMode(Enum):
    SYSTEM = "system"
    CUDA = "cuda"
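A hedged sketch of how these abstractions compose: a toy loader that returns a Model handle together with its declared input/output signature (the tensor names, dtypes, and shapes are illustrative, as is the import path):

from pathlib import Path
from typing import Union

from deployment_toolkit.core import BaseLoader, Model, Precision, TensorSpec  # import path assumed


class DictLoader(BaseLoader):
    """Toy loader: 'loads' a plain Python object and declares its signature."""

    def load(self, model_path: Union[str, Path], **kwargs) -> Model:
        handle = {"path": str(model_path)}  # stand-in for a real model object
        inputs = {"INPUT__0": TensorSpec(name="INPUT__0", dtype="float32", shape=(-1, 8))}
        outputs = {"OUTPUT__0": TensorSpec(name="OUTPUT__0", dtype="float32", shape=(-1, 1))}
        return Model(handle=handle, precision=Precision.FP32, inputs=inputs, outputs=outputs)


model = DictLoader().load("checkpoints/model.pt")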
TensorFlow/Detection/SSD/models/research/slim/datasets
datasets
cifar10
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides data for the Cifar10 dataset.

The dataset scripts used to create the dataset can be found at:
tensorflow/models/research/slim/datasets/download_and_convert_cifar10.py
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import tensorflow as tf

from datasets import dataset_utils

slim = tf.contrib.slim

_FILE_PATTERN = 'cifar10_%s.tfrecord'

SPLITS_TO_SIZES = {'train': 50000, 'test': 10000}

_NUM_CLASSES = 10

_ITEMS_TO_DESCRIPTIONS = {
    'image': 'A [32 x 32 x 3] color image.',
    'label': 'A single integer between 0 and 9',
}


def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
  """Gets a dataset tuple with instructions for reading cifar10.

  Args:
    split_name: A train/test split name.
    dataset_dir: The base directory of the dataset sources.
    file_pattern: The file pattern to use when matching the dataset sources.
      It is assumed that the pattern contains a '%s' string so that the split
      name can be inserted.
    reader: The TensorFlow reader type.

  Returns:
    A `Dataset` namedtuple.

  Raises:
    ValueError: if `split_name` is not a valid train/test split.
  """
  if split_name not in SPLITS_TO_SIZES:
    raise ValueError('split name %s was not recognized.' % split_name)

  if not file_pattern:
    file_pattern = _FILE_PATTERN
  file_pattern = os.path.join(dataset_dir, file_pattern % split_name)

  # Allowing None in the signature so that dataset_factory can use the default.
  if not reader:
    reader = tf.TFRecordReader

  keys_to_features = {
      'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
      'image/format': tf.FixedLenFeature((), tf.string, default_value='png'),
      'image/class/label': tf.FixedLenFeature(
          [], tf.int64, default_value=tf.zeros([], dtype=tf.int64)),
  }

  items_to_handlers = {
      'image': slim.tfexample_decoder.Image(shape=[32, 32, 3]),
      'label': slim.tfexample_decoder.Tensor('image/class/label'),
  }

  decoder = slim.tfexample_decoder.TFExampleDecoder(
      keys_to_features, items_to_handlers)

  labels_to_names = None
  if dataset_utils.has_labels(dataset_dir):
    labels_to_names = dataset_utils.read_label_file(dataset_dir)

  return slim.dataset.Dataset(
      data_sources=file_pattern,
      reader=reader,
      decoder=decoder,
      num_samples=SPLITS_TO_SIZES[split_name],
      items_to_descriptions=_ITEMS_TO_DESCRIPTIONS,
      num_classes=_NUM_CLASSES,
      labels_to_names=labels_to_names)
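A short usage sketch (TF1 / tf.contrib.slim; assumes TFRecords were produced by download_and_convert_cifar10.py into /tmp/cifar10, which is an illustrative path):

import tensorflow as tf

from datasets import cifar10

slim = tf.contrib.slim

dataset = cifar10.get_split('train', '/tmp/cifar10')

# The provider wires the TFRecord reader and decoder into input tensors.
provider = slim.dataset_data_provider.DatasetDataProvider(dataset, shuffle=True)
image, label = provider.get(['image', 'label'])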
TensorFlow2/LanguageModeling/BERT
BERT
create_finetuning_data
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""BERT finetuning task dataset generator."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import functools
import json

from absl import app
from absl import flags
import tensorflow as tf

import classifier_data_lib
# word-piece tokenizer based squad_lib
import squad_lib as squad_lib_wp
# sentence-piece tokenizer based squad_lib
import squad_lib_sp
import tokenization

FLAGS = flags.FLAGS

flags.DEFINE_enum(
    "fine_tuning_task_type", "classification", ["classification", "squad"],
    "The name of the BERT fine tuning task for which data "
    "will be generated.")

# BERT classification specific flags.
flags.DEFINE_string(
    "input_data_dir", None,
    "The input data dir. Should contain the .tsv files (or other data files) "
    "for the task.")

flags.DEFINE_enum("classification_task_name", "MNLI",
                  ["COLA", "MNLI", "MRPC", "QNLI", "SST-2", "XNLI"],
                  "The name of the task to train BERT classifier.")

# BERT Squad task specific flags.
flags.DEFINE_string(
    "squad_data_file", None,
    "The input data file for generating training data for the BERT squad task.")

flags.DEFINE_integer(
    "doc_stride", 128,
    "When splitting up a long document into chunks, how much stride to "
    "take between chunks.")

flags.DEFINE_integer(
    "max_query_length", 64,
    "The maximum number of tokens for the question. Questions longer than "
    "this will be truncated to this length.")

flags.DEFINE_bool(
    "version_2_with_negative", False,
    "If true, the SQuAD examples contain some that do not have an answer.")

# Shared flags across BERT fine-tuning tasks.
flags.DEFINE_string("vocab_file", None,
                    "The vocabulary file that the BERT model was trained on.")

flags.DEFINE_string(
    "train_data_output_path", None,
    "The path in which generated training input data will be written as tf"
    " records.")

flags.DEFINE_string(
    "eval_data_output_path", None,
    "The path in which generated evaluation input data will be written as tf"
    " records.")

flags.DEFINE_string("meta_data_file_path", None,
                    "The path in which input meta data will be written.")

flags.DEFINE_bool(
    "do_lower_case", True,
    "Whether to lower case the input text. Should be True for uncased "
    "models and False for cased models.")

flags.DEFINE_integer(
    "max_seq_length", 128,
    "The maximum total input sequence length after WordPiece tokenization. "
    "Sequences longer than this will be truncated, and sequences shorter "
    "than this will be padded.")

flags.DEFINE_string("sp_model_file", "",
                    "The path to the model used by sentence piece tokenizer.")

flags.DEFINE_enum(
    "tokenizer_impl", "word_piece", ["word_piece", "sentence_piece"],
    "Specifies the tokenizer implementation, i.e., whether to use word_piece "
    "or sentence_piece tokenizer. Canonical BERT uses word_piece tokenizer, "
    "while ALBERT uses sentence_piece tokenizer.")


def generate_classifier_dataset():
  """Generates classifier dataset and returns input meta data."""
  assert FLAGS.input_data_dir and FLAGS.classification_task_name

  processors = {
      "cola": classifier_data_lib.ColaProcessor,
      "mnli": classifier_data_lib.MnliProcessor,
      "mrpc": classifier_data_lib.MrpcProcessor,
      "qnli": classifier_data_lib.QnliProcessor,
      "sst-2": classifier_data_lib.SstProcessor,
      "xnli": classifier_data_lib.XnliProcessor,
  }
  task_name = FLAGS.classification_task_name.lower()
  if task_name not in processors:
    raise ValueError("Task not found: %s" % (task_name))
  if FLAGS.tokenizer_impl == "word_piece":
    tokenizer = tokenization.FullTokenizer(
        vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
    processor_text_fn = tokenization.convert_to_unicode
  else:
    assert FLAGS.tokenizer_impl == "sentence_piece"
    tokenizer = tokenization.FullSentencePieceTokenizer(FLAGS.sp_model_file)
    processor_text_fn = functools.partial(
        tokenization.preprocess_text, lower=FLAGS.do_lower_case)

  processor = processors[task_name](processor_text_fn)
  return classifier_data_lib.generate_tf_record_from_data_file(
      processor,
      FLAGS.input_data_dir,
      tokenizer,
      train_data_output_path=FLAGS.train_data_output_path,
      eval_data_output_path=FLAGS.eval_data_output_path,
      max_seq_length=FLAGS.max_seq_length)


def generate_squad_dataset():
  """Generates squad training dataset and returns input meta data."""
  assert FLAGS.squad_data_file
  if FLAGS.tokenizer_impl == "word_piece":
    return squad_lib_wp.generate_tf_record_from_json_file(
        FLAGS.squad_data_file, FLAGS.vocab_file, FLAGS.train_data_output_path,
        FLAGS.max_seq_length, FLAGS.do_lower_case, FLAGS.max_query_length,
        FLAGS.doc_stride, FLAGS.version_2_with_negative)
  else:
    assert FLAGS.tokenizer_impl == "sentence_piece"
    return squad_lib_sp.generate_tf_record_from_json_file(
        FLAGS.squad_data_file, FLAGS.sp_model_file,
        FLAGS.train_data_output_path, FLAGS.max_seq_length,
        FLAGS.do_lower_case, FLAGS.max_query_length, FLAGS.doc_stride,
        FLAGS.version_2_with_negative)


def main(_):
  if FLAGS.tokenizer_impl == "word_piece":
    if not FLAGS.vocab_file:
      raise ValueError(
          "FLAG vocab_file for word-piece tokenizer is not specified.")
  else:
    assert FLAGS.tokenizer_impl == "sentence_piece"
    if not FLAGS.sp_model_file:
      raise ValueError(
          "FLAG sp_model_file for sentence-piece tokenizer is not specified.")

  if FLAGS.fine_tuning_task_type == "classification":
    input_meta_data = generate_classifier_dataset()
  else:
    input_meta_data = generate_squad_dataset()

  with tf.io.gfile.GFile(FLAGS.meta_data_file_path, "w") as writer:
    writer.write(json.dumps(input_meta_data, indent=4) + "\n")


if __name__ == "__main__":
  flags.mark_flag_as_required("train_data_output_path")
  flags.mark_flag_as_required("meta_data_file_path")
  app.run(main)
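Since the script is driven entirely by absl flags, one way to see the expected flag combinations is a programmatic invocation; a sketch for the SQuAD path (every file path below is a placeholder, not from the original):

```python
import sys
from absl import app

# Hypothetical SQuAD invocation; all paths here are placeholders.
sys.argv = [
    "create_finetuning_data",
    "--fine_tuning_task_type=squad",
    "--squad_data_file=/path/to/train-v1.1.json",
    "--vocab_file=/path/to/vocab.txt",
    "--train_data_output_path=/path/to/squad_train.tf_record",
    "--meta_data_file_path=/path/to/squad_meta_data",
    "--max_seq_length=384",
]
app.run(main)  # writes the tfrecords plus the JSON meta data file
```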
PyTorch/Classification/ConvNets/resnet50v1.5/training/FP32
FP32
DGX2V_resnet50_FP32_250E
python ./multiproc.py --nproc_per_node 8 ./launch.py --model resnet50 --precision FP32 --mode convergence --platform DGX2V /imagenet --workspace ${1:-./} --raport-file raport.json
TensorFlow/Recommendation/NCF
NCF
prepare_dataset
#!/bin/bash

# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e

DATASET_NAME=${1:-'ml-20m'}
RAW_DATADIR=${2:-'/data'}
CACHED_DATADIR=${3:-"${RAW_DATADIR}/cache/${DATASET_NAME}"}

# you can add another option to this case in order to support other datasets
case ${DATASET_NAME} in
    'ml-20m')
        ZIP_PATH=${RAW_DATADIR}/'ml-20m.zip'
        RATINGS_PATH=${RAW_DATADIR}'/ml-20m/ratings.csv'
        ;;
    'ml-1m')
        ZIP_PATH=${RAW_DATADIR}/'ml-1m.zip'
        RATINGS_PATH=${RAW_DATADIR}'/ml-1m/ratings.dat'
        ;;
    *)
        echo "Unsupported dataset name: $DATASET_NAME"
        exit 1
esac

if [ ! -d ${RAW_DATADIR} ]; then
    mkdir -p ${RAW_DATADIR}
fi

if [ ! -d ${CACHED_DATADIR} ]; then
    mkdir -p ${CACHED_DATADIR}
fi

rm -f log

if [ ! -f ${ZIP_PATH} ]; then
    echo "Dataset not found!"
    echo "To continue please download the dataset from http://files.grouplens.org/datasets/movielens/ml-20m.zip \
put it in ${ZIP_PATH} and rerun this script"
    exit 1
fi

if [ ! -f ${RATINGS_PATH} ]; then
    unzip -u ${ZIP_PATH} -d ${RAW_DATADIR}
fi

if [ ! -f ${CACHED_DATADIR}/train_ratings.pickle ]; then
    echo "preprocessing ${RATINGS_PATH} and saving to disk"
    t0=$(date +%s)
    python convert.py --path ${RATINGS_PATH} --output ${CACHED_DATADIR}
    t1=$(date +%s)
    delta=$(( $t1 - $t0 ))
    echo "Finished preprocessing in $delta seconds"
else
    echo 'Using cached preprocessed data'
fi

echo "Dataset $DATASET_NAME successfully prepared at: $CACHED_DATADIR"
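`convert.py` itself is not part of this excerpt; purely as an illustration of its input, the MovieLens ratings file follows the public `userId,movieId,rating,timestamp` schema and could be loaded like this (a hedged sketch, not the repository's actual preprocessing):

```python
import pandas as pd

# ml-20m ships ratings.csv with columns: userId, movieId, rating, timestamp.
ratings = pd.read_csv('/data/ml-20m/ratings.csv')
ratings = ratings.sort_values(by='timestamp')  # chronological order for train/test splits
print(len(ratings), 'ratings loaded')
```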
TensorFlow/Detection/SSD/models/research/object_detection/predictors
predictors
convolutional_keras_box_predictor_test
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for object_detection.predictors.convolutional_keras_box_predictor.""" import numpy as np import tensorflow as tf from google.protobuf import text_format from object_detection.builders import box_predictor_builder from object_detection.builders import hyperparams_builder from object_detection.predictors import convolutional_keras_box_predictor as box_predictor from object_detection.protos import hyperparams_pb2 from object_detection.utils import test_case class ConvolutionalKerasBoxPredictorTest(test_case.TestCase): def _build_conv_hyperparams(self): conv_hyperparams = hyperparams_pb2.Hyperparams() conv_hyperparams_text_proto = """ activation: RELU_6 regularizer { l2_regularizer { } } initializer { truncated_normal_initializer { } } """ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams) return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams) def test_get_boxes_for_five_aspect_ratios_per_location(self): def graph_fn(image_features): conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, num_classes=0, conv_hyperparams=self._build_conv_hyperparams(), freeze_batchnorm=False, inplace_batchnorm_update=False, num_predictions_per_location_list=[5], min_depth=0, max_depth=32, num_layers_before_predictor=1, use_dropout=True, dropout_keep_prob=0.8, kernel_size=1, box_code_size=4 )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) objectness_predictions = tf.concat( box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) return (box_encodings, objectness_predictions) image_features = np.random.rand(4, 8, 8, 64).astype(np.float32) (box_encodings, objectness_predictions) = self.execute(graph_fn, [image_features]) self.assertAllEqual(box_encodings.shape, [4, 320, 1, 4]) self.assertAllEqual(objectness_predictions.shape, [4, 320, 1]) def test_get_boxes_for_one_aspect_ratio_per_location(self): def graph_fn(image_features): conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, num_classes=0, conv_hyperparams=self._build_conv_hyperparams(), freeze_batchnorm=False, inplace_batchnorm_update=False, num_predictions_per_location_list=[1], min_depth=0, max_depth=32, num_layers_before_predictor=1, use_dropout=True, dropout_keep_prob=0.8, kernel_size=1, box_code_size=4 )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) objectness_predictions = tf.concat(box_predictions[ box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) return (box_encodings, objectness_predictions) image_features = np.random.rand(4, 8, 8, 64).astype(np.float32) (box_encodings, objectness_predictions) = self.execute(graph_fn, [image_features]) 
self.assertAllEqual(box_encodings.shape, [4, 64, 1, 4]) self.assertAllEqual(objectness_predictions.shape, [4, 64, 1]) def test_get_multi_class_predictions_for_five_aspect_ratios_per_location( self): num_classes_without_background = 6 image_features = np.random.rand(4, 8, 8, 64).astype(np.float32) def graph_fn(image_features): conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, num_classes=num_classes_without_background, conv_hyperparams=self._build_conv_hyperparams(), freeze_batchnorm=False, inplace_batchnorm_update=False, num_predictions_per_location_list=[5], min_depth=0, max_depth=32, num_layers_before_predictor=1, use_dropout=True, dropout_keep_prob=0.8, kernel_size=1, box_code_size=4 )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) class_predictions_with_background = tf.concat( box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) return (box_encodings, class_predictions_with_background) (box_encodings, class_predictions_with_background) = self.execute(graph_fn, [image_features]) self.assertAllEqual(box_encodings.shape, [4, 320, 1, 4]) self.assertAllEqual(class_predictions_with_background.shape, [4, 320, num_classes_without_background+1]) def test_get_predictions_with_feature_maps_of_dynamic_shape( self): image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64]) conv_box_predictor = ( box_predictor_builder.build_convolutional_keras_box_predictor( is_training=False, num_classes=0, conv_hyperparams=self._build_conv_hyperparams(), freeze_batchnorm=False, inplace_batchnorm_update=False, num_predictions_per_location_list=[5], min_depth=0, max_depth=32, num_layers_before_predictor=1, use_dropout=True, dropout_keep_prob=0.8, kernel_size=1, box_code_size=4 )) box_predictions = conv_box_predictor([image_features]) box_encodings = tf.concat( box_predictions[box_predictor.BOX_ENCODINGS], axis=1) objectness_predictions = tf.concat( box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1) init_op = tf.global_variables_initializer() resolution = 32 expected_num_anchors = resolution*resolution*5 with self.test_session() as sess: sess.run(init_op) (box_encodings_shape, objectness_predictions_shape) = sess.run( [tf.shape(box_encodings), tf.shape(objectness_predictions)], feed_dict={image_features: np.random.rand(4, resolution, resolution, 64)}) actual_variable_set = set( [var.op.name for var in tf.trainable_variables()]) self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 1, 4]) self.assertAllEqual(objectness_predictions_shape, [4, expected_num_anchors, 1]) expected_variable_set = set([ 'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/bias', 'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/kernel', 'BoxPredictor/ConvolutionalBoxHead_0/BoxEncodingPredictor/bias', 'BoxPredictor/ConvolutionalBoxHead_0/BoxEncodingPredictor/kernel', 'BoxPredictor/ConvolutionalClassHead_0/ClassPredictor/bias', 'BoxPredictor/ConvolutionalClassHead_0/ClassPredictor/kernel']) self.assertEqual(expected_variable_set, actual_variable_set) self.assertEqual(conv_box_predictor._sorted_head_names, ['box_encodings', 'class_predictions_with_background']) # TODO(kaftan): Remove conditional after CMLE moves to TF 1.10 if __name__ == '__main__': tf.test.main()
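The expected shapes in these tests follow directly from the anchor arithmetic: an H x W feature map with k predictions per location yields H*W*k anchors. A quick check of the constants asserted above:

```python
# 8x8 feature map, 5 anchors per location -> 320 anchors, i.e. shape [4, 320, 1, 4]
assert 8 * 8 * 5 == 320
# 8x8 feature map, 1 anchor per location -> 64 anchors, i.e. shape [4, 64, 1, 4]
assert 8 * 8 * 1 == 64
# dynamic-shape test: 32x32 resolution with 5 anchors per location -> 5120 anchors
assert 32 * 32 * 5 == 5120
```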
TensorFlow/Detection/SSD/models/research/object_detection/models
models
ssd_mobilenet_v1_ppn_feature_extractor_test
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for ssd_mobilenet_v1_ppn_feature_extractor.""" import numpy as np import tensorflow as tf from object_detection.models import ssd_feature_extractor_test from object_detection.models import ssd_mobilenet_v1_ppn_feature_extractor slim = tf.contrib.slim class SsdMobilenetV1PpnFeatureExtractorTest( ssd_feature_extractor_test.SsdFeatureExtractorTestBase): def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, is_training=True, use_explicit_padding=False): """Constructs a new feature extractor. Args: depth_multiplier: float depth multiplier for feature extractor pad_to_multiple: the nearest multiple to zero pad the input height and width dimensions to. is_training: whether the network is in training mode. use_explicit_padding: Use 'VALID' padding for convolutions, but prepad inputs so that the output dimensions are the same as if 'SAME' padding were used. Returns: an ssd_meta_arch.SSDFeatureExtractor object. """ min_depth = 32 return (ssd_mobilenet_v1_ppn_feature_extractor. SSDMobileNetV1PpnFeatureExtractor( is_training, depth_multiplier, min_depth, pad_to_multiple, self.conv_hyperparams_fn, use_explicit_padding=use_explicit_padding)) def test_extract_features_returns_correct_shapes_320(self): image_height = 320 image_width = 320 depth_multiplier = 1.0 pad_to_multiple = 1 expected_feature_map_shape = [(2, 20, 20, 512), (2, 10, 10, 512), (2, 5, 5, 512), (2, 3, 3, 512), (2, 2, 2, 512), (2, 1, 1, 512)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_returns_correct_shapes_300(self): image_height = 300 image_width = 300 depth_multiplier = 1.0 pad_to_multiple = 1 expected_feature_map_shape = [(2, 19, 19, 512), (2, 10, 10, 512), (2, 5, 5, 512), (2, 3, 3, 512), (2, 2, 2, 512), (2, 1, 1, 512)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_returns_correct_shapes_640(self): image_height = 640 image_width = 640 depth_multiplier = 1.0 pad_to_multiple = 1 expected_feature_map_shape = [(2, 40, 40, 512), (2, 20, 20, 512), (2, 10, 10, 512), (2, 5, 5, 512), (2, 3, 3, 512), (2, 2, 2, 512)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) 
self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_with_dynamic_image_shape(self): image_height = 320 image_width = 320 depth_multiplier = 1.0 pad_to_multiple = 1 expected_feature_map_shape = [(2, 20, 20, 512), (2, 10, 10, 512), (2, 5, 5, 512), (2, 3, 3, 512), (2, 2, 2, 512), (2, 1, 1, 512)] self.check_extract_features_returns_correct_shapes_with_dynamic_inputs( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_returns_correct_shapes_with_pad_to_multiple(self): image_height = 299 image_width = 299 depth_multiplier = 1.0 pad_to_multiple = 32 expected_feature_map_shape = [(2, 20, 20, 512), (2, 10, 10, 512), (2, 5, 5, 512), (2, 3, 3, 512), (2, 2, 2, 512)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_returns_correct_shapes_enforcing_min_depth(self): image_height = 256 image_width = 256 depth_multiplier = 0.5**12 pad_to_multiple = 1 expected_feature_map_shape = [(2, 16, 16, 32), (2, 8, 8, 32), (2, 4, 4, 32), (2, 2, 2, 32), (2, 1, 1, 32)] self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=False) self.check_extract_features_returns_correct_shape( 2, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shape, use_explicit_padding=True) def test_extract_features_raises_error_with_invalid_image_size(self): image_height = 32 image_width = 32 depth_multiplier = 1.0 pad_to_multiple = 1 self.check_extract_features_raises_error_with_invalid_image_size( image_height, image_width, depth_multiplier, pad_to_multiple) def test_preprocess_returns_correct_value_range(self): image_height = 128 image_width = 128 depth_multiplier = 1 pad_to_multiple = 1 test_image = np.random.rand(2, image_height, image_width, 3) feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple) preprocessed_image = feature_extractor.preprocess(test_image) self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0))) def test_variables_only_created_in_scope(self): depth_multiplier = 1 pad_to_multiple = 1 scope_name = 'MobilenetV1' self.check_feature_extractor_variables_under_scope( depth_multiplier, pad_to_multiple, scope_name) def test_has_fused_batchnorm(self): image_height = 320 image_width = 320 depth_multiplier = 1 pad_to_multiple = 1 image_placeholder = tf.placeholder(tf.float32, [1, image_height, image_width, 3]) feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple) preprocessed_image = feature_extractor.preprocess(image_placeholder) _ = feature_extractor.extract_features(preprocessed_image) self.assertTrue(any(op.type == 'FusedBatchNorm' for op in tf.get_default_graph().get_operations())) if __name__ == '__main__': tf.test.main()
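The expected feature maps form a stride pyramid: the first level is the input resolution divided by 16 (rounded up), and each further level halves the previous size, again rounding up. A sketch that reproduces the sizes asserted above (the helper is illustrative, not part of the library):

```python
import math

def ppn_feature_map_sizes(image_size, num_levels=6, base_stride=16):
    # First level: ceil(image_size / 16); each next level: ceil(previous / 2).
    size = math.ceil(image_size / base_stride)
    sizes = [size]
    for _ in range(num_levels - 1):
        size = math.ceil(size / 2)
        sizes.append(size)
    return sizes

assert ppn_feature_map_sizes(320) == [20, 10, 5, 3, 2, 1]
assert ppn_feature_map_sizes(300) == [19, 10, 5, 3, 2, 1]
assert ppn_feature_map_sizes(640) == [40, 20, 10, 5, 3, 2]
```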
TensorFlow/Detection/SSD
SSD
README
# SSD320 v1.2 For TensorFlow

This repository provides a script and recipe to train SSD320 v1.2 to achieve state-of-the-art accuracy, and is tested and maintained by NVIDIA.

The SSD model for TensorFlow1 is no longer maintained and will soon become unavailable; please consider the PyTorch version or the EfficientDet TensorFlow2 model as a substitute for your requirements.

## Table Of Contents
* [Model overview](#model-overview)
    * [Model architecture](#model-architecture)
    * [Default configuration](#default-configuration)
    * [Feature support matrix](#feature-support-matrix)
        * [Features](#features)
    * [Mixed precision training](#mixed-precision-training)
        * [Enabling mixed precision](#enabling-mixed-precision)
        * [Enabling TF32](#enabling-tf32)
    * [Glossary](#glossary)
* [Setup](#setup)
    * [Requirements](#requirements)
* [Quick Start Guide](#quick-start-guide)
* [Advanced](#advanced)
    * [Scripts and sample code](#scripts-and-sample-code)
    * [Parameters](#parameters)
    * [Command line options](#command-line-options)
    * [Getting the data](#getting-the-data)
    * [Training process](#training-process)
        * [Data preprocessing](#data-preprocessing)
        * [Data augmentation](#data-augmentation)
    * [Enabling mixed precision](#enabling-mixed-precision)
* [Performance](#performance)
    * [Benchmarking](#benchmarking)
        * [Training performance benchmark](#training-performance-benchmark)
        * [Inference performance benchmark](#inference-performance-benchmark)
    * [Results](#results)
        * [Training accuracy results](#training-accuracy-results)
            * [Training accuracy: NVIDIA DGX A100 (8x A100 40GB)](#training-accuracy-nvidia-dgx-a100-8x-a100-40gb)
            * [Training accuracy: NVIDIA DGX-1 (8x V100 16GB)](#training-accuracy-nvidia-dgx-1-8x-v100-16gb)
        * [Training performance results](#training-performance-results)
            * [Training performance: NVIDIA DGX A100 (8x A100 40GB)](#training-performance-nvidia-dgx-a100-8x-a100-40gb)
            * [Training performance: NVIDIA DGX-1 (8x V100 16GB)](#training-performance-nvidia-dgx-1-8x-v100-16gb)
        * [Inference performance results](#inference-performance-results)
            * [Inference performance: NVIDIA DGX A100 (1x A100 40GB)](#inference-performance-nvidia-dgx-a100-1x-a100-40gb)
            * [Inference performance: NVIDIA DGX-1 (1x V100 16GB)](#inference-performance-nvidia-dgx-1-1x-v100-16gb)
            * [Inference performance: NVIDIA T4](#inference-performance-nvidia-t4)
* [Release notes](#release-notes)
    * [Changelog](#changelog)
    * [Known issues](#known-issues)

## Model overview

The SSD320 v1.2 model is based on the [SSD: Single Shot MultiBox Detector](https://arxiv.org/abs/1512.02325) paper, which describes SSD as "a method for detecting objects in images using a single deep neural network".

This model is trained with mixed precision using Tensor Cores on Volta, Turing, and the NVIDIA Ampere GPU architectures. Therefore, researchers can get results 1.5x faster than training without Tensor Cores, while experiencing the benefits of mixed precision training. This model is tested against each NGC monthly container release to ensure consistent accuracy and performance over time.

### Model architecture

Our implementation is based on the existing [model from the TensorFlow models repository](https://github.com/tensorflow/models/blob/master/research/object_detection/samples/configs/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync.config). The network was altered in order to improve accuracy and increase throughput. Changes include:
- Replacing the VGG backbone with the more popular ResNet50.
- Adding multi-scale detection to the backbone using [Feature Pyramid Networks](https://arxiv.org/pdf/1612.03144.pdf).
- Replacing the original hard negative mining loss function with [Focal Loss](https://arxiv.org/pdf/1708.02002.pdf).
- Decreasing the input size to 320 x 320.

### Default configuration

We trained the model for 12500 steps (27 epochs) with the following setup:
- [SGDR](https://arxiv.org/pdf/1608.03983.pdf) with cosine decay learning rate
- Learning rate base = 0.16
- Momentum = 0.9
- Warm-up learning rate = 0.0693312
- Warm-up steps = 1000
- Batch size per GPU = 32
- Number of GPUs = 8

### Feature support matrix

The following features are supported by this model:

| **Feature** | **SSD320 v1.2** |
|:------------|----------------:|
|[Automatic mixed precision (AMP)](https://nvidia.github.io/apex/amp.html) | Yes |
|[Horovod Multi-GPU (NCCL)](https://github.com/horovod/horovod) | Yes |

#### Features

[TF-AMP](https://docs.nvidia.com/deeplearning/dgx/tensorflow-user-guide/index.html#tfamp) - a tool that enables Tensor Core-accelerated training. Refer to the [Enabling mixed precision](#enabling-mixed-precision) section for more details.

[Horovod](https://github.com/horovod/horovod) - Horovod is a distributed training framework for TensorFlow, Keras, PyTorch, and MXNet. The goal of Horovod is to make distributed deep learning fast and easy to use. For more information about how to get started with Horovod, see the [Horovod: Official repository](https://github.com/horovod/horovod).

[Multi-GPU training with Horovod](https://github.com/horovod/horovod/#usage) - our model uses Horovod to implement efficient multi-GPU training with NCCL. For details, see example sources in this repository or see the [TensorFlow tutorial](https://github.com/horovod/horovod/#usage).

### Mixed precision training

Mixed precision is the combined use of different numerical precisions in a computational method. [Mixed precision](https://arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [Tensor Cores](https://developer.nvidia.com/tensor-cores) in Volta, and following with both the Turing and Ampere architectures, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using mixed precision training previously required two steps:
1. Porting the model to use the FP16 data type where appropriate.
2. Adding loss scaling to preserve small gradient values.

This can now be achieved using Automatic Mixed Precision (AMP) for TensorFlow to enable the full [mixed precision methodology](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html#tensorflow) in your existing TensorFlow model code. AMP enables mixed precision training on Volta and Turing GPUs automatically. The TensorFlow framework code makes all necessary model changes internally.

In TF-AMP, the computational graph is optimized to use as few casts as necessary and maximize the use of FP16, and the loss scaling is automatically applied inside of supported optimizers. AMP can be configured to work with the existing tf.contrib loss scaling manager by disabling the AMP scaling with a single environment variable to perform only the automatic mixed-precision optimization.
It accomplishes this by automatically rewriting all computation graphs with the necessary operations to enable mixed precision training and automatic loss scaling.

For information about:
* How to train using mixed precision, see the [Mixed Precision Training](https://arxiv.org/abs/1710.03740) paper and [Training With Mixed Precision](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html) documentation.
* Techniques used for mixed precision training, see the [Mixed-Precision Training of Deep Neural Networks](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) blog.
* How to access and enable AMP for TensorFlow, see [Using TF-AMP](https://docs.nvidia.com/deeplearning/dgx/tensorflow-user-guide/index.html#tfamp) from the TensorFlow User Guide.

#### Enabling mixed precision

Mixed precision is enabled in TensorFlow by using the Automatic Mixed Precision (TF-AMP) extension, which casts variables to half-precision upon retrieval while storing variables in single-precision format. Furthermore, to preserve small gradient magnitudes in backpropagation, a [loss scaling](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html#lossscaling) step must be included when applying gradients. In TensorFlow, loss scaling can be applied statically, by using a simple multiplication of the loss by a constant value, or automatically, by TF-AMP. Automatic mixed precision makes all the adjustments internally in TensorFlow, providing two benefits over manual operations. First, programmers need not modify network model code, reducing development and maintenance effort. Second, using AMP maintains forward and backward compatibility with all the APIs for defining and running TensorFlow models.

To enable mixed precision, you can simply add the values to the environment variables inside your training script:
- Enable TF-AMP graph rewrite:
  ```
  os.environ["TF_ENABLE_AUTO_MIXED_PRECISION_GRAPH_REWRITE"] = "1"
  ```

- Enable Automated Mixed Precision:
  ```
  os.environ['TF_ENABLE_AUTO_MIXED_PRECISION'] = '1'
  ```

#### Enabling TF32

TensorFloat-32 (TF32) is the new math mode in [NVIDIA A100](https://www.nvidia.com/en-us/data-center/a100/) GPUs for handling the matrix math also called tensor operations. TF32 running on Tensor Cores in A100 GPUs can provide up to 10x speedups compared to single-precision floating-point math (FP32) on Volta GPUs.

TF32 Tensor Cores can speed up networks using FP32, typically with no loss of accuracy. It is more robust than FP16 for models which require high dynamic range for weights or activations. For more information, refer to the [TensorFloat-32 in the A100 GPU Accelerates AI Training, HPC up to 20x](https://blogs.nvidia.com/blog/2020/05/14/tensorfloat-32-precision-format/) blog post.

TF32 is supported in the NVIDIA Ampere GPU architecture and is enabled by default.

## Setup

The following section lists the requirements in order to start training the SSD320 v1.2 model.

### Requirements

This repository contains a `Dockerfile` which extends the TensorFlow NGC container and encapsulates some dependencies.
Aside from these dependencies, ensure you have the following software:
* [NVIDIA Docker](https://github.com/NVIDIA/nvidia-docker)
* [TensorFlow 20.06-py3](https://ngc.nvidia.com/catalog/containers/nvidia:tensorflow) (or later) NGC container
* GPU-based architecture:
    * [NVIDIA Volta](https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/)
    * [NVIDIA Turing](https://www.nvidia.com/en-us/geforce/turing/)
    * [NVIDIA Ampere architecture](https://www.nvidia.com/en-us/data-center/nvidia-ampere-gpu-architecture/)

For more information about how to get started with NGC containers, see the following sections from the NVIDIA GPU Cloud Documentation and the Deep Learning Documentation:
* [Getting Started Using NVIDIA GPU Cloud](https://docs.nvidia.com/ngc/ngc-getting-started-guide/index.html)
* [Accessing And Pulling From The NGC Container Registry](https://docs.nvidia.com/deeplearning/dgx/user-guide/index.html#accessing_registry)
* [Running TensorFlow](https://docs.nvidia.com/deeplearning/dgx/tensorflow-release-notes/running.html#running)

## Quick Start Guide

To train your model using mixed precision with Tensor Cores, or using FP32/TF32, perform the following steps using the default parameters of the SSD320 v1.2 model on the [COCO 2017](http://cocodataset.org/#download) dataset.

### 1. Clone the repository.
```
git clone https://github.com/NVIDIA/DeepLearningExamples
cd DeepLearningExamples/TensorFlow/Detection/SSD
```

### 2. Build the SSD320 v1.2 TensorFlow NGC container.
```
docker build . -t nvidia_ssd
```

### 3. Download and preprocess the dataset.

Extract the COCO 2017 dataset with:
```
download_all.sh nvidia_ssd <data_dir_path> <checkpoint_dir_path>
```

Data will be downloaded, preprocessed to the tfrecords format and saved in the `<data_dir_path>` directory (on the host). Moreover, the script will download a pre-trained ResNet-50 checkpoint to the `<checkpoint_dir_path>` directory.

### 4. Launch the NGC container to run training/inference.
```
nvidia-docker run --rm -it --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 -v <data_dir_path>:/data/coco2017_tfrecords -v <checkpoint_dir_path>:/checkpoints --ipc=host nvidia_ssd
```

### 5. Start training.

The `./examples` directory provides several sample scripts for various GPU settings that act as wrappers around the `object_detection/model_main.py` script. The example scripts accept the following arguments:
- A path to a directory for checkpoints
- A path to a directory for configs
- Additional arguments to `object_detection/model_main.py`

To run training on 8 GPUs with Tensor Core acceleration, saving checkpoints in the `/checkpoints` directory, run:
```
bash ./examples/SSD320_FP16_8GPU.sh /checkpoints
```

### 6. Start validation/evaluation.

The `model_main.py` training script automatically runs validation during training. The results from the validation are printed to `stdout`.

Pycocotools' open-sourced scripts provide a consistent way to evaluate models on the COCO dataset. We are using these scripts during validation to measure the model's performance in the AP metric.
Metrics below are evaluated using pycocotools' methodology, in the following format:
```
 Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.273
 Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.423
 Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.291
 Average Precision  (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.024
 Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.218
 Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.451
 Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.257
 Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.398
 Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.427
 Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.070
 Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.418
 Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.645
```

The metric reported in our results is present in the first row.

To evaluate a checkpointed model saved in the previous step, you can use a script from the `examples` directory. To run inference with Tensor Core acceleration, run:
```
bash examples/SSD320_evaluate.sh <path to checkpoint>
```

## Advanced

The following sections provide greater details of the dataset, running training and inference, and the training results.

### Scripts and sample code

* `Dockerfile`: a container with the basic set of dependencies to run SSD

In the `model/research/object_detection` directory, the most important files are:
* `model_main.py`: serves as the entry point to launch the training and inference
* `models/ssd_resnet_v1_fpn_feature_extractor.py`: implementation of the model
* `metrics/coco_tools.py`: implementation of the mAP metric
* `utils/exp_utils.py`: utility functions for running training and benchmarking

### Parameters

The complete list of available parameters for the `model/research/object_detection/model_main.py` script contains:
```
./object_detection/model_main.py:
  --[no]allow_xla: Enable XLA compilation
    (default: 'false')
  --checkpoint_dir: Path to directory holding a checkpoint. If `checkpoint_dir` is provided, this binary operates
    in eval-only mode, writing resulting metrics to `model_dir`.
  --eval_count: How many times the evaluation should be run
    (default: '1') (an integer)
  --[no]eval_training_data: If training data should be evaluated for this job. Note that one can only use this
    in eval-only mode, and `checkpoint_dir` must be supplied.
    (default: 'false')
  --hparams_overrides: Hyperparameter overrides, represented as a string containing comma-separated
    hparam_name=value pairs.
  --model_dir: Path to output model directory where event and checkpoint files will be written.
  --num_train_steps: Number of train steps.
    (an integer)
  --pipeline_config_path: Path to pipeline config file.
  --raport_file: Path to DLLogger json
    (default: 'summary.json')
  --[no]run_once: If running in eval-only mode, whether to run just one round of eval vs running continuously (default).
    (default: 'false')
  --sample_1_of_n_eval_examples: Will sample one of every n eval input examples, where n is provided.
    (default: '1') (an integer)
  --sample_1_of_n_eval_on_train_examples: Will sample one of every n train input examples for evaluation, where n
    is provided. This is only used if `eval_training_data` is True.
    (default: '5') (an integer)
```

### Command line options

The SSD model training is conducted by the `model_main.py` script from the object_detection library. Our experiments were done with the settings described in the `examples` directory. If you would like to get more details about the available arguments, please run:
```
python object_detection/model_main.py --help
```

### Getting the data

The SSD320 v1.2 model was trained on the COCO 2017 dataset. The val2017 validation set was used as a validation dataset.

This repository contains the `download_all.sh` script, which automatically downloads the training, validation and test datasets and preprocesses them to the tfrecords format. By default, data will be downloaded to the `/data/coco2017_tfrecords` directory.

### Training process

Training the SSD model is implemented in the `object_detection/model_main.py` script.

All training parameters are set in the config files. Because evaluation is relatively time-consuming, it does not run every epoch. By default, evaluation is executed only once at the end of the training. The model is evaluated using pycocotools distributed with the COCO dataset. The number of evaluations can be changed using the `eval_count` parameter.

To run training with Tensor Cores, use the `./examples/SSD320_FP16_{1,4,8}GPU.sh` scripts. For more details, see the [Enabling mixed precision](#enabling-mixed-precision) section below.

#### Data preprocessing

Before we feed data to the model, both during training and inference, we perform:
* Normalization
* Encoding bounding boxes
* Resize to 320x320

A short sketch of the resize and normalization steps appears at the end of this section.

#### Data augmentation

During training we perform the following augmentation techniques:
* Random crop
* Random horizontal flip
* Color jitter

### Enabling mixed precision

[Mixed precision](https://arxiv.org/abs/1710.03740) training offers significant computational speedup by performing operations in half-precision format, while storing minimal information in single-precision to retain as much information as possible in critical parts of the network. Since the introduction of [tensor cores](https://developer.nvidia.com/tensor-cores) in the Volta and Turing architectures, significant training speedups are experienced by switching to mixed precision -- up to 3x overall speedup on the most arithmetically intense model architectures. Using [mixed precision](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html) training previously required two steps:
1. Porting the model to use the FP16 data type where appropriate.
2. Manually adding loss scaling to preserve small gradient values.

This can now be achieved using Automatic Mixed Precision (AMP) for TensorFlow to enable the full [mixed precision methodology](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html#tensorflow) in your existing TensorFlow model code. AMP enables mixed precision training on Volta and Turing GPUs automatically. The TensorFlow framework code makes all necessary model changes internally.

In TF-AMP, the computational graph is optimized to use as few casts as necessary and maximize the use of FP16, and the loss scaling is automatically applied inside of supported optimizers. AMP can be configured to work with the existing `tf.contrib` loss scaling manager by disabling the AMP scaling with a single environment variable to perform only the automatic mixed-precision optimization. It accomplishes this by automatically rewriting all computation graphs with the necessary operations to enable mixed precision training and automatic loss scaling.

For information about:
- How to train using mixed precision, see the [Mixed Precision Training](https://arxiv.org/abs/1710.03740) paper and [Training With Mixed Precision](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html) documentation.
- How to access and enable AMP for TensorFlow, see [Using TF-AMP](https://docs.nvidia.com/deeplearning/dgx/tensorflow-user-guide/index.html#tfamp) from the TensorFlow User Guide.
- Techniques used for mixed precision training, see the [Mixed-Precision Training of Deep Neural Networks](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) blog.
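As referenced under Data preprocessing above, a minimal sketch of the resize and normalization steps using TF1-style ops (an illustration under stated assumptions, not the pipeline's actual code; the normalization range is the usual [-1, 1] convention of SSD feature extractors):

```python
import tensorflow as tf

def preprocess(image):
    # Resize to the fixed 320x320 input resolution used by SSD320.
    image = tf.image.resize_images(image, [320, 320])
    # Scale pixel values from [0, 255] to [-1, 1].
    return (2.0 / 255.0) * tf.to_float(image) - 1.0
```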
## Performance

The performance measurements in this document were conducted at the time of publication and may not reflect the performance achieved from NVIDIA's latest software release. For the most up-to-date performance measurements, go to [NVIDIA Data Center Deep Learning Product Performance](https://developer.nvidia.com/deep-learning-performance-training-inference).

### Benchmarking

The following section shows how to run benchmarks measuring the model performance in training and inference modes.

#### Training performance benchmark

The training benchmark was run in various scenarios on a V100 16GB GPU. For each scenario, the batch size was set to 32. To benchmark training, run:
```
bash examples/SSD320_{PREC}_{NGPU}GPU_BENCHMARK.sh
```

Where `{NGPU}` defines the number of GPUs used in the benchmark, and `{PREC}` defines the precision. The benchmark runs training with only 1200 steps and computes the average training speed of the last 300 steps.

#### Inference performance benchmark

The inference benchmark was run with various batch sizes on a V100 16GB GPU. For inference, we use a single-GPU setting. Examples are taken from the validation dataset. To benchmark inference, run:
```
bash examples/SSD320_FP{16,32}_inference.sh --batch_size <batch size> --checkpoint_dir <path to checkpoint>
```

The batch size for the inference benchmark is controlled by the `--batch_size` argument, while the checkpoint is provided to the script with the `--checkpoint_dir` argument. The benchmark script provides extra arguments for extra control over the experiment. We were using default values for the extra arguments during the experiments. For more details about them, please run:
```
bash examples/SSD320_FP16_inference.sh --help
```

### Results

The following sections provide details on how we achieved our performance and accuracy in training and inference.

#### Training accuracy results

##### Training accuracy: NVIDIA DGX A100 (8x A100 40GB)

Our results were obtained by running the `./examples/SSD320_FP{16,32}_{1,4,8}GPU.sh` script in the TensorFlow-20.06-py3 NGC container on NVIDIA DGX A100 (8x A100 40GB) GPUs. All the results are obtained with batch size set to 32.
| **Number of GPUs** | **Mixed precision mAP** | **Training time with mixed precision** | **TF32 mAP** | **Training time with TF32** |
|:------------------:|:-----------------------:|:--------------------------------------:|:------------:|:---------------------------:|
| 1 | 0.279 | 4h 48min | 0.280 | 6h 40min |
| 4 | 0.280 | 1h 20min | 0.279 | 1h 53min |
| 8 | 0.281 | 0h 53min | 0.282 | 1h 05min |

##### Training accuracy: NVIDIA DGX-1 (8x V100 16GB)

Our results were obtained by running the `./examples/SSD320_FP{16,32}_{1,4,8}GPU.sh` script in the TensorFlow-20.06-py3 NGC container on NVIDIA DGX-1 with 8x V100 16GB GPUs. All the results are obtained with batch size set to 32.

| **Number of GPUs** | **Mixed precision mAP** | **Training time with mixed precision** | **FP32 mAP** | **Training time with FP32** |
|:------------------:|:-----------------------:|:--------------------------------------:|:------------:|:---------------------------:|
| 1 | 0.279 | 7h 36min | 0.278 | 10h 38min |
| 4 | 0.277 | 2h 18min | 0.279 | 2h 58min |
| 8 | 0.280 | 1h 28min | 0.282 | 1h 55min |

Here are example graphs of TF32, FP32 and FP16 training on an 8-GPU configuration:

![TrainingLoss](./img/training_loss.png)

#### Training performance results

##### Training performance: NVIDIA DGX A100 (8x A100 40GB)

Our results were obtained by running:
```
bash examples/SSD320_FP*GPU_BENCHMARK.sh
```
scripts in the TensorFlow-20.06-py3 NGC container on NVIDIA DGX A100 (8x A100 40GB) GPUs.

| **Number of GPUs** | **Batch size per GPU** | **Mixed precision img/s** | **TF32 img/s** | **Speed-up with mixed precision** | **Multi-gpu weak scaling with mixed precision** | **Multi-gpu weak scaling with TF32** |
|:------------------:|:----------------------:|:-------------------------:|:--------------:|:---------------------------------:|:-----------------------------------------------:|:------------------------------------:|
| 1 | 32 | 180.55 | 123.48 | 1.46 | 1.00 | 1.00 |
| 4 | 32 | 624.35 | 449.17 | 1.39 | 3.46 | 3.64 |
| 8 | 32 | 1008.46 | 779.96 | 1.29 | 5.59 | 6.32 |

To achieve the same results, follow the [Quick start guide](#quick-start-guide) outlined above.

Those results can be improved when [XLA](https://www.tensorflow.org/xla) is used in conjunction with mixed precision, delivering up to 2x speedup over FP32 on a single GPU. However, XLA is still considered experimental.

##### Training performance: NVIDIA DGX-1 (8x V100 16GB)

Our results were obtained by running:
```
bash examples/SSD320_FP*GPU_BENCHMARK.sh
```
scripts in the TensorFlow-20.06-py3 NGC container on NVIDIA DGX-1 with V100 16GB GPUs.

| **Number of GPUs** | **Batch size per GPU** | **Mixed precision img/s** | **FP32 img/s** | **Speed-up with mixed precision** | **Multi-gpu weak scaling with mixed precision** | **Multi-gpu weak scaling with FP32** |
|:------------------:|:----------------------:|:-------------------------:|:--------------:|:---------------------------------:|:-----------------------------------------------:|:------------------------------------:|
| 1 | 32 | 127.96 | 84.96 | 1.51 | 1.00 | 1.00 |
| 4 | 32 | 396.38 | 283.30 | 1.40 | 3.10 | 3.33 |
| 8 | 32 | 676.83 | 501.30 | 1.35 | 5.29 | 5.90 |

To achieve the same results, follow the [Quick start guide](#quick-start-guide) outlined above.

Those results can be improved when [XLA](https://www.tensorflow.org/xla) is used in conjunction with mixed precision, delivering up to 2x speedup over FP32 on a single GPU (~179 img/s). However, XLA is still considered experimental.
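The speed-up and weak-scaling columns are plain throughput ratios; for example, from the single-GPU DGX A100 row above:

```python
# Mixed precision vs. TF32 throughput, 1x A100 (values from the table above).
print(round(180.55 / 123.48, 2))   # 1.46 -- speed-up with mixed precision
print(round(624.35 / 180.55, 2))   # 3.46 -- 4-GPU weak scaling, mixed precision
```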
#### Inference performance results

##### Inference performance: NVIDIA DGX A100 (1x A100 40GB)

Our results were obtained by running the `examples/SSD320_FP{16,32}_inference.sh` script in the TensorFlow-20.06-py3 NGC container on an NVIDIA DGX A100 (1x A100 40GB) GPU.

FP16

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 40.88 | 24.46 | 25.76 | 26.47 | 27.91 |
| 2 | 49.26 | 40.60 | 42.09 | 42.61 | 45.26 |
| 4 | 58.81 | 68.01 | 73.12 | 76.02 | 80.38 |
| 8 | 69.13 | 115.73 | 121.58 | 123.87 | 129.00 |
| 16 | 78.10 | 204.85 | 212.40 | 216.38 | 225.80 |
| 32 | 76.19 | 420.00 | 437.24 | 443.21 | 479.80 |
| 64 | 77.92 | 821.37 | 840.82 | 867.62 | 1204.64 |

TF32

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 36.93 | 27.08 | 29.10 | 29.89 | 32.24 |
| 2 | 44.03 | 45.42 | 48.67 | 49.56 | 51.12 |
| 4 | 54.65 | 73.20 | 77.50 | 78.89 | 85.81 |
| 8 | 62.96 | 127.06 | 137.04 | 141.64 | 152.92 |
| 16 | 71.48 | 223.83 | 231.36 | 233.35 | 247.51 |
| 32 | 73.11 | 437.71 | 450.86 | 455.14 | 467.11 |
| 64 | 73.74 | 867.88 | 898.99 | 912.07 | 1077.13 |

To achieve the same results, follow the [Quick start guide](#quick-start-guide) outlined above.

##### Inference performance: NVIDIA DGX-1 (1x V100 16GB)

Our results were obtained by running the `examples/SSD320_FP{16,32}_inference.sh` script in the TensorFlow-20.06-py3 NGC container on NVIDIA DGX-1 with a 1x V100 16GB GPU.

FP16

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 28.34 | 35.29 | 38.09 | 39.06 | 41.07 |
| 2 | 41.21 | 48.54 | 52.77 | 54.45 | 57.10 |
| 4 | 55.41 | 72.19 | 75.44 | 76.99 | 84.15 |
| 8 | 61.83 | 129.39 | 133.37 | 136.89 | 145.69 |
| 16 | 66.36 | 241.12 | 246.05 | 249.47 | 259.79 |
| 32 | 65.01 | 492.21 | 510.01 | 516.45 | 526.83 |
| 64 | 64.75 | 988.47 | 1012.11 | 1026.19 | 1290.54 |

FP32

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 29.15 | 34.31 | 36.26 | 37.63 | 39.95 |
| 2 | 41.20 | 48.54 | 53.08 | 54.47 | 57.32 |
| 4 | 50.72 | 78.86 | 82.49 | 84.08 | 92.15 |
| 8 | 55.72 | 143.57 | 147.20 | 148.92 | 152.44 |
| 16 | 59.41 | 269.32 | 278.30 | 281.06 | 286.54 |
| 32 | 59.81 | 534.99 | 542.49 | 551.58 | 572.16 |
| 64 | 58.93 | 1085.96 | 1111.20 | 1118.21 | 1253.74 |

To achieve the same results, follow the [Quick start guide](#quick-start-guide) outlined above.

##### Inference performance: NVIDIA T4

Our results were obtained by running the `examples/SSD320_FP{16,32}_inference.sh` script in the TensorFlow-20.06-py3 NGC container on an NVIDIA T4.
FP16

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 19.29 | 51.90 | 53.77 | 54.95 | 59.21 |
| 2 | 30.36 | 66.04 | 70.13 | 71.49 | 73.97 |
| 4 | 37.71 | 106.21 | 111.32 | 113.04 | 118.03 |
| 8 | 40.95 | 195.49 | 201.66 | 204.00 | 210.32 |
| 16 | 41.04 | 390.05 | 399.73 | 402.88 | 410.02 |
| 32 | 40.36 | 794.48 | 815.81 | 825.39 | 841.45 |
| 64 | 40.27 | 1590.98 | 1631.00 | 1642.22 | 1838.95 |

FP32

| **Batch size** | **Throughput Avg** | **Latency Avg** | **Latency 90%** | **Latency 95%** | **Latency 99%** |
|------------|----------------|-------|-------|-------|-------|
| 1 | 14.30 | 69.99 | 72.30 | 73.29 | 76.35 |
| 2 | 20.04 | 99.87 | 104.50 | 106.03 | 108.15 |
| 4 | 25.01 | 159.99 | 163.00 | 164.13 | 168.63 |
| 8 | 28.42 | 281.58 | 286.57 | 289.01 | 294.37 |
| 16 | 32.56 | 492.08 | 501.98 | 505.29 | 509.95 |
| 32 | 34.14 | 939.11 | 961.35 | 968.26 | 983.77 |
| 64 | 33.47 | 1915.36 | 1971.90 | 1992.24 | 2030.54 |

To achieve the same results, follow the [Quick start guide](#quick-start-guide) outlined above.

## Release notes

### Changelog

April 2023
* Ceased maintenance of this model in TensorFlow1

June 2020
* Updated performance tables to include A100 results

May 2019
* Test scripts updated

March 2019
* Initial release

### Known issues

There are no known issues with this model.
TensorFlow/Detection/SSD/models/research/object_detection/legacy
legacy
eval
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

r"""Evaluation executable for detection models.

This executable is used to evaluate DetectionModels. There are two ways of
configuring the eval job.

1) A single pipeline_pb2.TrainEvalPipelineConfig file may be specified instead.
In this mode, the --eval_training_data flag may be given to force the pipeline
to evaluate on training data instead.

Example usage:
    ./eval \
        --logtostderr \
        --checkpoint_dir=path/to/checkpoint_dir \
        --eval_dir=path/to/eval_dir \
        --pipeline_config_path=pipeline_config.pbtxt

2) Three configuration files may be provided: a model_pb2.DetectionModel
configuration file to define what type of DetectionModel is being evaluated, an
input_reader_pb2.InputReader file to specify what data the model is evaluating
and an eval_pb2.EvalConfig file to configure evaluation parameters.

Example usage:
    ./eval \
        --logtostderr \
        --checkpoint_dir=path/to/checkpoint_dir \
        --eval_dir=path/to/eval_dir \
        --eval_config_path=eval_config.pbtxt \
        --model_config_path=model_config.pbtxt \
        --input_config_path=eval_input_config.pbtxt
"""
import functools
import os
import tensorflow as tf

from object_detection.builders import dataset_builder
from object_detection.builders import graph_rewriter_builder
from object_detection.builders import model_builder
from object_detection.legacy import evaluator
from object_detection.utils import config_util
from object_detection.utils import label_map_util

tf.logging.set_verbosity(tf.logging.INFO)

flags = tf.app.flags
flags.DEFINE_boolean('eval_training_data', False,
                     'If training data should be evaluated for this job.')
flags.DEFINE_string(
    'checkpoint_dir', '',
    'Directory containing checkpoints to evaluate, typically '
    'set to `train_dir` used in the training job.')
flags.DEFINE_string('eval_dir', '', 'Directory to write eval summaries to.')
flags.DEFINE_string(
    'pipeline_config_path', '',
    'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
    'file. If provided, other configs are ignored.')
flags.DEFINE_string('eval_config_path', '',
                    'Path to an eval_pb2.EvalConfig config file.')
flags.DEFINE_string('input_config_path', '',
                    'Path to an input_reader_pb2.InputReader config file.')
flags.DEFINE_string('model_config_path', '',
                    'Path to a model_pb2.DetectionModel config file.')
flags.DEFINE_boolean(
    'run_once', False, 'Option to only run a single pass of '
    'evaluation. Overrides the `max_evals` parameter in the '
    'provided config.')
FLAGS = flags.FLAGS


@tf.contrib.framework.deprecated(None, 'Use object_detection/model_main.py.')
def main(unused_argv):
  assert FLAGS.checkpoint_dir, '`checkpoint_dir` is missing.'
  assert FLAGS.eval_dir, '`eval_dir` is missing.'
  tf.gfile.MakeDirs(FLAGS.eval_dir)
  if FLAGS.pipeline_config_path:
    configs = config_util.get_configs_from_pipeline_file(
        FLAGS.pipeline_config_path)
    tf.gfile.Copy(
        FLAGS.pipeline_config_path,
        os.path.join(FLAGS.eval_dir, 'pipeline.config'),
        overwrite=True)
  else:
    configs = config_util.get_configs_from_multiple_files(
        model_config_path=FLAGS.model_config_path,
        eval_config_path=FLAGS.eval_config_path,
        eval_input_config_path=FLAGS.input_config_path)
    for name, config in [('model.config', FLAGS.model_config_path),
                         ('eval.config', FLAGS.eval_config_path),
                         ('input.config', FLAGS.input_config_path)]:
      tf.gfile.Copy(config, os.path.join(FLAGS.eval_dir, name), overwrite=True)

  model_config = configs['model']
  eval_config = configs['eval_config']
  input_config = configs['eval_input_config']
  if FLAGS.eval_training_data:
    input_config = configs['train_input_config']

  model_fn = functools.partial(
      model_builder.build, model_config=model_config, is_training=False)

  def get_next(config):
    return dataset_builder.make_initializable_iterator(
        dataset_builder.build(config)).get_next()

  create_input_dict_fn = functools.partial(get_next, input_config)

  categories = label_map_util.create_categories_from_labelmap(
      input_config.label_map_path)

  if FLAGS.run_once:
    eval_config.max_evals = 1

  graph_rewriter_fn = None
  if 'graph_rewriter_config' in configs:
    graph_rewriter_fn = graph_rewriter_builder.build(
        configs['graph_rewriter_config'], is_training=False)

  evaluator.evaluate(
      create_input_dict_fn,
      model_fn,
      eval_config,
      categories,
      FLAGS.checkpoint_dir,
      FLAGS.eval_dir,
      graph_hook_fn=graph_rewriter_fn)


if __name__ == '__main__':
  tf.app.run()
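One pattern worth noting in the script above: configuration is bound into callables with `functools.partial`, so `evaluator.evaluate` can call `model_fn` and `create_input_dict_fn` without knowing about flags or configs. A minimal sketch of that binding pattern; `build` and `get_next` below are hypothetical stand-ins, not the real object_detection builders:

import functools

# Hypothetical stand-ins for model_builder.build and the dataset iterator;
# only the partial-binding pattern from eval.py is illustrated.
def build(model_config, is_training):
    return 'model({}, training={})'.format(model_config, is_training)

def get_next(config):
    return 'next batch from {}'.format(config)

model_fn = functools.partial(build, model_config='ssd.config', is_training=False)
create_input_dict_fn = functools.partial(get_next, 'eval_input.config')

print(model_fn())              # model(ssd.config, training=False)
print(create_input_dict_fn())  # next batch from eval_input.config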
CUDA-Optimized/FastSpeech/fastspeech/trt/plugins/repeat
repeat
RepeatPlugin
// Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//  * Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
//  * Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//  * Neither the name of the NVIDIA CORPORATION nor the
//    names of its contributors may be used to endorse or promote products
//    derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "RepeatPlugin.h"
#include "cuda_fp16.h"

#include <thread>
#include <cub/cub.cuh>

#define ck(call) check(call, __LINE__, __FILE__)

inline bool check(cudaError_t e, int iLine, const char *szFile) {
    if (e != cudaSuccess) {
        cout << "CUDA runtime API error " << cudaGetErrorName(e)
             << " at line " << iLine << " in file " << szFile << endl;
        return false;
    }
    return true;
}

cudaDeviceProp getCudaDeviceProp() {
    cudaError_t error;
    cudaDeviceProp dev;
    int device;
    cudaGetDevice(&device);
    error = cudaGetDeviceProperties(&dev, device);
    if (error != cudaSuccess) {
        printf("Error: %s\n", cudaGetErrorString(error));
        exit(-1);
    }
    return dev;
}

// Exclusive prefix sum of the per-frame repeat counts; the result is each
// input frame's starting offset along the output time axis.
__global__ void ComputeOffset(float *pRepeatCnt, int *pOffset) {
    int x = blockIdx.x * blockDim.x + threadIdx.x;
    const int maxInputLength = 1024;
    cub::BlockScan<int, maxInputLength>().ExclusiveSum(
        static_cast<int>(pRepeatCnt[x]), pOffset[x]);
}

// Copies input frame (b, t) into output rows offset..offset+duration within
// batch b, clipped at maxOutputLength.
template<typename T>
__global__ void RepeatTensor(T *pOut, T *pIn, float *pRepeatCnt, int *pOffset,
                             int maxOutputLength) {
    int b = blockIdx.x;
    int t = blockIdx.y;
    int h = threadIdx.x;

    int dTime = gridDim.y;
    int dHid = blockDim.x;

    int offset_time = pOffset[b * dTime + t];
    int duration = static_cast<int>(pRepeatCnt[b * dTime + t]);

    T in = pIn[(b * dTime + t) * dHid + h];
    for (int i = offset_time; i < min(offset_time + duration, maxOutputLength); i++) {
        int offset_batch = b * maxOutputLength;
        pOut[(offset_batch + i) * dHid + h] = in;
    }
}

int RepeatPlugin::enqueue(int nBatch, const void * const *inputs, void **outputs,
                          void *workspace, cudaStream_t stream) {
    int dTime = m.inputDim.d[0];
    int dHid = m.inputDim.d[1];
    int maxOutputLength = m.maxOutputLength;

#ifndef NDEBUG
    cudaDeviceProp dev = getCudaDeviceProp();
    assert(dHid <= dev.maxThreadsPerBlock);
#endif

    float *pRepeatCnt = (float *)inputs[1];

    // get output time dim offset
    int *pOffset;
    ck(cudaMalloc(&pOffset, nBatch * dTime * sizeof(int)));
    ComputeOffset<<<nBatch, dTime>>>(pRepeatCnt, pOffset);

    if (m.dataType == DataType::kFLOAT || m.dataType == DataType::kINT32) {
        // std::cout << "[RepeatPlugin] Running kernel in fp32" << std::endl;
        float *pIn = (float *)inputs[0];
        float *pOut = (float *)outputs[0];
        dim3 dimGrid(nBatch, dTime);
        dim3 dimBlock(dHid);
        RepeatTensor<<<dimGrid, dimBlock>>>(pOut, pIn, pRepeatCnt, pOffset, maxOutputLength);
    } else if (m.dataType == DataType::kHALF) {
        // std::cout << "[RepeatPlugin] Running kernel in fp16" << std::endl;
        __half *pIn = (__half *)inputs[0];
        __half *pOut = (__half *)outputs[0];
        dim3 dimGrid(nBatch, dTime);
        dim3 dimBlock(dHid);
        RepeatTensor<<<dimGrid, dimBlock>>>(pOut, pIn, pRepeatCnt, pOffset, maxOutputLength);
    }
    return 0;
}

REGISTER_TENSORRT_PLUGIN(RepeatPluginCreator);
TensorFlow/Detection/SSD/models/research/slim/datasets
datasets
imagenet_2012_validation_synset_labels
n01751748 n09193705 n02105855 n04263257 n03125729 n01735189 n02346627 n02776631 n03794056 n02328150 n01917289 n02125311 n02484975 n04065272 n03496892 n02066245 n01914609 n01616318 n02971356 n03126707 n02346627 n02091244 n07742313 n03956157 n01616318 n04380533 n02114548 n02089973 n01729977 n04435653 n02280649 n03444034 n02077923 n09835506 n03478589 n04532106 n01644900 n02666196 n04141327 n01773797 n03125729 n04049303 n02006656 n02097209 n02111277 n03950228 n03393912 n02089973 n03930630 n02640242 n01828970 n01632777 n04372370 n03485794 n02443114 n02930766 n02112018 n13040303 n04485082 n03482405 n02963159 n02093859 n01910747 n01693334 n04371430 n02526121 n01871265 n04532106 n04482393 n04370456 n02927161 n02074367 n01608432 n02966193 n01795545 n02791270 n02087394 n02116738 n02091635 n02895154 n09193705 n02088094 n04200800 n01737021 n02974003 n03032252 n02483708 n01632458 n02992529 n01698640 n02114548 n02497673 n02480855 n04147183 n02487347 n03895866 n02325366 n02033041 n07745940 n02415577 n02951585 n02087394 n04485082 n04505470 n02097658 n04591157 n01770081 n02992211 n03691459 n03594734 n01983481 n03937543 n02105412 n03843555 n02091244 n07831146 n03710637 n03733281 n03782006 n03733131 n03933933 n02980441 n04409515 n02606052 n02226429 n02883205 n02422699 n01614925 n07697537 n02123394 n04252077 n03337140 n02117135 n02107142 n04037443 n02397096 n03187595 n02319095 n07932039 n03372029 n02088466 n02319095 n04125021 n03954731 n09421951 n04487394 n02113624 n03843555 n03485407 n09332890 n03642806 n03710193 n01677366 n01950731 n07714990 n02114855 n02119022 n04086273 n04201297 n03733281 n02100877 n03016953 n03733805 n03063599 n07714990 n03854065 n04149813 n03786901 n03467068 n02087046 n04326547 n02100735 n03775546 n02111500 n02814533 n02097047 n02027492 n02109961 n02389026 n02105855 n02445715 n03259280 n07711569 n03710637 n03670208 n02128757 n04467665 n02114855 n01873310 n03476684 n02093428 n03891251 n02859443 n04125021 n01978287 n02643566 n07697537 n01560419 n03290653 n13037406 n03891332 n02883205 n02106382 n02672831 n04330267 n02489166 n02058221 n03584829 n07565083 n03125729 n02123597 n04536866 n02965783 n09428293 n02965783 n11879895 n01560419 n01775062 n03595614 n02110958 n03709823 n03777754 n02951585 n02100877 n01629819 n02909870 n02101388 n02091244 n01667114 n03998194 n01986214 n04192698 n02128757 n02793495 n09256479 n01443537 n02089973 n01981276 n02837789 n03888605 n03201208 n02480855 n03814639 n04090263 n01986214 n02415577 n01534433 n02093256 n03134739 n03016953 n12620546 n03937543 n02815834 n03776460 n10565667 n03207743 n02992529 n01631663 n03729826 n04033995 n04462240 n01443537 n02091831 n03874293 n03874599 n04238763 n07584110 n02749479 n02110185 n09193705 n04311004 n02788148 n02445715 n06874185 n04074963 n01631663 n03803284 n01828970 n02096437 n04554684 n03599486 n03595614 n02123394 n04515003 n04591157 n04560804 n02794156 n03344393 n02687172 n04328186 n04479046 n03967562 n01440764 n04465501 n03457902 n04532670 n01688243 n01749939 n01768244 n02091831 n02321529 n02939185 n02129604 n12985857 n03485794 n02408429 n01443537 n03590841 n07697537 n04154565 n03443371 n02514041 n09468604 n03769881 n02787622 n02526121 n03888605 n01622779 n01872401 n07745940 n03085013 n02445715 n02120505 n01751748 n04141327 n02443484 n02089078 n01608432 n01514668 n03160309 n04070727 n07715103 n02110958 n03976657 n03902125 n02909870 n01740131 n04532106 n03197337 n02493509 n10148035 n02172182 n02437616 n03062245 n04286575 n03018349 n02951358 n02130308 n04277352 n02096585 n04589890 n02965783 n02978881 n02804414 n02112137 
n02007558 n03670208 n02894605 n03657121 n03876231 n02165105 n01669191 n02011460 n03710193 n03796401 n02916936 n03492542 n03998194 n04552348 n01824575 n01917289 n03461385 n03874293 n03272010 n02099712 n02999410 n04179913 n07831146 n02096177 n04350905 n04507155 n03743016 n02105505 n03649909 n03680355 n01910747 n03529860 n02787622 n02012849 n02011460 n02094114 n02950826 n02105855 n09288635 n01773797 n01774750 n04409515 n02497673 n02113799 n02786058 n02443484 n02981792 n03095699 n01664065 n02092002 n07711569 n02219486 n13133613 n02114548 n03529860 n02097298 n13133613 n04355933 n01537544 n01847000 n04428191 n02666196 n02268443 n03291819 n01828970 n04099969 n02747177 n07720875 n02088094 n02113624 n03710637 n03637318 n03942813 n02093859 n03794056 n02930766 n02930766 n04525038 n03796401 n03709823 n02097047 n04604644 n03938244 n01560419 n02097298 n02091635 n04136333 n07718747 n02417914 n03355925 n02445715 n02445715 n03495258 n04447861 n02111500 n03584829 n03977966 n04116512 n04019541 n04200800 n02408429 n02085936 n03992509 n02769748 n04613696 n07716906 n02085782 n07718472 n04398044 n03920288 n01860187 n03272010 n04008634 n04090263 n02028035 n01677366 n13037406 n04067472 n02095889 n04532670 n01582220 n03476684 n02395406 n04487394 n02443484 n02510455 n04550184 n02814860 n12144580 n03126707 n02486410 n02125311 n03777754 n03924679 n04613696 n07875152 n02058221 n03188531 n02777292 n02489166 n02066245 n04579432 n01630670 n02666196 n02091635 n02114548 n02356798 n03201208 n03240683 n03590841 n03018349 n02104029 n04251144 n10148035 n02169497 n02089867 n01734418 n04476259 n02843684 n04008634 n03400231 n02119022 n02137549 n03761084 n02490219 n03840681 n04346328 n01677366 n02102318 n04458633 n04476259 n04209239 n01795545 n10565667 n02114367 n02107574 n03032252 n02104365 n03133878 n04336792 n02112137 n03000684 n04553703 n02102480 n03825788 n01695060 n03250847 n07860988 n04310018 n02071294 n01945685 n01855672 n02037110 n03868863 n04229816 n12057211 n02408429 n02481823 n07716358 n04487394 n03662601 n02979186 n02910353 n04266014 n03895866 n04443257 n02917067 n04149813 n03041632 n02364673 n02999410 n04435653 n04228054 n02814860 n01531178 n03662601 n07880968 n04487081 n07614500 n03532672 n01807496 n02011460 n02074367 n04462240 n02977058 n02281406 n03041632 n04350905 n02788148 n02137549 n04562935 n04590129 n02093991 n03995372 n02111889 n04081281 n02133161 n02006656 n02107908 n04347754 n02950826 n02504013 n04560804 n02088364 n02128385 n02860847 n04399382 n02105412 n02115641 n07753592 n07880968 n03598930 n03724870 n02066245 n02128925 n04465501 n02094258 n02086646 n04141076 n04136333 n13133613 n02342885 n02281406 n03443371 n07613480 n04008634 n04141327 n04347754 n03314780 n02165456 n03930313 n04392985 n01872401 n04204238 n07831146 n02690373 n12144580 n02776631 n02877765 n02108089 n03532672 n03126707 n01560419 n02268853 n03691459 n03404251 n02364673 n02101556 n02326432 n03954731 n07831146 n03584254 n02012849 n03804744 n02128385 n01530575 n03933933 n04409515 n02823428 n01877812 n03920288 n02510455 n02112350 n03594945 n03642806 n02395406 n03452741 n02860847 n03673027 n02102040 n04505470 n04086273 n02099849 n01990800 n03781244 n04461696 n02106166 n04141076 n07717556 n02361337 n03976657 n03832673 n03109150 n01776313 n03788195 n03884397 n04019541 n01693334 n03633091 n02325366 n03623198 n02795169 n01744401 n01955084 n02002556 n07754684 n02174001 n02793495 n02095889 n02484975 n02094433 n09229709 n03207941 n02655020 n03773504 n04367480 n03933933 n01955084 n04355933 n13040303 n02786058 n04090263 n02101006 n02124075 n03720891 
n07749582 n04517823 n01534433 n04335435 n03661043 n02101556 n03785016 n03133878 n02113978 n02930766 n02783161 n03958227 n02441942 n02859443 n02096437 n02447366 n07742313 n07583066 n02110063 n03146219 n12998815 n03425413 n02123394 n03594734 n02006656 n02992211 n04442312 n03032252 n01608432 n02927161 n03485794 n07583066 n03347037 n01847000 n04557648 n03478589 n01530575 n02098105 n01755581 n03045698 n02028035 n03538406 n03956157 n01871265 n13044778 n02119789 n07875152 n02107908 n02791124 n03697007 n03207743 n02791270 n02865351 n03345487 n03976467 n03124043 n04252225 n02165105 n03314780 n04040759 n02730930 n02236044 n07873807 n02006656 n02514041 n03534580 n03179701 n04366367 n02138441 n03450230 n01943899 n07836838 n03691459 n04467665 n02115641 n01742172 n02795169 n02481823 n07583066 n02749479 n01665541 n04131690 n03769881 n02009229 n04487081 n02123159 n04542943 n07760859 n02097658 n02113799 n07932039 n02097474 n03793489 n02791124 n04591713 n01735189 n01631663 n02892767 n04458633 n02277742 n07697537 n03781244 n02791270 n03854065 n04356056 n07802026 n03733131 n01980166 n02174001 n07684084 n01981276 n03874293 n03146219 n02099267 n02018207 n04398044 n03832673 n02493509 n03478589 n06359193 n02971356 n02093754 n04487081 n03929855 n03485407 n01930112 n01592084 n02088238 n04613696 n03967562 n03814639 n04311174 n04286575 n03884397 n03534580 n03793489 n02106382 n03045698 n03661043 n03814906 n02669723 n03459775 n03785016 n04584207 n03657121 n03476991 n04243546 n04560804 n03788365 n01796340 n04019541 n03496892 n07711569 n03788195 n02133161 n04548362 n02113712 n03673027 n12144580 n02481823 n02132136 n03956157 n01532829 n04493381 n02094258 n03483316 n01770081 n02006656 n02871525 n01580077 n07730033 n02097474 n02093647 n02088466 n01795545 n07716906 n03481172 n01608432 n02097209 n01629819 n07695742 n02389026 n02977058 n04090263 n04522168 n02871525 n04258138 n02127052 n04476259 n03617480 n04273569 n03485794 n06794110 n03085013 n02974003 n02869837 n02086240 n01685808 n02088466 n03584829 n01514668 n02114367 n03447447 n04435653 n03065424 n01616318 n02841315 n02655020 n03496892 n04040759 n01496331 n02094258 n03787032 n02172182 n01693334 n02168699 n03793489 n07613480 n01824575 n01665541 n04065272 n02699494 n02526121 n01774750 n03126707 n04254777 n02325366 n01665541 n02007558 n01873310 n01734418 n03271574 n01776313 n01644373 n02486410 n02106662 n03125729 n02087394 n02094433 n07684084 n04532670 n01843383 n02835271 n12985857 n04485082 n02167151 n03394916 n01664065 n04286575 n03874293 n02699494 n01601694 n01582220 n02486261 n02268853 n03947888 n13040303 n03967562 n03602883 n01882714 n04505470 n02226429 n04522168 n02481823 n02108422 n03670208 n07718747 n01688243 n02747177 n07248320 n02328150 n02963159 n02117135 n03676483 n06596364 n01775062 n03724870 n03347037 n13133613 n02319095 n03944341 n02088238 n02110185 n01443537 n06794110 n02606052 n02113186 n02704792 n03692522 n03018349 n02095314 n04523525 n02356798 n04228054 n02108000 n04371430 n01770393 n04456115 n02110958 n01631663 n02708093 n02835271 n02807133 n02280649 n02277742 n03857828 n03452741 n03388043 n06596364 n04252225 n04458633 n01689811 n03935335 n01560419 n02500267 n02319095 n02412080 n02096437 n03814639 n03494278 n01518878 n02486261 n01629819 n04606251 n03787032 n01877812 n01773157 n02104365 n02113978 n02123394 n02966687 n01728920 n02916936 n01860187 n03255030 n02011460 n02087394 n02817516 n02085620 n02437616 n02606052 n03447721 n01773157 n02497673 n04380533 n02056570 n01917289 n12267677 n04325704 n02130308 n02730930 n03933933 n02981792 n07892512 n02112018 
n02398521 n02009912 n02002724 n02086079 n02100236 n03085013 n02837789 n02018795 n02106382 n02489166 n03937543 n02910353 n07836838 n15075141 n02877765 n03602883 n02233338 n13037406 n01580077 n04069434 n04371774 n03938244 n02326432 n03085013 n02804610 n04141975 n02484975 n02930766 n03000134 n02488702 n02113023 n02088632 n02783161 n02490219 n04505470 n02123394 n04357314 n02825657 n02493509 n03720891 n03673027 n03492542 n01739381 n02105056 n03481172 n03947888 n02099601 n02105505 n01514859 n07871810 n03445924 n12267677 n04536866 n03314780 n12768682 n02028035 n01980166 n02099601 n01981276 n07730033 n02909870 n04179913 n02089973 n02111277 n12057211 n01632458 n02123394 n04350905 n03937543 n02730930 n01795545 n02091244 n01632777 n03584829 n03709823 n02086646 n01824575 n03977966 n03417042 n02892201 n01806143 n02105855 n02115913 n03902125 n01774384 n07880968 n02112137 n09428293 n04116512 n02486410 n03930630 n04090263 n01843383 n07802026 n04429376 n02317335 n02027492 n01818515 n02086646 n02018207 n04371430 n03347037 n03014705 n04125021 n03764736 n02981792 n02114367 n04192698 n04330267 n03729826 n02607072 n02504458 n03769881 n02018207 n03929855 n04591157 n03947888 n04317175 n03125729 n01749939 n04399382 n02276258 n03598930 n02606052 n03089624 n02099601 n03770439 n02655020 n07745940 n02095314 n04336792 n04033995 n02112018 n02132136 n02860847 n03100240 n02966687 n02111129 n04273569 n04149813 n02092002 n03769881 n04599235 n03825788 n04118776 n04336792 n02115641 n01622779 n02909870 n02276258 n02977058 n02326432 n01608432 n03347037 n02978881 n02787622 n02093256 n02101556 n02100735 n02085782 n02342885 n03733281 n02085782 n03706229 n02002724 n13037406 n02422106 n07614500 n02113712 n04336792 n02486261 n02356798 n02268443 n04179913 n04277352 n02346627 n03089624 n02835271 n02086240 n04579432 n03180011 n04285008 n02408429 n04392985 n02091244 n02815834 n02834397 n04009552 n02488291 n03290653 n03325584 n03637318 n02730930 n02865351 n02119789 n03929855 n03676483 n04423845 n03874293 n03908618 n03598930 n02090379 n01944390 n04152593 n09288635 n02066245 n01768244 n03272010 n01531178 n03255030 n03676483 n02002556 n02749479 n02415577 n02403003 n07565083 n02981792 n01776313 n02097474 n02667093 n02096177 n03255030 n01819313 n02791124 n02279972 n04090263 n09193705 n04335435 n03733131 n03250847 n04263257 n02096585 n03976467 n02963159 n04613696 n04310018 n02107574 n03724870 n09428293 n02101006 n04372370 n03930630 n07584110 n01735189 n04599235 n02835271 n04330267 n02108915 n02110185 n07684084 n04204347 n02672831 n03742115 n04131690 n09428293 n04487394 n03710193 n09332890 n03478589 n04486054 n02951358 n09428293 n04596742 n01872401 n04505470 n04154565 n02666196 n02437616 n03724870 n02120079 n01828970 n03141823 n01698640 n03095699 n04099969 n02123045 n04482393 n04026417 n02110806 n04033901 n04041544 n02869837 n04136333 n02112350 n03388043 n03065424 n02128757 n04330267 n02879718 n02859443 n01968897 n01847000 n01871265 n02129165 n02408429 n04263257 n13054560 n02090379 n04553703 n03929660 n01990800 n03494278 n01514859 n02804610 n01773157 n02087046 n07802026 n03777754 n07720875 n01694178 n06794110 n02795169 n07583066 n02094114 n03841143 n01985128 n03776460 n02859443 n02808304 n02092339 n02441942 n02002724 n04296562 n02086910 n02690373 n01616318 n07718472 n02086240 n04049303 n04235860 n06359193 n02110958 n01518878 n02950826 n03447721 n02111129 n04517823 n03769881 n02112350 n07693725 n07747607 n02444819 n02109047 n04485082 n10148035 n03127925 n04328186 n03347037 n02102480 n07614500 n02676566 n04599235 n03534580 n02093256 n03710721 
n02167151 n04116512 n04141975 n03877472 n02092339 n03042490 n04604644 n03355925 n04009552 n03598930 n02672831 n03425413 n03649909 n02099429 n01819313 n02640242 n02978881 n03670208 n02342885 n03888257 n03729826 n02457408 n02860847 n09246464 n02097298 n03649909 n04228054 n02113624 n01978287 n03895866 n03393912 n03127925 n03720891 n01774384 n04065272 n03485407 n04033901 n02488291 n12057211 n01774750 n01798484 n01537544 n07720875 n03838899 n04120489 n02264363 n02113978 n02799071 n02114367 n04332243 n03062245 n02077923 n02398521 n04435653 n01692333 n07831146 n04523525 n02342885 n07753275 n01807496 n02098413 n01744401 n07836838 n02104029 n02092339 n02092339 n02115913 n01608432 n03325584 n02066245 n03345487 n03394916 n01773797 n02113186 n02667093 n02124075 n04118538 n02134084 n02317335 n03047690 n03938244 n02219486 n07718747 n02490219 n04326547 n02690373 n07717556 n01580077 n02443484 n04443257 n04033995 n07590611 n02403003 n07768694 n03803284 n04371774 n02802426 n06794110 n04483307 n02791270 n02028035 n03764736 n07860988 n09421951 n03773504 n04152593 n04367480 n02950826 n02168699 n04458633 n01983481 n04404412 n04252225 n04596742 n02480495 n02281787 n01795545 n02089867 n02169497 n02666196 n04311004 n02879718 n03457902 n02074367 n03297495 n02481823 n04485082 n02091244 n07718747 n02102480 n04147183 n03014705 n02814860 n04532670 n02094114 n01532829 n01664065 n04090263 n03995372 n03134739 n06596364 n03710637 n01807496 n02096294 n04026417 n02165105 n03998194 n02112706 n04366367 n02177972 n04152593 n04442312 n01697457 n03775071 n07892512 n02091831 n02101388 n01749939 n03384352 n02484975 n03868242 n01753488 n02687172 n02807133 n02231487 n02018795 n04270147 n03063599 n04591713 n03895866 n03481172 n04456115 n01755581 n02319095 n02526121 n01796340 n02094433 n01558993 n04238763 n03127925 n03017168 n02692877 n04179913 n02791124 n03494278 n06596364 n01751748 n02074367 n03249569 n04357314 n07579787 n04550184 n06596364 n03761084 n07718472 n03376595 n04428191 n01773157 n07248320 n03400231 n04447861 n03854065 n01694178 n02111500 n04111531 n02090622 n03450230 n04536866 n01817953 n02843684 n03776460 n04201297 n04204238 n02094114 n04238763 n01667114 n02116738 n03709823 n04153751 n02422699 n01796340 n07836838 n02027492 n03478589 n01689811 n02110958 n03538406 n03207743 n01669191 n06794110 n02087394 n01641577 n07873807 n03314780 n04591157 n02487347 n04277352 n07749582 n03792782 n03947888 n03792782 n01669191 n02102318 n03788365 n03899768 n04392985 n01629819 n04557648 n02640242 n02325366 n07749582 n04264628 n04487081 n02978881 n03720891 n01494475 n02951358 n01828970 n04286575 n04540053 n04332243 n04367480 n03840681 n02106662 n03376595 n02113186 n03085013 n09246464 n03127747 n04367480 n03290653 n07760859 n02102973 n03290653 n01751748 n02089973 n02086910 n02112350 n03272562 n04456115 n03785016 n02110341 n01728920 n04554684 n02417914 n01756291 n03590841 n01877812 n02113186 n02093256 n02099849 n02397096 n03642806 n02231487 n04179913 n02012849 n02279972 n04447861 n04355933 n01560419 n02445715 n03770679 n03929855 n01688243 n06596364 n07930864 n01945685 n01631663 n03216828 n03995372 n02782093 n01860187 n04443257 n04579432 n07745940 n04146614 n02177972 n04392985 n01644373 n02317335 n04553703 n02138441 n13040303 n01985128 n02134418 n01945685 n02526121 n02317335 n01820546 n04501370 n01560419 n02268443 n03796401 n03916031 n02992211 n03127747 n03180011 n02102480 n04277352 n01776313 n03017168 n02111129 n02190166 n02098413 n02090721 n01776313 n09421951 n02113023 n02672831 n03764736 n04146614 n03347037 n03868242 n02667093 n02093647 
n02169497 n02089973 n07747607 n02085782 n02815834 n02105412 n02086910 n04204238 n03530642 n07583066 n04039381 n02965783 n04501370 n04086273 n04263257 n02443484 n04162706 n07613480 n04525038 n04266014 n03721384 n04467665 n04523525 n04162706 n02025239 n04146614 n01677366 n04179913 n04125021 n02917067 n04392985 n04550184 n02090721 n03796401 n03014705 n04344873 n02091635 n01608432 n03690938 n04141975 n01629819 n04523525 n01955084 n01756291 n04443257 n02927161 n07880968 n07836838 n02484975 n02091032 n07714571 n03535780 n04149813 n09468604 n02033041 n03584254 n04550184 n03887697 n03838899 n02174001 n03272010 n03297495 n04074963 n03649909 n03496892 n03467068 n02268853 n03400231 n02093256 n04367480 n02091134 n04118776 n02086646 n07753592 n02504013 n02104365 n02096177 n03961711 n04069434 n03376595 n01817953 n01955084 n02107142 n03344393 n03709823 n02974003 n02090379 n04332243 n03125729 n03935335 n02814860 n01860187 n03220513 n02094114 n03877472 n02009912 n02108000 n02229544 n03697007 n03124170 n02206856 n03841143 n04153751 n01742172 n13133613 n04525305 n01930112 n02795169 n02233338 n02417914 n03935335 n01770393 n02125311 n03482405 n04604644 n02009912 n03791053 n03223299 n03032252 n04501370 n03372029 n03485794 n02110341 n04200800 n02106166 n04592741 n02950826 n04041544 n07831146 n04116512 n01514859 n03868242 n03026506 n02443484 n02701002 n04116512 n02815834 n03929855 n03676483 n01534433 n02701002 n02113978 n04371430 n03991062 n07718472 n02268853 n04264628 n02098105 n07565083 n02112706 n02094114 n02093991 n02488291 n02093859 n03047690 n01682714 n07717410 n01883070 n04562935 n01498041 n07745940 n02109525 n01644900 n01694178 n03063689 n02894605 n01682714 n03544143 n02101556 n02966687 n03485407 n03657121 n02236044 n07860988 n01677366 n07718747 n02690373 n04099969 n03814639 n02098413 n01985128 n02093647 n02504458 n01944390 n03445924 n03866082 n03355925 n02105855 n03041632 n03791053 n03954731 n07695742 n02102040 n03956157 n03983396 n02105855 n03249569 n03976467 n03843555 n02641379 n03272562 n03658185 n03976467 n02398521 n03791053 n03065424 n03759954 n03216828 n03796401 n01980166 n09193705 n01773797 n02129604 n04009552 n02980441 n03188531 n02100735 n07860988 n03929855 n04037443 n03467068 n02094114 n03899768 n04525038 n02074367 n04033901 n02012849 n02009229 n02109961 n03804744 n02396427 n02233338 n03240683 n03393912 n03777568 n02494079 n02106662 n04033995 n02231487 n04355338 n04550184 n02699494 n04118538 n03388043 n02869837 n02097047 n03063689 n01530575 n02091032 n03042490 n03930313 n02264363 n02442845 n02325366 n01883070 n01614925 n03447721 n03444034 n02979186 n02815834 n02123394 n03250847 n02883205 n04554684 n03047690 n01773157 n02172182 n03249569 n04613696 n03692522 n04044716 n12985857 n02342885 n03425413 n02895154 n01704323 n01560419 n02974003 n07695742 n03016953 n03729826 n03250847 n02927161 n02091635 n01990800 n02980441 n02676566 n02114548 n02422699 n04208210 n02109961 n04332243 n04127249 n03871628 n02391049 n01537544 n02124075 n02422106 n01775062 n03188531 n02443114 n01694178 n03063689 n02088364 n04476259 n04442312 n03792972 n07831146 n02483708 n04346328 n04591713 n03794056 n04153751 n03782006 n02058221 n04162706 n04522168 n03673027 n04483307 n03691459 n03478589 n02102318 n07749582 n07730033 n01829413 n01729977 n04501370 n09472597 n03781244 n02134084 n01742172 n03782006 n04553703 n09835506 n03804744 n02088238 n04067472 n03764736 n02992529 n03874599 n03124043 n04065272 n02782093 n03788195 n04389033 n03673027 n04389033 n03775071 n07753113 n12144580 n02013706 n02190166 n04275548 n03250847 n03947888 
n01729977 n02138441 n04264628 n03967562 n03445924 n04355338 n02640242 n01440764 n12267677 n02489166 n02165105 n03599486 n03272010 n02018207 n02747177 n04487081 n02119789 n02666196 n02606052 n02086646 n04040759 n01984695 n12998815 n01751748 n04584207 n04149813 n01981276 n02841315 n03777754 n04376876 n02859443 n04389033 n01665541 n04208210 n04041544 n02071294 n13052670 n01616318 n03871628 n02028035 n03110669 n01819313 n04229816 n02769748 n03832673 n02095889 n01806143 n02708093 n07753113 n02804610 n02879718 n03595614 n02769748 n07802026 n04357314 n09288635 n07753592 n04525038 n04590129 n01981276 n01530575 n02006656 n03903868 n02095570 n03602883 n03476991 n04328186 n03617480 n03272562 n02328150 n04536866 n02814860 n03710193 n04263257 n02699494 n04418357 n01496331 n02086079 n03495258 n03417042 n03065424 n03041632 n04467665 n02085936 n03956157 n02110341 n07760859 n03467068 n02825657 n02669723 n07579787 n02097658 n03717622 n03590841 n02268443 n07697313 n02859443 n01622779 n02999410 n01877812 n01744401 n01669191 n04507155 n02108000 n10148035 n04009552 n09421951 n03457902 n02091032 n03759954 n01443537 n02011460 n01984695 n02791270 n03617480 n02089973 n02105641 n03595614 n03207941 n03146219 n04367480 n07695742 n03376595 n09835506 n02342885 n03393912 n04311004 n04589890 n02114367 n02104029 n01945685 n02094114 n01824575 n04380533 n02025239 n03218198 n02110627 n04026417 n02749479 n07613480 n02437312 n03347037 n02403003 n03942813 n03450230 n04252225 n02108000 n03837869 n02165105 n03000247 n04344873 n02504458 n02110185 n01498041 n04270147 n04239074 n03924679 n02086646 n09835506 n03424325 n04370456 n03777754 n03529860 n02102040 n01688243 n02110627 n02100735 n02102177 n04086273 n01883070 n04366367 n02107574 n02102480 n04008634 n02169497 n04141327 n02442845 n03662601 n01855032 n04589890 n02018795 n03271574 n02097298 n03445777 n02102040 n03617480 n02108422 n02097474 n02109525 n02097474 n11879895 n03223299 n02100583 n03840681 n02091032 n01843065 n03769881 n02091467 n02134418 n02109047 n04456115 n03866082 n04239074 n02484975 n04259630 n07760859 n09246464 n01484850 n02443114 n04251144 n03843555 n04131690 n07716906 n03584254 n04033901 n04146614 n03633091 n13037406 n04254680 n07583066 n03483316 n02056570 n02102177 n04355338 n01669191 n04039381 n01532829 n02978881 n03691459 n04118776 n02672831 n06785654 n07749582 n02536864 n02116738 n04239074 n02483708 n03124170 n07930864 n02018207 n04074963 n01514859 n02089867 n03804744 n04116512 n02802426 n03627232 n03787032 n02281406 n07613480 n02526121 n02860847 n01806143 n03706229 n03982430 n04009552 n01616318 n01828970 n03920288 n03680355 n02727426 n02963159 n02102973 n04209133 n01798484 n02190166 n02091635 n02089078 n04371774 n04515003 n02655020 n02104029 n01877812 n02794156 n02974003 n02096585 n04525305 n02672831 n02113712 n02917067 n02096437 n07745940 n02326432 n03314780 n02236044 n02102973 n02093428 n03297495 n03676483 n03775071 n04536866 n04554684 n03400231 n04346328 n01530575 n04133789 n03160309 n01930112 n03494278 n03063599 n03891332 n04476259 n02410509 n03417042 n07753113 n03498962 n03991062 n04086273 n01739381 n07753275 n03065424 n03476991 n07565083 n01608432 n04258138 n03803284 n02120079 n02454379 n01537544 n02492035 n02219486 n01735189 n03594734 n02442845 n04485082 n03599486 n02086079 n03995372 n04501370 n02113712 n02102480 n03599486 n04162706 n03868242 n04209133 n02791124 n01819313 n02116738 n02894605 n03764736 n03476684 n02123159 n02325366 n03457902 n02123597 n09399592 n02488291 n03788365 n01770081 n01498041 n02110341 n02834397 n02391049 n02113023 n02099712 
n01739381 n02980441 n02027492 n03208938 n07734744 n02027492 n02108000 n03902125 n04044716 n09428293 n01981276 n02869837 n03425413 n03085013 n03804744 n02443114 n01983481 n02088466 n02077923 n01740131 n09468604 n02783161 n03888257 n02797295 n04252225 n01622779 n01669191 n03710637 n01669191 n01983481 n02108422 n04111531 n04179913 n04204238 n04389033 n02087046 n01872401 n02692877 n01632777 n02640242 n02927161 n02814860 n03792972 n04039381 n02480855 n03599486 n04326547 n03691459 n04592741 n03014705 n01582220 n13052670 n02802426 n01797886 n04263257 n04350905 n03372029 n02484975 n09428293 n03887697 n02112350 n03110669 n02910353 n02096294 n02102177 n02115913 n02804610 n04239074 n04005630 n04118538 n04067472 n02128757 n02097658 n02099849 n01882714 n02494079 n03379051 n02808440 n04392985 n02114548 n02206856 n03976657 n01729322 n07831146 n01883070 n02361337 n02128757 n02097130 n04447861 n13052670 n02096177 n03691459 n02134084 n02494079 n03642806 n04136333 n02268853 n02417914 n03891332 n09246464 n03032252 n02825657 n03498962 n03160309 n04026417 n04296562 n03534580 n03216828 n07880968 n03393912 n02948072 n04560804 n04152593 n04509417 n03884397 n02129604 n01944390 n04310018 n04086273 n07584110 n04258138 n04264628 n13040303 n02109525 n04462240 n02791270 n03384352 n04070727 n02108422 n03485407 n02093647 n03000134 n03089624 n07615774 n03956157 n02776631 n01729977 n03868242 n03899768 n01871265 n03180011 n03630383 n01968897 n02939185 n02097474 n04154565 n04462240 n02028035 n04041544 n02111129 n03026506 n04389033 n02808440 n03124170 n02129165 n02776631 n04259630 n03902125 n07760859 n01744401 n02128757 n02843684 n02091134 n02256656 n03814639 n02666196 n02497673 n13054560 n01914609 n01580077 n02089867 n03630383 n02025239 n02123597 n02807133 n03673027 n04317175 n15075141 n01795545 n03888257 n03062245 n04209133 n01531178 n02410509 n04162706 n03814639 n02102177 n04399382 n03220513 n06874185 n04152593 n07880968 n02066245 n01735189 n03271574 n01592084 n04355933 n02085936 n01978455 n04597913 n07871810 n02093859 n01773549 n03126707 n03452741 n02027492 n02408429 n01985128 n03670208 n04458633 n04273569 n03785016 n01751748 n03188531 n02917067 n02086240 n03770439 n03240683 n03920288 n03954731 n02109525 n03016953 n02107683 n01665541 n04310018 n03485407 n03187595 n03814639 n02095570 n01968897 n03874599 n02493509 n02130308 n02749479 n01945685 n02536864 n04154565 n02328150 n03908618 n01737021 n02408429 n02231487 n04131690 n03970156 n01530575 n04336792 n02951358 n02879718 n03944341 n03788195 n02895154 n03838899 n02037110 n04009552 n03141823 n02102973 n07730033 n01984695 n07693725 n04065272 n01631663 n02699494 n03095699 n02112350 n04019541 n09835506 n01484850 n07697313 n01729322 n03085013 n04041544 n02396427 n02879718 n03891332 n04590129 n03271574 n02454379 n01944390 n02099267 n02097658 n07720875 n02484975 n03733805 n02086240 n04204238 n03483316 n03201208 n02095570 n01630670 n03201208 n01755581 n02879718 n03065424 n02037110 n02108915 n02807133 n04023962 n01669191 n02098286 n04252225 n02115641 n02281787 n06794110 n02391049 n04486054 n01817953 n04041544 n04277352 n02107574 n09193705 n04371774 n04372370 n03724870 n03388183 n04371430 n02788148 n01817953 n02699494 n07730033 n09468604 n04254777 n04501370 n03637318 n02782093 n04152593 n01882714 n02916936 n03661043 n04336792 n02422699 n04019541 n01664065 n03325584 n03976657 n04423845 n04404412 n03527444 n02123045 n02094114 n01558993 n03062245 n02113712 n03662601 n03065424 n03388183 n03447721 n01667778 n03584254 n03000247 n07718747 n01737021 n02676566 n01795545 n07860988 n04086273 
n04332243 n03447721 n01829413 n02236044 n02165105 n01796340 n02092339 n01443537 n04370456 n03961711 n07579787 n01753488 n02708093 n02111277 n01774750 n04286575 n02483708 n02002724 n02536864 n03400231 n03485794 n02480495 n02509815 n04111531 n07716358 n01968897 n04579145 n02892201 n02091134 n04118776 n03249569 n01601694 n04522168 n02441942 n03271574 n02692877 n03930313 n02100735 n04428191 n03706229 n02119789 n02111277 n01629819 n04476259 n03958227 n03240683 n02504458 n04461696 n09229709 n01728920 n02422106 n03450230 n02268853 n03902125 n03868863 n09428293 n04482393 n03680355 n01744401 n12620546 n02002556 n04136333 n02447366 n02226429 n03249569 n02281406 n03721384 n03874599 n02951585 n04074963 n02480495 n03929855 n03016953 n03376595 n07747607 n15075141 n02085620 n04141975 n03733805 n03670208 n02085620 n01491361 n03803284 n02415577 n07714571 n03929855 n13037406 n01740131 n01580077 n03891251 n02128925 n01664065 n02090379 n07920052 n02279972 n02490219 n02906734 n01914609 n01704323 n02105412 n03492542 n04482393 n02788148 n01985128 n03388549 n04251144 n02939185 n02114548 n07836838 n10148035 n03976467 n03447721 n02006656 n07802026 n04370456 n02417914 n01776313 n02112018 n03938244 n02536864 n07802026 n04501370 n02963159 n03759954 n02028035 n04044716 n02123394 n02823428 n01491361 n04008634 n01877812 n07615774 n09256479 n01833805 n04127249 n04507155 n03673027 n01882714 n03697007 n03637318 n04332243 n12267677 n07714571 n03485794 n04004767 n02795169 n02120505 n02086646 n02107908 n03888257 n01795545 n03272010 n07714571 n02097047 n03874293 n02391049 n01855672 n01871265 n04208210 n02487347 n02013706 n02096051 n03598930 n03873416 n02871525 n02102973 n03710637 n01773157 n03208938 n04325704 n02002724 n02137549 n02125311 n01440764 n01806567 n03345487 n04209239 n07860988 n07802026 n07714571 n12768682 n02108422 n01770393 n03124043 n04023962 n02105056 n04476259 n02871525 n03598930 n02206856 n03223299 n02259212 n02607072 n02834397 n02364673 n03131574 n02802426 n02117135 n04370456 n01829413 n04033901 n02123159 n02794156 n02132136 n02883205 n07720875 n03920288 n02892201 n04285008 n03345487 n03661043 n04423845 n02013706 n01924916 n03095699 n09428293 n04153751 n02865351 n03384352 n02786058 n02099429 n03014705 n02113712 n01833805 n03924679 n03937543 n02892767 n01819313 n02109047 n01694178 n01729322 n02808440 n04266014 n01978287 n04111531 n04540053 n02100735 n03935335 n04372370 n03930630 n02443114 n03854065 n03724870 n09193705 n02640242 n03967562 n07711569 n04147183 n03710721 n02965783 n02951585 n01582220 n03014705 n02643566 n01739381 n03814906 n01882714 n01729322 n02860847 n04350905 n01697457 n03220513 n04311004 n03877472 n04209239 n04149813 n03770679 n04548362 n07930864 n03661043 n03400231 n02930766 n04613696 n03866082 n01990800 n01534433 n03947888 n02492660 n01985128 n03793489 n03977966 n01795545 n04086273 n01688243 n02423022 n04277352 n03877472 n03208938 n04476259 n04550184 n03063599 n04523525 n02123597 n02708093 n02134418 n02086079 n11879895 n03676483 n02107574 n02113978 n03764736 n03642806 n01748264 n02167151 n04612504 n02817516 n02051845 n03724870 n02077923 n01443537 n03065424 n02105505 n02051845 n02087394 n01735189 n04310018 n01632458 n02509815 n02093859 n01669191 n03868242 n03400231 n02423022 n02090622 n03146219 n02397096 n03532672 n02013706 n01622779 n02483708 n03187595 n02114712 n03131574 n03476991 n03838899 n02105162 n04604644 n01689811 n02113624 n03691459 n15075141 n01773797 n01491361 n04209133 n04476259 n03444034 n02488291 n03485407 n01630670 n04599235 n02174001 n02834397 n02509815 n03538406 n03535780 
n02105855 n04501370 n02098105 n03763968 n03095699 n04591713 n02363005 n03599486 n01491361 n02090622 n03590841 n03832673 n02013706 n06874185 n06596364 n04074963 n04389033 n02447366 n01631663 n02841315 n03733805 n03146219 n02974003 n03947888 n02095570 n02422106 n04049303 n02396427 n03891251 n02422106 n04486054 n02091831 n07760859 n03179701 n03947888 n03692522 n02097298 n03602883 n02974003 n02951585 n04141327 n04357314 n02786058 n02268853 n04596742 n03788365 n02111277 n02104365 n03584254 n04509417 n03494278 n02939185 n02363005 n03047690 n04366367 n04409515 n04380533 n03187595 n01882714 n03680355 n03124170 n01986214 n04004767 n01833805 n04141076 n02033041 n03109150 n04560804 n07930864 n02114548 n02877765 n02093754 n01737021 n02093647 n03794056 n01843383 n01978287 n01669191 n02870880 n02071294 n02098286 n04120489 n04239074 n01537544 n02504013 n03929855 n09193705 n03534580 n03018349 n04179913 n01735189 n01665541 n12768682 n02669723 n03930313 n04200800 n02363005 n04552348 n03992509 n02123159 n04505470 n01518878 n01742172 n02445715 n03584254 n02101556 n02398521 n02106166 n04372370 n04346328 n02109047 n03498962 n01980166 n07753275 n04447861 n09332890 n04417672 n07248320 n02412080 n03218198 n04428191 n04447861 n04557648 n01677366 n01774750 n09399592 n02859443 n04456115 n02018795 n03935335 n04465501 n02112706 n02799071 n07684084 n01614925 n02167151 n04606251 n04317175 n04311004 n02077923 n04326547 n02483708 n02963159 n07565083 n04557648 n02397096 n04133789 n02229544 n04317175 n07749582 n03803284 n04456115 n01828970 n02408429 n01632458 n03028079 n03291819 n01773797 n02096585 n02110341 n01669191 n01986214 n03742115 n01910747 n02966687 n02025239 n07615774 n02090721 n01855672 n02965783 n03924679 n11879895 n02113186 n04270147 n02804610 n06359193 n02965783 n03777754 n09399592 n01693334 n04033901 n02098413 n01981276 n03657121 n02096437 n03841143 n02123394 n02447366 n03345487 n02963159 n01580077 n03481172 n02483362 n02894605 n02109525 n04525038 n01917289 n03983396 n04462240 n04153751 n03992509 n02906734 n03290653 n02017213 n02808440 n04515003 n02422106 n02115913 n03720891 n10148035 n02794156 n02096294 n03220513 n02437312 n02058221 n04540053 n07753592 n02105641 n04325704 n04447861 n07695742 n03666591 n03642806 n01910747 n03733281 n01768244 n03888605 n13133613 n03590841 n03127925 n02488291 n04208210 n04592741 n04557648 n02169497 n01773549 n02672831 n03742115 n01983481 n02113978 n03494278 n02490219 n02488291 n03062245 n02167151 n02676566 n04392985 n03877472 n02168699 n02488291 n02840245 n03014705 n04044716 n02119022 n01824575 n02840245 n04023962 n03032252 n02486410 n03197337 n02974003 n04086273 n02441942 n03496892 n03721384 n03538406 n03041632 n02927161 n02408429 n03759954 n03690938 n01930112 n01744401 n02992529 n03873416 n07615774 n02012849 n03777568 n03676483 n01968897 n03866082 n04005630 n04285008 n02841315 n02106030 n02276258 n02422106 n03649909 n03017168 n02097474 n02948072 n02256656 n04179913 n09835506 n02111889 n02988304 n07836838 n02051845 n02971356 n02640242 n03065424 n04201297 n02281406 n02134418 n02500267 n02895154 n02870880 n03617480 n02415577 n03733131 n03594734 n04152593 n04258138 n04286575 n04336792 n02484975 n04041544 n04081281 n03291819 n04584207 n02100877 n03459775 n01498041 n04429376 n04252077 n04515003 n02108089 n03876231 n03838899 n07716358 n02025239 n02965783 n04033901 n03841143 n02102318 n03888605 n03777568 n04350905 n02870880 n04277352 n07720875 n02317335 n02504458 n02488291 n02137549 n02490219 n04428191 n03662601 n04532670 n02105412 n02091831 n04154565 n01531178 n07753275 n02117135 
n01882714 n03272010 n03759954 n03866082 n03992509 n02137549 n01537544 n01494475 n03179701 n01694178 n04554684 n04204347 n11879895 n04366367 n04371430 n12057211 n02730930 n03461385 n01728572 n01688243 n04141975 n02174001 n04310018 n02077923 n02105505 n03250847 n01776313 n04532106 n02346627 n04493381 n07742313 n04335435 n02112018 n02097298 n04254120 n02231487 n03394916 n01806143 n04311004 n03216828 n07615774 n07614500 n07768694 n07248320 n03594734 n04008634 n02091134 n02606052 n04310018 n07714990 n01945685 n02326432 n01704323 n01944390 n01514668 n01514668 n01740131 n04356056 n03492542 n02643566 n03759954 n03854065 n03781244 n03125729 n02087394 n02093754 n02802426 n03527444 n07747607 n03394916 n01644373 n02823428 n02106550 n03954731 n01944390 n09472597 n03126707 n02102973 n03443371 n03529860 n02489166 n04606251 n04371774 n03197337 n04252225 n01986214 n03841143 n02111129 n04251144 n02782093 n03786901 n04542943 n03196217 n01735189 n03125729 n02089867 n04009552 n02860847 n02229544 n01871265 n03930313 n04296562 n03388549 n02437616 n02423022 n02190166 n04522168 n04136333 n02009229 n07716358 n01798484 n01990800 n04525038 n07754684 n01582220 n03673027 n02977058 n04317175 n03495258 n02692877 n02089973 n01843065 n03584254 n02802426 n02364673 n01807496 n02172182 n03742115 n02687172 n02769748 n07716358 n03028079 n02107142 n02749479 n02417914 n04296562 n01829413 n01698640 n03935335 n02096294 n02112706 n02692877 n01740131 n07754684 n04136333 n02112137 n02326432 n02113624 n07715103 n02484975 n03781244 n01630670 n02701002 n03776460 n01978455 n01755581 n01819313 n03838899 n04146614 n04251144 n02113023 n02483362 n04456115 n02101006 n02992211 n02037110 n03045698 n02963159 n03249569 n06359193 n03196217 n01693334 n02085936 n03697007 n02092002 n02099712 n02793495 n03710721 n02102318 n03895866 n02097209 n03127747 n01950731 n02106166 n01443537 n03372029 n04229816 n01990800 n04258138 n03637318 n03633091 n03770439 n01818515 n04069434 n02110063 n01664065 n02504458 n01641577 n04562935 n03825788 n03873416 n02484975 n01984695 n03761084 n02892201 n04392985 n04357314 n02097130 n03394916 n03124170 n03938244 n01582220 n04133789 n07871810 n02114855 n02445715 n03017168 n01729977 n02101006 n04153751 n07730033 n02802426 n02130308 n02096585 n01860187 n01980166 n02825657 n03450230 n04037443 n04090263 n02361337 n02823750 n02843684 n03372029 n01749939 n02808440 n03384352 n02129165 n02095570 n02916936 n02098105 n02093256 n03445777 n02111500 n04553703 n03871628 n03876231 n03062245 n03207941 n04428191 n02408429 n04005630 n02777292 n03877845 n04599235 n02514041 n04081281 n02111889 n03208938 n02105855 n10565667 n02493793 n02676566 n02219486 n04147183 n01531178 n04542943 n02492660 n04235860 n02321529 n01687978 n02066245 n01818515 n03461385 n03710637 n03854065 n01872401 n01847000 n03690938 n06596364 n07932039 n02102973 n01806567 n02106382 n15075141 n02109047 n02087394 n01774750 n02128385 n07871810 n02086240 n04209239 n07749582 n04392985 n02058221 n01644373 n03127925 n03690938 n04485082 n03388183 n02110627 n02165105 n03785016 n02259212 n02108915 n02099267 n04044716 n01990800 n01986214 n01632777 n01580077 n02106030 n01632458 n03337140 n01695060 n09399592 n04116512 n03443371 n02097658 n04039381 n02422699 n02105855 n03792782 n02229544 n01950731 n02256656 n03916031 n01534433 n03791053 n04200800 n03314780 n04120489 n04584207 n01820546 n04125021 n02930766 n02093647 n02910353 n03452741 n03482405 n04380533 n01622779 n07768694 n03042490 n03461385 n04285008 n04540053 n02099267 n12057211 n04118776 n04162706 n12620546 n01534433 n01675722 n02089078 
n03290653 n02883205 n07697537 n03393912 n02113186 n03014705 n04435653 n03590841 n03773504 n02782093 n02980441 n04239074 n04228054 n03877845 n04023962 n04404412 n02088238 n03617480 n03670208 n09229709 n02971356 n04553703 n01748264 n02091467 n07697537 n02113186 n07615774 n02328150 n02883205 n07579787 n01514668 n03877845 n02108915 n07760859 n02125311 n03899768 n01924916 n02487347 n02979186 n03594945 n03895866 n02441942 n13040303 n03710193 n03709823 n03544143 n02843684 n02085782 n02088466 n01910747 n04599235 n01847000 n02423022 n03476991 n02690373 n07730033 n03733281 n02129604 n02027492 n04443257 n03977966 n03992509 n02108422 n07875152 n03793489 n03127925 n04579145 n02395406 n02119022 n03706229 n03902125 n03777568 n02125311 n04458633 n02672831 n01784675 n02138441 n04328186 n02120505 n01644373 n03544143 n01818515 n03877472 n04044716 n04009552 n03220513 n04067472 n02172182 n02823750 n02317335 n04467665 n02229544 n04049303 n02116738 n07584110 n02018795 n03930313 n02480495 n02172182 n09399592 n01530575 n02971356 n02105641 n01698640 n04553703 n02280649 n01807496 n02504458 n03617480 n03884397 n02011460 n02704792 n03393912 n01667114 n03598930 n01775062 n07717410 n04118776 n03218198 n03255030 n02111129 n02892201 n03444034 n03692522 n02364673 n07718747 n04418357 n04235860 n03000684 n03929660 n03670208 n01560419 n02494079 n03197337 n01737021 n07697313 n02127052 n03764736 n04270147 n02097474 n04204347 n03291819 n03134739 n02086240 n03691459 n01924916 n04550184 n02093754 n03110669 n02643566 n02108422 n02795169 n02483362 n03983396 n02093647 n02815834 n04069434 n03930313 n02326432 n02086079 n03958227 n04258138 n03498962 n03697007 n03126707 n02980441 n03530642 n02086910 n02087394 n02280649 n04285008 n02093256 n01950731 n03733131 n04277352 n02086240 n03544143 n03782006 n01632777 n02086646 n03297495 n09246464 n02123597 n02687172 n04487081 n02236044 n03710193 n02607072 n02788148 n01776313 n04376876 n02102973 n07873807 n03372029 n02104029 n02669723 n01693334 n12985857 n03785016 n02066245 n01698640 n04086273 n03047690 n04026417 n01773797 n03742115 n02018207 n01978455 n02988304 n03595614 n02965783 n02992529 n01773157 n03417042 n03376595 n04435653 n07711569 n03970156 n02877765 n04111531 n09256479 n02641379 n04179913 n02113023 n03977966 n04525038 n02190166 n04070727 n02111277 n02128757 n01784675 n02412080 n03146219 n03485794 n01773157 n02119022 n02704792 n01737021 n03697007 n03450230 n01770081 n03792782 n02089867 n02817516 n03141823 n01773157 n07860988 n02317335 n04442312 n04428191 n04049303 n12620546 n04591157 n03980874 n03314780 n02514041 n03376595 n01774384 n01774384 n04579432 n04336792 n01872401 n02483708 n03127925 n03314780 n03843555 n01770081 n02480855 n04118776 n01910747 n03126707 n02233338 n02114855 n02808304 n02107683 n03590841 n01737021 n01514859 n04346328 n02102480 n02093754 n09472597 n09332890 n03630383 n02492035 n04026417 n02110185 n03125729 n04465501 n07695742 n03775546 n02930766 n07753275 n07684084 n04486054 n01677366 n03127747 n02917067 n04347754 n02704792 n07583066 n07714990 n02111500 n03085013 n02233338 n03977966 n03876231 n07760859 n03623198 n02268853 n07730033 n02097047 n02981792 n01984695 n04584207 n01665541 n01734418 n02100877 n03109150 n02099712 n01855672 n02486410 n02099267 n03804744 n04179913 n02091032 n04200800 n04127249 n01833805 n01855672 n02909870 n04423845 n03345487 n04456115 n04517823 n07714990 n03492542 n01531178 n07892512 n01534433 n03982430 n04116512 n02097130 n04612504 n03146219 n02097130 n04517823 n07684084 n01978455 n02236044 n01798484 n04200800 n01985128 n09468604 n02268853 
n02090622 n03000684 n04447861 n04154565 n02840245 n03126707 n02391049 n04532106 n01728572 n03124043 n01773549 n02480855 n07860988 n02105056 n03888605 n02116738 n02804610 n02113799 n03899768 n01729322 n07873807 n02116738 n02795169 n02256656 n07720875 n03584829 n02097209 n02092002 n07614500 n03599486 n02825657 n02966687 n04428191 n02488702 n01774384 n03908618 n03814639 n02444819 n02825657 n02325366 n03394916 n02077923 n03709823 n04579432 n03967562 n01514668 n04548280 n03899768 n02892201 n01704323 n01484850 n03535780 n03775546 n03337140 n01514859 n01580077 n01580077 n04509417 n03977966 n02115641 n07697313 n07753275 n04542943 n02910353 n02087046 n04443257 n03788365 n04429376 n01484850 n02843684 n04479046 n01990800 n09193705 n02115641 n01773549 n09246464 n03956157 n03065424 n02174001 n01824575 n02099267 n02093647 n03133878 n01580077 n01622779 n03271574 n07768694 n04376876 n01877812 n03110669 n01728920 n04141327 n04389033 n02096294 n02492035 n03876231 n07716906 n02097474 n02086240 n02708093 n02105641 n01984695 n03125729 n03944341 n03450230 n02109525 n04389033 n07760859 n01704323 n04540053 n02823428 n02115641 n03733281 n02093754 n01532829 n07802026 n09472597 n02091134 n03041632 n04372370 n01608432 n04265275 n02804414 n03109150 n04328186 n02107312 n03100240 n03250847 n03393912 n02090622 n02840245 n02870880 n04562935 n02397096 n03995372 n02106662 n02096177 n02493509 n02965783 n01981276 n01990800 n01698640 n02088238 n02107908 n09399592 n02790996 n02091134 n04252225 n02447366 n03179701 n02123394 n02974003 n03124170 n03045698 n03271574 n04067472 n01494475 n01984695 n02321529 n03062245 n07892512 n02123045 n02099849 n02672831 n03854065 n02825657 n01644900 n07745940 n04366367 n09288635 n03447447 n03124043 n12267677 n02091244 n02111277 n02088632 n12985857 n04517823 n03594945 n04049303 n03908714 n03697007 n07714571 n01986214 n03014705 n04238763 n02950826 n01755581 n02108089 n02111500 n02028035 n03425413 n02276258 n03690938 n03478589 n04579432 n04209133 n02492035 n04479046 n03131574 n04026417 n01981276 n01514668 n02643566 n03791053 n02870880 n04235860 n06596364 n04019541 n09246464 n03065424 n13054560 n04597913 n02111500 n04252077 n03857828 n02100236 n04442312 n02363005 n04040759 n03127925 n04033995 n03662601 n02966193 n03761084 n03838899 n04081281 n04243546 n04252077 n04487081 n04417672 n03662601 n03476991 n01829413 n07614500 n02701002 n07754684 n04258138 n01744401 n03259280 n02676566 n03017168 n01817953 n04049303 n01692333 n02108551 n03134739 n02410509 n03871628 n04525305 n02093754 n04461696 n04523525 n11939491 n04612504 n03706229 n02167151 n01582220 n03692522 n03595614 n02823428 n03950228 n04399382 n03877845 n04596742 n04005630 n03724870 n03445924 n07614500 n01883070 n03710637 n04120489 n03127925 n03249569 n02879718 n04562935 n03630383 n02106662 n02097474 n02114855 n09332890 n02096051 n03995372 n03016953 n03447447 n10565667 n07579787 n02102040 n02097298 n01514668 n04332243 n03770679 n02102040 n01616318 n01694178 n02817516 n02086240 n03787032 n01582220 n02097130 n03690938 n02825657 n02106662 n02490219 n02514041 n03958227 n03658185 n03187595 n02107908 n07734744 n02093859 n02011460 n04447861 n02640242 n02793495 n02514041 n01534433 n02132136 n02108422 n01768244 n04399382 n01734418 n02037110 n02444819 n03272562 n02906734 n01740131 n03325584 n03598930 n02277742 n03443371 n03447721 n02097130 n04347754 n03903868 n03529860 n06785654 n01985128 n02892767 n02074367 n02445715 n03131574 n02892201 n02114548 n02096294 n03787032 n03776460 n02870880 n04347754 n03930313 n02095889 n02124075 n01641577 n07753592 n02100583 
n04591157 n02488291 n03690938 n03791053 n02860847 n04612504 n01677366 n02112350 n03062245 n02909870 n09428293 n01860187 n02999410 n13044778 n04070727 n02105855 n01950731 n04443257 n02110341 n04265275 n04273569 n03000247 n01675722 n03838899 n13040303 n03016953 n03793489 n02119022 n04366367 n03388549 n06874185 n02980441 n03676483 n04065272 n02102040 n04501370 n01740131 n04162706 n04325704 n01443537 n02672831 n02101006 n04417672 n01990800 n02133161 n02264363 n04548280 n03935335 n02906734 n01985128 n02107574 n03125729 n03208938 n02074367 n03133878 n02085782 n02607072 n03388043 n02096585 n07693725 n02786058 n01443537 n01873310 n02791124 n04325704 n03530642 n04147183 n02484975 n02091635 n03100240 n02879718 n02093991 n11879895 n01737021 n13054560 n01945685 n04356056 n02342885 n04192698 n04536866 n04435653 n01829413 n01496331 n03887697 n03770679 n12057211 n12985857 n04266014 n02916936 n04429376 n02229544 n03763968 n03595614 n02837789 n02109047 n02106030 n03180011 n02102973 n02865351 n02074367 n02169497 n02087046 n03141823 n02124075 n02437312 n07892512 n01776313 n02641379 n01644900 n03042490 n03630383 n03785016 n07730033 n03544143 n02007558 n02109047 n02910353 n02107312 n02389026 n01698640 n03633091 n04442312 n07248320 n04525038 n03459775 n03297495 n03676483 n03476991 n02097658 n03888257 n02115913 n01532829 n02085936 n01532829 n02107312 n02403003 n03933933 n02483362 n02105162 n02066245 n01518878 n01685808 n03782006 n07695742 n09835506 n04141076 n02454379 n02107683 n03874293 n02177972 n02106166 n04590129 n03388549 n04399382 n02096585 n02093256 n02319095 n04560804 n02089973 n03223299 n02091244 n02089867 n04335435 n03825788 n02056570 n01669191 n02113978 n03141823 n02640242 n02841315 n04146614 n03400231 n02490219 n03791053 n07880968 n02025239 n03873416 n02437616 n03220513 n02089973 n03045698 n02100735 n04228054 n06785654 n04554684 n03595614 n03933933 n03954731 n02110806 n02056570 n04476259 n03032252 n02445715 n03895866 n02317335 n04479046 n02782093 n02172182 n02417914 n03041632 n04507155 n02672831 n02108000 n07714990 n03532672 n02123597 n03218198 n02091134 n02825657 n02916936 n03874599 n03876231 n03160309 n04118538 n03259280 n03670208 n07745940 n03733805 n01669191 n03404251 n07718747 n07831146 n02403003 n02883205 n02415577 n01784675 n02492035 n03599486 n01877812 n01877812 n03498962 n04355338 n03617480 n03404251 n02277742 n02169497 n02113624 n04067472 n04465501 n04335435 n02444819 n09421951 n04591157 n01622779 n03425413 n02346627 n04162706 n03874293 n02138441 n04005630 n03769881 n03942813 n04285008 n02114855 n02114712 n02708093 n03124170 n01498041 n07613480 n02363005 n03355925 n13054560 n03180011 n04552348 n02423022 n04525038 n02504013 n02107312 n02091467 n02101006 n03721384 n07695742 n02823428 n04589890 n04584207 n04111531 n03160309 n01531178 n02123394 n02777292 n04208210 n01667114 n01667114 n04597913 n03529860 n03450230 n02123045 n12768682 n01924916 n02536864 n04442312 n02747177 n07831146 n02951358 n03857828 n03482405 n03028079 n04040759 n02417914 n01689811 n03188531 n04070727 n07720875 n02168699 n11939491 n01704323 n03223299 n01930112 n02747177 n03903868 n02093428 n01728572 n03459775 n04409515 n03977966 n03220513 n04355933 n03662601 n03916031 n07836838 n07714571 n03891332 n02105251 n03028079 n02117135 n02096585 n04458633 n02883205 n01818515 n01641577 n04070727 n02093428 n03494278 n03255030 n03769881 n07716358 n03877845 n07760859 n03495258 n04370456 n02091134 n03874293 n03026506 n03259280 n02097209 n03873416 n07760859 n02108422 n01872401 n01981276 n04153751 n02110185 n02095570 n01496331 n04285008 
n03075370 n02815834 n09256479 n02092339 n02808304 n09428293 n02101006 n02412080 n04285008 n03954731 n04311004 n03476991 n01518878 n02687172 n02342885 n02346627 n02883205 n03457902 n02097658 n02504458 n03930313 n02087394 n02802426 n03272010 n02102318 n02091467 n02099849 n04552348 n02443114 n02276258 n03642806 n02342885 n03916031 n02125311 n02837789 n02130308 n04509417 n03207941 n03877845 n13052670 n02317335 n03444034 n03179701 n04371774 n03924679 n02950826 n02110958 n02113978 n02109961 n02363005 n02090622 n07930864 n03857828 n03763968 n07684084 n02497673 n02102480 n04275548 n04264628 n02058221 n01687978 n02877765 n01748264 n02028035 n02909870 n04332243 n09835506 n04192698 n03877845 n03832673 n04179913 n03623198 n02107908 n04548362 n01641577 n02992211 n04326547 n02783161 n03743016 n01729977 n04146614 n01695060 n03649909 n02087394 n03424325 n01688243 n03223299 n01914609 n02091032 n02095570 n07720875 n02606052 n03584829 n02110185 n03220513 n07745940 n01824575 n02099601 n11939491 n07749582 n03457902 n01784675 n02112018 n03733131 n04328186 n04037443 n03717622 n01694178 n02871525 n02808440 n04560804 n02097474 n02137549 n01981276 n02443114 n02101006 n04550184 n12985857 n02236044 n02488291 n04532106 n03895866 n03617480 n03417042 n03903868 n03584254 n02389026 n04435653 n02492035 n01796340 n03447721 n03447447 n03595614 n04579145 n02777292 n04147183 n02006656 n03843555 n02504458 n03444034 n03673027 n04417672 n10148035 n04179913 n03792972 n04552348 n02281406 n02326432 n02493509 n03314780 n03485407 n01980166 n04442312 n03602883 n01986214 n02108915 n02492660 n03384352 n04367480 n04467665 n02814860 n01728572 n03733281 n03216828 n02494079 n03733805 n02279972 n01692333 n02091635 n04487081 n03866082 n03208938 n07714990 n02906734 n02807133 n02095570 n03594945 n03492542 n02442845 n01833805 n02395406 n06874185 n02490219 n02071294 n02447366 n01537544 n02281787 n02268443 n03775546 n04429376 n03832673 n04398044 n04370456 n02128757 n04162706 n04146614 n04482393 n07860988 n02167151 n02095889 n02487347 n01632777 n02992211 n02097658 n02107683 n03980874 n07753592 n02037110 n03388183 n01695060 n04258138 n02802426 n03425413 n02403003 n03868242 n02006656 n02667093 n02607072 n02093647 n02536864 n04591713 n02669723 n03733805 n03259280 n03709823 n04483307 n03877472 n02113023 n04133789 n06359193 n03903868 n03089624 n02013706 n04266014 n02504013 n02101006 n02124075 n01774750 n02112350 n02526121 n03485407 n03496892 n02655020 n07714571 n02087394 n03160309 n02091831 n03047690 n04612504 n02859443 n04033995 n02950826 n03187595 n01592084 n07892512 n04507155 n01692333 n01981276 n02823750 n04251144 n04548362 n07565083 n04209133 n01877812 n04486054 n09421951 n02231487 n02113799 n02098413 n04081281 n02999410 n02107312 n02346627 n01675722 n02795169 n03649909 n04090263 n03871628 n01877812 n03670208 n03866082 n03496892 n07248320 n04162706 n02098413 n04069434 n03938244 n02101006 n02325366 n03388549 n03393912 n01739381 n02108089 n03000134 n03124170 n02037110 n02098105 n01986214 n03314780 n10148035 n04200800 n03457902 n02091831 n02835271 n03642806 n02101388 n02128757 n04004767 n02091635 n04311004 n04328186 n01829413 n02108000 n03877845 n03935335 n01744401 n01531178 n13044778 n02699494 n01775062 n02088364 n04239074 n03781244 n02442845 n03028079 n09421951 n12768682 n02454379 n03065424 n02113023 n01873310 n03594945 n03792782 n03529860 n02174001 n02487347 n01692333 n02837789 n04487394 n02509815 n03970156 n02445715 n02666196 n02009912 n01797886 n07583066 n02111500 n03461385 n04371774 n04296562 n02978881 n02066245 n02129604 n03761084 n09229709 
n01774750 n02108915 n01797886 n04482393 n03792782 n02095314 n01693334 n04560804 n04376876 n07718747 n01532829 n03888605 n02980441 n01494475 n02093754 n07802026 n04562935 n02165456 n02356798 n03977966 n03124170 n02797295 n04201297 n04392985 n04579432 n02106550 n02782093 n04252077 n04326547 n02454379 n02437312 n01729977 n02123045 n04229816 n02077923 n03788195 n02124075 n02051845 n02087394 n02096437 n02403003 n02769748 n04392985 n02134084 n02840245 n04273569 n03125729 n03967562 n03961711 n03961711 n07579787 n04270147 n02965783 n02006656 n03995372 n03444034 n02814860 n04070727 n04208210 n04486054 n03729826 n02120079 n04591713 n02808304 n02105641 n03770439 n04228054 n02094114 n03400231 n02106166 n03868863 n02089078 n03954731 n04355338 n02669723 n04200800 n04266014 n03929855 n02107312 n04023962 n03958227 n01677366 n02791124 n03485407 n02129165 n03075370 n01558993 n02988304 n04355933 n02134418 n01675722 n07920052 n02321529 n02018795 n03992509 n03868863 n03796401 n02892767 n04254120 n03785016 n04591157 n01518878 n06794110 n01930112 n02951585 n07711569 n01496331 n02788148 n03207743 n03794056 n04332243 n04356056 n07873807 n02667093 n03271574 n02794156 n02493793 n03527444 n02951585 n03240683 n02109961 n01795545 n03599486 n04599235 n01644900 n07880968 n04317175 n02840245 n02408429 n07248320 n04285008 n02096585 n02704792 n04560804 n03785016 n02927161 n03697007 n07930864 n07248320 n02028035 n02123597 n02676566 n07583066 n02871525 n02134084 n02091032 n04462240 n02117135 n02009912 n09193705 n09472597 n02834397 n03764736 n01753488 n03895866 n02112018 n02165105 n02837789 n03457902 n04522168 n04023962 n04536866 n04005630 n02110627 n02708093 n04554684 n01514668 n02090379 n07836838 n02108089 n03095699 n04366367 n04039381 n07802026 n03100240 n03255030 n04235860 n02980441 n03218198 n01514668 n03000684 n02088094 n02815834 n03657121 n03891251 n02808440 n02916936 n03661043 n04243546 n04065272 n03666591 n04604644 n04509417 n03937543 n04509417 n02109961 n04251144 n02869837 n02113712 n02492660 n02841315 n07734744 n04456115 n02640242 n03929855 n04266014 n01644900 n02807133 n03814639 n01514859 n01784675 n04023962 n02256656 n01695060 n03532672 n04070727 n03742115 n03482405 n01773797 n03388183 n03792782 n09246464 n03394916 n13052670 n03498962 n02356798 n02966193 n01798484 n03394916 n04476259 n03854065 n03950228 n02708093 n02206856 n03026506 n04004767 n03691459 n01682714 n02095570 n02480855 n03424325 n01531178 n03868863 n02883205 n02795169 n04399382 n02840245 n02808304 n01695060 n02110063 n01601694 n04229816 n02927161 n03187595 n02454379 n04483307 n01986214 n02104029 n04485082 n02808304 n03384352 n02107574 n02927161 n03924679 n01685808 n02364673 n04389033 n07718472 n01558993 n03047690 n03595614 n02071294 n03028079 n01806143 n03814639 n02007558 n04525038 n02128385 n02391049 n04372370 n03769881 n02100877 n09288635 n03950228 n02786058 n03788365 n01667114 n02119789 n02279972 n02033041 n02086910 n01749939 n03337140 n07693725 n02492660 n02442845 n02917067 n03733281 n07920052 n02490219 n02111277 n02123394 n02128757 n02992211 n03424325 n03942813 n04399382 n04417672 n01828970 n03854065 n02325366 n02492035 n03220513 n02087046 n03602883 n01983481 n01498041 n02834397 n03791053 n04604644 n07730033 n01675722 n02105056 n04039381 n02835271 n02787622 n04591157 n02484975 n04044716 n02977058 n03000247 n03602883 n02112018 n04584207 n03733281 n04209133 n02106662 n01740131 n03983396 n04141327 n03476684 n03337140 n04311174 n02510455 n03476991 n04456115 n03141823 n04009552 n03461385 n01797886 n01734418 n02108915 n04251144 n04192698 n04525038 
n03995372 n01985128 n07930864 n02514041 n02098413 n03388183 n02095889 n02992529 n07920052 n03249569 n02667093 n03393912 n03743016 n03876231 n02138441 n07875152 n02099601 n01630670 n02099429 n03706229 n03992509 n03141823 n03109150 n02504013 n02992529 n01943899 n03796401 n01675722 n04141327 n07697537 n04141327 n02871525 n04254680 n07836838 n03133878 n02346627 n03649909 n02090622 n03124170 n04458633 n04525305 n03666591 n02699494 n03680355 n01692333 n02480495 n03109150 n02342885 n02776631 n04596742 n03018349 n04525305 n01824575 n01882714 n02115641 n02788148 n04335435 n02085936 n02782093 n03095699 n03127925 n09468604 n07717410 n03417042 n12998815 n02113023 n07742313 n04296562 n07714571 n02107312 n01806143 n04033995 n02025239 n03930313 n02641379 n03804744 n07745940 n02097658 n07930864 n03089624 n02492035 n02791124 n02172182 n02865351 n01739381 n03950228 n02099429 n01644900 n02788148 n01622779 n02027492 n04254120 n03929855 n02814533 n02226429 n07715103 n03840681 n02256656 n01833805 n12267677 n01687978 n04592741 n04592741 n07873807 n02110627 n02277742 n04266014 n01776313 n02794156 n02093428 n04311004 n03920288 n03047690 n03992509 n02112350 n04591157 n03017168 n03459775 n01667778 n01820546 n03485794 n02804610 n03602883 n03666591 n01872401 n04589890 n02730930 n02090379 n03670208 n02892201 n03372029 n03062245 n02486410 n04562935 n01697457 n02099429 n04111531 n01728920 n04153751 n02113624 n01770393 n04266014 n02017213 n03483316 n01742172 n02480855 n01739381 n01768244 n03908714 n02006656 n02089867 n03026506 n01558993 n03980874 n03775546 n01980166 n09399592 n02804610 n04336792 n02027492 n04251144 n02100735 n03788365 n13040303 n02328150 n15075141 n07802026 n01532829 n03594734 n02676566 n04404412 n02346627 n02843684 n02108000 n02871525 n02606052 n03982430 n02165456 n02823750 n01871265 n02730930 n03770679 n04505470 n03404251 n01883070 n02979186 n02093991 n01630670 n04120489 n01443537 n04371774 n03866082 n01833805 n03527444 n03998194 n03873416 n02930766 n03776460 n06596364 n02321529 n04392985 n03796401 n04483307 n02526121 n02396427 n02113023 n03443371 n07747607 n01980166 n02058221 n02167151 n02769748 n03127925 n02190166 n03272562 n02097130 n04560804 n02086240 n04326547 n02095314 n01843383 n02107312 n03954731 n02281406 n02105641 n03075370 n02883205 n01829413 n02099849 n02112137 n07684084 n03095699 n02408429 n10565667 n02641379 n02259212 n02128757 n03344393 n01665541 n04004767 n07734744 n02088364 n02100583 n02672831 n01820546 n03376595 n04070727 n02981792 n03709823 n02206856 n01537544 n01776313 n04579145 n02492035 n02804414 n02113799 n02104365 n03483316 n09256479 n03642806 n07590611 n02094433 n02089973 n02497673 n01968897 n02090721 n02167151 n02974003 n02514041 n03781244 n02408429 n02279972 n04311174 n01990800 n02804610 n03146219 n13040303 n07930864 n04423845 n02437616 n03388043 n04487394 n04201297 n02704792 n01729322 n04371430 n03937543 n03216828 n02486261 n02666196 n04612504 n03180011 n03240683 n03627232 n01877812 n04486054 n02782093 n02814533 n02119022 n03788195 n07720875 n02096051 n03903868 n02105162 n04125021 n03272010 n03794056 n02058221 n03457902 n04584207 n03785016 n04311004 n03837869 n02101556 n03840681 n03425413 n03496892 n02127052 n01980166 n03770439 n04398044 n02105412 n03032252 n03594734 n02096437 n10148035 n01443537 n04125021 n03649909 n02939185 n01737021 n02510455 n02398521 n02490219 n03595614 n04277352 n03649909 n07716906 n02808440 n03124170 n03538406 n03376595 n02860847 n01797886 n04243546 n03673027 n04462240 n03595614 n04579432 n01558993 n04081281 n04136333 n03223299 n03197337 n02094114 
n03452741 n04392985 n02666196 n02786058 n09332890 n03759954 n04125021 n03000684 n04597913 n01768244 n02099601 n07716358 n03530642 n01860187 n02012849 n02814860 n02110063 n03160309 n02091032 n15075141 n02127052 n02699494 n04447861 n02109961 n03532672 n04099969 n03594945 n02101556 n04200800 n02100236 n04149813 n07920052 n04149813 n02097209 n03793489 n09428293 n03840681 n02799071 n04332243 n01807496 n04479046 n02101388 n02099849 n02085620 n02655020 n02802426 n04204347 n02094433 n02814533 n04398044 n04090263 n02051845 n04548362 n04259630 n04209133 n04596742 n02114855 n02091635 n01795545 n02231487 n07831146 n02110341 n01728920 n02802426 n01978455 n03388043 n03041632 n03976657 n02443484 n01735189 n04310018 n02009229 n02325366 n03075370 n04149813 n03891251 n02125311 n04074963 n02105855 n04525038 n02002724 n03924679 n03947888 n03544143 n01704323 n02177972 n04509417 n07754684 n03961711 n02364673 n07614500 n04239074 n02825657 n02391049 n03447721 n03042490 n04442312 n02098105 n03388043 n03692522 n04428191 n02100236 n04591157 n03729826 n03775071 n02480855 n03697007 n02088094 n02012849 n02119789 n02085782 n03424325 n01872401 n01631663 n02788148 n01698640 n02672831 n04162706 n04591157 n02128385 n02992529 n03443371 n03792782 n04200800 n04069434 n02490219 n03868242 n04277352 n03770439 n01773157 n04026417 n03492542 n02107908 n04548362 n03379051 n01582220 n02109047 n04579145 n02114548 n04152593 n02769748 n04296562 n02097209 n01983481 n04366367 n03657121 n02879718 n02119789 n03947888 n02342885 n04152593 n04370456 n03032252 n07880968 n04328186 n02107574 n02017213 n01945685 n04550184 n01514859 n04479046 n07695742 n03481172 n07747607 n02437312 n03742115 n01924916 n01608432 n04584207 n02825657 n12144580 n01689811 n04228054 n02113624 n07697313 n04367480 n04026417 n01616318 n02643566 n04228054 n01443537 n04252077 n01734418 n02490219 n02814533 n01796340 n03160309 n04355933 n03666591 n02443114 n03595614 n02948072 n03786901 n04380533 n01824575 n02018207 n02111500 n03188531 n03417042 n13037406 n02869837 n03627232 n07716906 n02130308 n02422106 n03544143 n02108551 n03314780 n01694178 n02437312 n02978881 n04243546 n02823428 n03916031 n01616318 n01496331 n15075141 n02071294 n03095699 n04525305 n02483362 n02109047 n02930766 n03792972 n04507155 n02091032 n01744401 n03929660 n01632458 n02090622 n13037406 n01580077 n03028079 n04366367 n03000247 n02088094 n04376876 n02110341 n03983396 n02791124 n02977058 n03384352 n03042490 n02643566 n04522168 n02804414 n07760859 n02445715 n01728920 n04285008 n01697457 n03961711 n03134739 n01882714 n07716358 n02364673 n02536864 n07880968 n03662601 n02699494 n04133789 n04141076 n04366367 n02892201 n02100877 n01695060 n07747607 n02971356 n02804414 n01665541 n02422699 n03065424 n07693725 n04336792 n07932039 n04311174 n07715103 n02268853 n02096585 n01981276 n04133789 n02814860 n03388183 n01631663 n02447366 n01560419 n02319095 n04370456 n04152593 n02939185 n01534433 n02909870 n01537544 n07565083 n02106030 n01630670 n02837789 n03633091 n01614925 n13052670 n02104029 n02877765 n02106166 n02011460 n03590841 n02130308 n01968897 n02397096 n02966193 n02129165 n03393912 n03133878 n03743016 n03947888 n02133161 n02102480 n02457408 n02111889 n02364673 n02980441 n02138441 n03908714 n04599235 n03220513 n01729977 n02808304 n03223299 n03444034 n03538406 n03384352 n02607072 n07684084 n07697537 n07565083 n02939185 n04483307 n01843065 n03272010 n04370456 n03627232 n03259280 n01698640 n01775062 n02769748 n04428191 n04326547 n02090721 n02051845 n03124170 n02422106 n02134418 n09399592 n03447721 n04090263 n04584207 
n03884397 n02356798 n02105641 n03786901 n02835271 n02090379 n03379051 n04389033 n01847000 n02125311 n02089078 n01498041 n01749939 n02102177 n04023962 n03788365 n02127052 n04326547 n01641577 n02484975 n07768694 n03777754 n04487394 n07873807 n02089078 n02112137 n03733281 n04141975 n02105251 n04040759 n13052670 n07684084 n03179701 n03804744 n03127747 n01748264 n02408429 n03126707 n03595614 n04235860 n02117135 n03938244 n02497673 n03425413 n04192698 n03980874 n01774384 n04591157 n02403003 n01729322 n02834397 n03527444 n03763968 n04120489 n02100735 n01955084 n02483362 n02510455 n01817953 n03868242 n02483362 n04418357 n01968897 n03691459 n01882714 n02883205 n01829413 n02870880 n02396427 n01843383 n10148035 n02699494 n01580077 n04238763 n03496892 n07684084 n02950826 n03445777 n01798484 n03877845 n04239074 n01622779 n02099712 n02837789 n07730033 n09835506 n04532106 n03976467 n03854065 n01756291 n07892512 n15075141 n02971356 n02113023 n04023962 n02108551 n02002724 n09288635 n03457902 n03124170 n01484850 n04548362 n03201208 n01734418 n02090622 n03929660 n03868863 n02480855 n02028035 n01692333 n02206856 n03970156 n07768694 n04376876 n02089973 n03976467 n03134739 n03788195 n04399382 n04023962 n03393912 n12620546 n03085013 n02277742 n03272562 n01698640 n04039381 n02877765 n03680355 n01873310 n04039381 n02980441 n04376876 n01729322 n02795169 n01530575 n04515003 n02794156 n02165105 n03594945 n02093991 n02256656 n02105412 n03216828 n02110806 n03297495 n02112137 n03710721 n02110185 n09421951 n02480855 n04336792 n02510455 n02087046 n02110627 n04005630 n02536864 n04277352 n01774750 n02667093 n04554684 n02823750 n03196217 n01496331 n01855032 n02128757 n03764736 n02981792 n03876231 n04458633 n03888257 n01860187 n04326547 n09421951 n07880968 n02500267 n01770081 n03584254 n07711569 n09468604 n01614925 n03788365 n04560804 n01729977 n03717622 n02410509 n02437312 n03000684 n01632777 n02028035 n07873807 n01630670 n03388183 n02110185 n02098413 n02107142 n04209133 n07932039 n03992509 n04612504 n01986214 n04270147 n06874185 n02909870 n02168699 n03785016 n01532829 n04264628 n02484975 n02799071 n04209133 n07584110 n01560419 n02117135 n07684084 n03814906 n03908618 n02279972 n02098413 n02097658 n04154565 n02125311 n02018795 n02168699 n02096177 n03047690 n02747177 n03788365 n02128385 n03000134 n03775546 n04204238 n04604644 n03980874 n03598930 n01855672 n02090721 n07715103 n02443114 n02102177 n04258138 n04591713 n03297495 n01667778 n04350905 n04589890 n06794110 n03884397 n04367480 n03877845 n10148035 n03492542 n04116512 n03785016 n01968897 n02111889 n04579432 n03492542 n02111277 n03535780 n03786901 n02113799 n04347754 n03535780 n02963159 n03249569 n03617480 n04070727 n02108000 n03075370 n03355925 n04418357 n02783161 n02112137 n03179701 n02114367 n02098286 n02119022 n03000684 n01695060 n15075141 n02877765 n02107683 n03721384 n02107142 n02092339 n02687172 n02396427 n01629819 n03272010 n10148035 n04141076 n04044716 n04277352 n02364673 n04141975 n01819313 n03775546 n03379051 n01756291 n03785016 n04476259 n04612504 n01632777 n03838899 n02007558 n01440764 n02088094 n01735189 n02356798 n02095889 n09229709 n02132136 n02091635 n07754684 n03146219 n03467068 n03047690 n02408429 n02086910 n02012849 n04522168 n01943899 n12144580 n01820546 n01824575 n01677366 n03868242 n03814639 n02091635 n04033901 n02074367 n04597913 n07880968 n01871265 n03000684 n01983481 n07753592 n04235860 n02229544 n03814906 n03527444 n04532106 n02447366 n04179913 n04116512 n01631663 n04037443 n03947888 n02708093 n03874293 n04612504 n04589890 n02097130 n03089624 
n03670208 n04579145 n03344393 n07614500 n04462240 n01751748 n04201297 n07802026 n02795169 n07613480 n07747607 n02115913 n02493793 n03770679 n02268443 n02009912 n04423845 n01530575 n01685808 n07715103 n03016953 n03355925 n04554684 n04366367 n03207941 n03887697 n04336792 n03759954 n03595614 n02480855 n04525038 n04355338 n02129165 n03255030 n02843684 n04493381 n02992211 n03814906 n04239074 n06794110 n03977966 n02979186 n03207941 n07875152 n01798484 n02484975 n02127052 n02133161 n03929660 n02966687 n12985857 n01873310 n07584110 n02088094 n01748264 n02101006 n03450230 n03657121 n03991062 n02013706 n03742115 n03595614 n04591713 n03891251 n01943899 n03065424 n04127249 n03584829 n02018207 n02089973 n03773504 n01751748 n02119022 n02276258 n04086273 n01877812 n02917067 n02168699 n02107574 n03954731 n02443114 n02101556 n01943899 n03457902 n01644900 n01770081 n03495258 n02606052 n02109047 n01532829 n02099429 n02100735 n03216828 n04204347 n02095889 n03794056 n02104365 n03595614 n01630670 n03223299 n04389033 n01796340 n02098286 n02109525 n04509417 n01580077 n04209239 n01675722 n07718747 n02787622 n04553703 n02100877 n02708093 n01687978 n01944390 n02807133 n03908714 n12620546 n04009552 n04591713 n02112350 n02168699 n03773504 n03127747 n03393912 n03617480 n02704792 n03590841 n03445924 n02486261 n03803284 n03954731 n02971356 n03000247 n03887697 n02894605 n04286575 n02172182 n01873310 n04118538 n04357314 n02113624 n02667093 n03141823 n04423845 n03742115 n02085620 n02727426 n04606251 n02088466 n03109150 n03134739 n02361337 n03832673 n02087394 n02177972 n04347754 n07718747 n03710721 n03970156 n04229816 n01601694 n02606052 n03425413 n03447447 n04336792 n04486054 n04201297 n07614500 n02226429 n01622779 n04435653 n09288635 n02790996 n02108000 n03961711 n03417042 n03017168 n03840681 n02509815 n04019541 n01692333 n01843065 n03461385 n04296562 n02493509 n03133878 n02110627 n07932039 n02091831 n03249569 n02091467 n03680355 n07714990 n02412080 n03250847 n03447721 n02916936 n02107683 n02492035 n03404251 n02102177 n07932039 n04557648 n04372370 n03891251 n02974003 n15075141 n02444819 n04462240 n02100236 n02108551 n04515003 n02002556 n02794156 n04204238 n04090263 n04584207 n02120505 n03773504 n02165456 n07684084 n04311174 n02002556 n02106382 n01695060 n02783161 n02422699 n03982430 n02397096 n03976657 n02692877 n03841143 n03710637 n04259630 n02099601 n03942813 n12998815 n11939491 n04399382 n03065424 n01644373 n04462240 n03992509 n03534580 n02398521 n02095889 n02808440 n04264628 n02786058 n04399382 n03933933 n04487081 n01873310 n04409515 n02108089 n02091831 n07734744 n04552348 n04162706 n02123045 n13040303 n02492035 n03657121 n02488291 n02027492 n02769748 n07753113 n03814639 n01704323 n02276258 n04557648 n03478589 n04435653 n03535780 n04371774 n02823750 n02124075 n07695742 n03337140 n03884397 n01917289 n07720875 n07742313 n04019541 n02130308 n02102040 n02104365 n02963159 n01687978 n07754684 n02328150 n02791124 n04286575 n04606251 n03814639 n09246464 n02009229 n01665541 n04399382 n04429376 n04033995 n04238763 n09256479 n01632458 n04004767 n04111531 n03710637 n02107908 n04008634 n02106382 n02086079 n07871810 n02105505 n02013706 n03733131 n07875152 n03376595 n03594945 n01776313 n03016953 n04243546 n04252225 n03709823 n02939185 n02107574 n02097047 n02109525 n03916031 n02116738 n07579787 n02018795 n03967562 n03075370 n12998815 n01818515 n02190166 n02701002 n01685808 n12267677 n02107683 n07695742 n02085782 n03692522 n02086646 n03623198 n03534580 n02133161 n07584110 n03980874 n03710721 n03838899 n04311174 n03976467 n02966687 
n03785016 n02097658 n04442312 n04380533 n03042490 n03982430 n02510455 n02408429 n02093859 n07718472 n02086079 n02834397 n03670208 n01728572 n02444819 n02091467 n04325704 n04332243 n03223299 n01734418 n03496892 n01697457 n03884397 n03483316 n04285008 n01795545 n03220513 n02007558 n01532829 n02236044 n06596364 n04111531 n03032252 n03814639 n04317175 n04033995 n02086079 n07684084 n01829413 n02128757 n03983396 n04487081 n02190166 n04523525 n04328186 n04116512 n03450230 n04228054 n02102177 n03873416 n02488702 n02226429 n02018207 n04044716 n03394916 n01818515 n01910747 n03584829 n03240683 n04133789 n03095699 n04325704 n02606052 n02102318 n02106382 n03424325 n02906734 n01818515 n04548362 n04086273 n07590611 n02033041 n04501370 n02486261 n03793489 n02974003 n09428293 n02088466 n04355933 n02113712 n02777292 n02490219 n02105056 n02071294 n02655020 n03425413 n02808440 n02493509 n03384352 n02108422 n04350905 n07695742 n02077923 n03476991 n03857828 n02494079 n01440764 n02277742 n02509815 n07730033 n01774384 n02951585 n02892201 n02488702 n02782093 n03854065 n04517823 n03467068 n07920052 n03180011 n02111129 n02361337 n03544143 n07717556 n03291819 n02110063 n03825788 n02110185 n02108422 n01744401 n04204347 n01744401 n02086079 n01773549 n03498962 n02979186 n01694178 n04265275 n04371774 n01669191 n01582220 n02128925 n02747177 n02108551 n02105056 n02107312 n01532829 n01698640 n03661043 n02834397 n03956157 n01739381 n02500267 n02317335 n02951358 n02105505 n07718747 n04192698 n04536866 n03710637 n02346627 n03476684 n02086910 n02747177 n02096177 n04548280 n01630670 n01682714 n04275548 n03538406 n02113712 n09421951 n01560419 n04252225 n02423022 n01697457 n02389026 n03595614 n02415577 n04004767 n02672831 n03018349 n03998194 n03089624 n04273569 n02058221 n03544143 n02395406 n03535780 n03450230 n03888605 n13052670 n01910747 n01843065 n03982430 n03447721 n01955084 n01630670 n03803284 n02120079 n03372029 n02504458 n03874599 n02011460 n02108089 n03627232 n02492660 n04399382 n02412080 n03325584 n03706229 n02500267 n02123159 n04238763 n02883205 n13044778 n07836838 n02799071 n01917289 n04273569 n04552348 n01795545 n02011460 n03944341 n02356798 n04264628 n02859443 n02108915 n02108422 n04591713 n02099849 n07693725 n01795545 n04596742 n03868242 n03958227 n02093991 n03134739 n01917289 n02099712 n03314780 n11879895 n10148035 n02018795 n02747177 n04542943 n03141823 n02797295 n01704323 n02777292 n02769748 n04033995 n01860187 n02321529 n01917289 n03785016 n03956157 n03100240 n04041544 n02165105 n03947888 n03891251 n03709823 n02988304 n02106030 n02095570 n02814860 n03649909 n03110669 n02444819 n04044716 n04487394 n02422106 n04069434 n02165456 n02098105 n02106382 n02280649 n02002556 n01980166 n02091032 n09229709 n03642806 n03770679 n02172182 n07892512 n01944390 n04462240 n02114548 n02403003 n03899768 n09472597 n03530642 n02974003 n02777292 n02093428 n01829413 n02097298 n01882714 n01833805 n03481172 n02094114 n03218198 n02640242 n02422699 n03297495 n04592741 n01644373 n02066245 n03028079 n04399382 n03355925 n03187595 n02071294 n01494475 n02119789 n02963159 n03976657 n03759954 n02916936 n02120079 n03109150 n04370456 n02817516 n01734418 n02415577 n03691459 n04023962 n02114712 n03995372 n06359193 n01943899 n01860187 n02859443 n02268443 n02488702 n03110669 n03250847 n02165105 n02102480 n03026506 n04465501 n03733131 n01910747 n04277352 n03065424 n01644900 n02951358 n04399382 n02326432 n03529860 n03764736 n02444819 n02093256 n02091134 n02091635 n11879895 n03657121 n04613696 n03452741 n04596742 n02097474 n02672831 n01968897 n02486410 
n02488291 n02356798 n07749582 n04033995 n03000684 n04428191 n02089078 n04005630 n03476991 n02817516 n04371774 n12144580 n12144580 n03950228 n02009912 n03425413 n04141975 n02790996 n01818515 n07583066 n04116512 n03417042 n01739381 n01944390 n03447721 n03891332 n01689811 n04081281 n02892767 n04590129 n01632777 n02086910 n01742172 n04579145 n02814860 n04458633 n04487394 n02088632 n03942813 n04162706 n07613480 n02098413 n04037443 n02457408 n04461696 n02110185 n03887697 n03344393 n04336792 n04209239 n02480495 n02102480 n04040759 n03372029 n03017168 n02087046 n02110185 n04131690 n02133161 n02749479 n02092002 n04612504 n03388183 n03417042 n02168699 n07248320 n02012849 n03791053 n02027492 n07768694 n02115913 n02093428 n01630670 n02226429 n01514859 n07716358 n02860847 n04041544 n02105505 n02107683 n03394916 n03384352 n04536866 n02107312 n04487081 n02447366 n02113186 n03777754 n03496892 n09421951 n02097298 n02112706 n02128757 n02169497 n03933933 n02109961 n04254120 n04562935 n02457408 n02093754 n15075141 n02788148 n01751748 n02837789 n06359193 n01630670 n03908618 n07754684 n02013706 n03680355 n02788148 n06794110 n02102040 n01496331 n03482405 n02107312 n13054560 n03843555 n01644373 n02894605 n01818515 n03899768 n02134084 n01692333 n02948072 n03743016 n07583066 n02279972 n07760859 n03868863 n02422699 n02825657 n02480855 n02226429 n04033901 n01817953 n04285008 n04550184 n04476259 n02100877 n09835506 n02410509 n03207743 n03877845 n03947888 n01774750 n02641379 n04584207 n02481823 n07768694 n02130308 n04147183 n04596742 n02395406 n07754684 n04252225 n04118538 n09256479 n07742313 n02769748 n03888257 n03658185 n04067472 n02481823 n03255030 n03903868 n03124043 n03874599 n06596364 n04355933 n04613696 n04357314 n02814860 n02099601 n01806567 n02396427 n02106166 n03769881 n02113023 n04146614 n02640242 n02966193 n02841315 n02481823 n03724870 n03998194 n04522168 n02747177 n02317335 n04067472 n02129165 n07714571 n03992509 n03379051 n04141975 n02028035 n02085936 n04540053 n02112137 n03977966 n03637318 n03887697 n09468604 n03424325 n04584207 n01917289 n07579787 n03325584 n01829413 n04540053 n03127925 n01558993 n02027492 n03424325 n03109150 n06794110 n01773797 n03188531 n02106382 n03788365 n02123159 n01773797 n02229544 n02727426 n02823428 n02454379 n02106030 n01924916 n12998815 n04179913 n04099969 n07684084 n03450230 n04435653 n02422106 n03637318 n03018349 n04429376 n03868863 n02110806 n02226429 n02006656 n03843555 n06359193 n01860187 n01694178 n02138441 n03630383 n04009552 n02101006 n03496892 n03447721 n07920052 n07873807 n01729977 n03220513 n01614925 n02134084 n03908618 n03763968 n03544143 n02797295 n04392985 n01728920 n03876231 n03259280 n03325584 n04296562 n02909870 n02493793 n02112706 n02776631 n02447366 n01514859 n03954731 n03344393 n04125021 n03930630 n04116512 n02441942 n03344393 n02125311 n02643566 n03840681 n02106662 n03325584 n07695742 n01491361 n03814906 n03075370 n02098286 n02666196 n07718472 n02948072 n01698640 n03777754 n07714571 n01945685 n03085013 n03445777 n04380533 n01986214 n03673027 n03710193 n02441942 n01734418 n02105412 n03447447 n04591157 n02727426 n04486054 n02510455 n03958227 n01978455 n04461696 n03908618 n04522168 n02107908 n07715103 n04009552 n03457902 n03447447 n01820546 n02692877 n03874599 n02101388 n02115641 n03532672 n03127925 n04081281 n02814533 n02916936 n02483708 n02791124 n04505470 n04417672 n03876231 n01829413 n09246464 n01728920 n02363005 n07754684 n07717556 n03000247 n01873310 n02091635 n07831146 n02794156 n03825788 n03476991 n04033901 n02607072 n02123394 n03534580 n01770081 
n02011460 n02843684 n02109525 n03916031 n04418357 n03710637 n03075370 n01644900 n04254680 n07768694 n04228054 n04258138 n04357314 n07836838 n03000134 n04310018 n03000134 n02098413 n02108000 n04252077 n02457408 n04483307 n02105505 n03125729 n02091467 n03868242 n02106166 n03240683 n02917067 n02105056 n04525305 n01753488 n02978881 n03977966 n02486261 n04162706 n02120079 n03709823 n03127747 n02089973 n03089624 n03814906 n01534433 n04613696 n03325584 n04505470 n03325584 n02115641 n03630383 n01930112 n04204238 n03063689 n02233338 n03916031 n02786058 n02113799 n03935335 n04179913 n03690938 n02442845 n01819313 n01534433 n01753488 n02823750 n01491361 n03124043 n01749939 n02328150 n03272562 n02094258 n04597913 n01773549 n03724870 n01871265 n01751748 n04039381 n03733805 n02783161 n02948072 n02397096 n02233338 n02093647 n03016953 n04344873 n02640242 n01677366 n02106166 n07745940 n03710637 n03529860 n02988304 n04350905 n02105056 n01630670 n12998815 n02094258 n03481172 n04515003 n04418357 n03075370 n04273569 n01592084 n03290653 n04487394 n02109047 n02259212 n04604644 n03976467 n04023962 n02910353 n03394916 n02106662 n01882714 n03494278 n01770393 n03445924 n02102177 n02110958 n02089973 n01924916 n02113799 n01817953 n02091134 n01697457 n03443371 n04482393 n01749939 n01985128 n04116512 n03452741 n03220513 n02510455 n03761084 n02916936 n02089867 n02281406 n03445777 n03642806 n03255030 n09428293 n01774750 n03220513 n04254777 n13037406 n04235860 n07875152 n01877812 n02086240 n03876231 n02484975 n03595614 n03733805 n02099712 n03884397 n03016953 n02088632 n04086273 n02797295 n04392985 n03124043 n02102480 n02100583 n01855032 n02667093 n01945685 n03250847 n01644373 n04147183 n02641379 n02342885 n03666591 n03000134 n03197337 n02807133 n03394916 n01797886 n02443114 n02056570 n02916936 n04090263 n01756291 n03724870 n02747177 n04553703 n01983481 n04479046 n07920052 n01631663 n01981276 n02097474 n02268443 n01944390 n02108422 n04487081 n07734744 n02091244 n02835271 n01824575 n02056570 n03773504 n01688243 n03345487 n03345487 n02486410 n03271574 n03485407 n02483362 n02113712 n02786058 n04579145 n02948072 n03595614 n03594734 n01491361 n01729977 n04033995 n04597913 n01871265 n02992211 n02361337 n04070727 n02007558 n03110669 n09399592 n02009912 n03249569 n02415577 n02190166 n02701002 n03042490 n01871265 n02091467 n03208938 n02105505 n04589890 n02138441 n04591157 n03344393 n01622779 n01924916 n02137549 n04328186 n07590611 n01776313 n04389033 n02058221 n03786901 n02865351 n02536864 n04154565 n02108422 n07583066 n03770439 n04235860 n03594945 n02096051 n03590841 n04525038 n02264363 n04592741 n02364673 n01735189 n02977058 n02488291 n07871810 n03062245 n04557648 n03837869 n01770081 n04273569 n03290653 n03124043 n02971356 n02423022 n02094114 n01695060 n01917289 n02814533 n03250847 n02110063 n02666196 n02488291 n02504013 n02130308 n01695060 n03089624 n02906734 n02791124 n09835506 n07695742 n06874185 n04229816 n02408429 n02087394 n03297495 n02058221 n03763968 n01491361 n03781244 n03873416 n02111277 n13052670 n02119022 n02108000 n02791124 n03028079 n02906734 n02112350 n02102318 n04118776 n02823428 n04435653 n03786901 n02105505 n01514859 n02860847 n01871265 n07742313 n01695060 n01735189 n03141823 n02692877 n04254680 n02483708 n02011460 n02927161 n02113978 n02106166 n03770679 n02169497 n04482393 n02277742 n04485082 n01984695 n03658185 n01697457 n09428293 n02102480 n04501370 n04141975 n01614925 n02089078 n03935335 n02486410 n01843065 n01984695 n02363005 n04536866 n04141076 n01950731 n03445777 n02102040 n07715103 n09256479 n03781244 
n02090379 n02129165 n04532670 n02939185 n04259630 n03788365 n03461385 n04606251 n04428191 n02488702 n01518878 n02107142 n01622779 n02483708 n07753113 n07930864 n01984695 n03476684 n02655020 n03376595 n01806143 n04286575 n02490219 n02640242 n04141975 n03938244 n02100735 n04041544 n02108915 n03769881 n02108551 n02110185 n02086646 n03388043 n07697313 n02098105 n04597913 n04090263 n02492660 n02795169 n02086240 n02097130 n02346627 n01622779 n01978287 n01924916 n02655020 n02787622 n02108551 n03717622 n07697313 n02105505 n07753113 n04204347 n02909870 n01828970 n02018795 n07836838 n01775062 n07716358 n01675722 n02807133 n02493793 n02091467 n02804414 n12144580 n02823428 n09229709 n03379051 n02791270 n01828970 n03832673 n04366367 n03877845 n03372029 n03961711 n03916031 n03788365 n04265275 n01806143 n04008634 n02794156 n03777754 n01630670 n07860988 n04239074 n04270147 n03761084 n04270147 n04487081 n02481823 n02395406 n02093859 n03991062 n04264628 n04258138 n06359193 n02074367 n07614500 n02865351 n07718747 n04074963 n04482393 n03347037 n02110063 n07836838 n02090379 n03595614 n03482405 n13052670 n04023962 n03991062 n04548280 n02056570 n02794156 n13133613 n02100877 n03272010 n02107683 n04149813 n04152593 n02002556 n03954731 n01968897 n03388043 n03764736 n02690373 n02966193 n01518878 n02128385 n03197337 n02092002 n03110669 n03478589 n02457408 n02870880 n02011460 n02093428 n03063689 n03337140 n04356056 n02963159 n04435653 n03871628 n02110627 n02088238 n03160309 n03983396 n02992529 n03843555 n01773549 n02389026 n09468604 n04505470 n02109961 n02794156 n03854065 n04355338 n02094433 n13133613 n03272010 n01667778 n03494278 n12768682 n02481823 n03085013 n03179701 n01667778 n02102040 n02112706 n02951585 n02108089 n02099601 n07860988 n04033995 n03388183 n02127052 n02107142 n03814639 n04004767 n02099712 n01582220 n02102177 n02100735 n03958227 n02481823 n01773549 n03131574 n04540053 n03424325 n03871628 n02116738 n09229709 n02797295 n02704792 n02825657 n02115913 n03888605 n02009229 n03063689 n07734744 n02669723 n02101556 n03045698 n04532106 n03961711 n04372370 n02655020 n02094433 n02088466 n04005630 n12144580 n02892767 n02091244 n03110669 n03759954 n03594945 n03594945 n04462240 n07711569 n03259280 n04482393 n02018207 n03134739 n03832673 n04467665 n04285008 n02169497 n03796401 n02099267 n02909870 n02105412 n04265275 n01728572 n04336792 n02834397 n02804414 n04548362 n03109150 n02895154 n03929660 n01685808 n02111500 n04033995 n01768244 n02002556 n03887697 n04069434 n03594734 n02500267 n07714990 n02137549 n03014705 n02447366 n01537544 n07802026 n03895866 n04330267 n03602883 n02795169 n04153751 n03782006 n02489166 n03447721 n03417042 n04550184 n02500267 n02112706 n03347037 n02088364 n02640242 n03983396 n02817516 n01695060 n13133613 n02095314 n03887697 n02892767 n07697313 n11939491 n04332243 n02667093 n02643566 n02493509 n04251144 n02730930 n04118776 n02097209 n04335435 n03016953 n03691459 n04037443 n02100583 n02104029 n02088466 n09193705 n03495258 n02095314 n03355925 n07613480 n02971356 n04153751 n01945685 n01697457 n04532106 n02895154 n04548362 n04485082 n02002724 n02999410 n03976467 n02951358 n03874293 n02442845 n04229816 n01614925 n02769748 n04461696 n02486410 n03916031 n04562935 n02098413 n02097474 n03584829 n02606052 n02123394 n03871628 n04311004 n02865351 n01601694 n02111129 n04509417 n01882714 n03908714 n02102973 n03983396 n02093859 n03775071 n02667093 n02906734 n07873807 n04277352 n04153751 n01675722 n01601694 n04263257 n01582220 n03000134 n04263257 n04286575 n06359193 n02445715 n03179701 n04275548 n02444819 
n02002724 n03124170 n02018795 n02776631 n12144580 n03041632 n02101556 n04435653 n04254120 n04505470 n03297495 n02093256 n03529860 n01734418 n04462240 n02089867 n03259280 n03804744 n02484975 n03372029 n02992529 n01629819 n03814639 n04004767 n02280649 n04275548 n04023962 n03476684 n01843383 n02490219 n03450230 n02088238 n02129165 n07716906 n02006656 n07615774 n04033901 n02101388 n02412080 n02871525 n01689811 n02447366 n02951585 n03325584 n04238763 n01817953 n07753275 n03803284 n03724870 n01694178 n04613696 n03961711 n04553703 n04493381 n04507155 n03388183 n04483307 n02840245 n01739381 n03837869 n03980874 n02093647 n02992529 n03983396 n02110958 n01688243 n02100236 n01873310 n04525038 n03496892 n04350905 n02115913 n01824575 n04443257 n01729322 n03197337 n09421951 n07614500 n03445777 n03680355 n04579145 n03345487 n03062245 n02655020 n02769748 n03930630 n03956157 n04332243 n03690938 n04153751 n04456115 n02883205 n01631663 n02841315 n02480495 n02396427 n04357314 n01695060 n02101556 n03947888 n04367480 n03958227 n01924916 n02111129 n02939185 n01829413 n02108915 n03388183 n02410509 n04273569 n02119789 n04505470 n02094258 n02231487 n02916936 n02441942 n04039381 n02883205 n02098413 n01496331 n03534580 n07714990 n04286575 n03000247 n03691459 n03376595 n01729322 n12144580 n04192698 n03998194 n02979186 n02102973 n02110627 n01728572 n03272010 n03786901 n04033901 n02097047 n03947888 n07873807 n02097047 n07754684 n02276258 n02104365 n01734418 n03976467 n02825657 n01694178 n01682714 n02747177 n03710193 n09288635 n02510455 n02319095 n02088364 n02129604 n04326547 n03871628 n02096177 n09246464 n03127925 n02488702 n06785654 n02066245 n12998815 n01632777 n02091244 n01742172 n03908618 n04536866 n03841143 n01917289 n02276258 n03457902 n04041544 n03259280 n02236044 n02090379 n04127249 n03873416 n02415577 n03590841 n02094258 n03884397 n01978287 n02172182 n01990800 n04476259 n03871628 n03584829 n04118776 n02509815 n02102480 n01729977 n02776631 n03125729 n02948072 n01774384 n01695060 n07734744 n01990800 n02445715 n03017168 n02606052 n04612504 n02119789 n02113978 n03706229 n02115913 n02655020 n02640242 n03478589 n03891251 n02892201 n02676566 n01877812 n02037110 n07745940 n02090721 n04548280 n02971356 n03042490 n02865351 n04310018 n07802026 n01843065 n01944390 n03443371 n01496331 n13044778 n03196217 n02111889 n09288635 n03777568 n03970156 n02027492 n09332890 n04326547 n04458633 n02093428 n03992509 n03908618 n03290653 n04311004 n03764736 n04465501 n03345487 n04099969 n02843684 n02361337 n02066245 n02099601 n03259280 n02105641 n01755581 n03937543 n03249569 n02124075 n03761084 n02834397 n03891251 n07753275 n04389033 n03599486 n04392985 n01582220 n03642806 n01749939 n01944390 n03146219 n09428293 n02112350 n03249569 n02085936 n03240683 n04597913 n03249569 n02256656 n07248320 n04376876 n03089624 n04118538 n02966687 n03891332 n01773157 n02948072 n01685808 n04371430 n02107312 n01749939 n02085936 n02091831 n02098105 n02708093 n02120505 n01601694 n06874185 n02319095 n01616318 n01775062 n13040303 n03796401 n04482393 n03272562 n03478589 n02190166 n02910353 n02951358 n01749939 n12985857 n04254120 n03944341 n03743016 n01855672 n04228054 n03642806 n03956157 n04162706 n02992211 n01883070 n03045698 n02018207 n01872401 n04239074 n07932039 n04392985 n02641379 n01484850 n01742172 n04376876 n04550184 n03733805 n04371774 n04317175 n03873416 n02361337 n02002556 n02168699 n02098413 n02104365 n03841143 n02074367 n04344873 n07615774 n04149813 n02321529 n12144580 n02509815 n03938244 n01978455 n03047690 n04252077 n02487347 n03141823 n02666196 
n02123045 n02486410 n02492660 n03796401 n02112350 n07730033 n03950228 n04162706 n02895154 n02105641 n03404251 n02007558 n01739381 n02481823 n04409515 n02443114 n02879718 n03345487 n02268853 n12620546 n03930313 n04380533 n01518878 n04596742 n03680355 n02074367 n01667778 n03376595 n04366367 n02097047 n02101006 n01873310 n03876231 n04507155 n02086910 n04370456 n02687172 n03724870 n02966193 n02776631 n03089624 n04456115 n03325584 n01770081 n04428191 n01667778 n02132136 n02105162 n03743016 n04367480 n02098105 n03000134 n02100236 n02011460 n02097047 n02177972 n04493381 n03874293 n02017213 n03908714 n02361337 n02669723 n02119022 n02105505 n03884397 n02190166 n03216828 n02410509 n02101556 n02098286 n03250847 n02117135 n03929660 n04332243 n03891332 n02018207 n01498041 n03977966 n02892767 n03781244 n02094433 n02112137 n02910353 n03791053 n01773157 n03599486 n11939491 n01496331 n02950826 n09246464 n02099429 n02108551 n02895154 n09229709 n07932039 n03721384 n03529860 n02113186 n03929660 n02086646 n02787622 n02676566 n02006656 n02104365 n03045698 n03100240 n03599486 n03924679 n03937543 n02869837 n02123394 n01980166 n04355933 n03133878 n03709823 n06794110 n02110341 n01796340 n02978881 n03495258 n03452741 n02091032 n04442312 n04118776 n01630670 n03662601 n02174001 n04606251 n02107142 n03814906 n03457902 n02085782 n03598930 n02094258 n03000247 n02966193 n02489166 n04367480 n02110063 n07753275 n07715103 n04485082 n03075370 n02098105 n13054560 n02730930 n03670208 n02281787 n04462240 n02510455 n02814860 n04482393 n03498962 n09229709 n02097130 n04265275 n04004767 n02093647 n01443537 n01704323 n02096437 n03394916 n04423845 n02108422 n03706229 n02869837 n01737021 n03930313 n04039381 n02113186 n02403003 n02037110 n03637318 n02823750 n01677366 n02093256 n02096294 n06596364 n03220513 n02106030 n02917067 n02090622 n04141076 n01749939 n02981792 n02111889 n02116738 n09246464 n02791124 n02091244 n02119022 n02445715 n03216828 n03095699 n03481172 n04442312 n02802426 n09428293 n03065424 n02363005 n12057211 n02422106 n02999410 n03207743 n03786901 n02363005 n02417914 n01698640 n03063599 n04409515 n03891251 n03794056 n02101388 n04044716 n02226429 n01818515 n01558993 n02110806 n03337140 n03627232 n04204238 n07873807 n03930630 n04311174 n01616318 n04330267 n04179913 n04501370 n02687172 n02086079 n03976467 n03950228 n01773797 n03197337 n02640242 n01440764 n02342885 n02389026 n02895154 n02056570 n04584207 n03042490 n09421951 n01616318 n03384352 n07248320 n03590841 n03903868 n02129165 n02123159 n03837869 n03630383 n02119789 n07768694 n02102973 n03788195 n01682714 n02130308 n03495258 n03770439 n02398521 n02965783 n02033041 n02088094 n02939185 n01914609 n04147183 n03720891 n02105641 n01843383 n01818515 n02730930 n02109961 n04398044 n04131690 n01914609 n03481172 n04317175 n03344393 n04557648 n02120505 n02109961 n02128385 n02391049 n03041632 n09246464 n03666591 n02111129 n02974003 n02643566 n03492542 n02090622 n02389026 n01735189 n03478589 n03785016 n03854065 n03207743 n04399382 n02108422 n04428191 n07760859 n03888605 n02704792 n03697007 n03657121 n04141975 n04008634 n02799071 n02018795 n02877765 n07613480 n11939491 n02108089 n02098413 n01440764 n01776313 n03804744 n01817953 n02788148 n03400231 n03899768 n02027492 n02028035 n02087394 n04392985 n01944390 n04204238 n03995372 n02437616 n03000684 n03146219 n01496331 n02128925 n02025239 n03903868 n06596364 n01990800 n03877845 n02704792 n01773549 n03271574 n02667093 n01514668 n02089867 n02410509 n09193705 n04204238 n02110806 n02823428 n01807496 n07753592 n02835271 n04579432 n03763968 
n01667114 n01770393 n02364673 n03777568 n04204238 n04252077 n01496331 n02877765 n01532829 n02640242 n04483307 n04332243 n03197337 n02094433 n03995372 n03485407 n02085782 n04591157 n07930864 n02086079 n01983481 n04162706 n02981792 n02447366 n03733805 n02097298 n04120489 n04442312 n07714990 n02823428 n02788148 n02791270 n11879895 n03776460 n02834397 n03657121 n02423022 n03785016 n03888257 n02018207 n01742172 n04154565 n02536864 n03447721 n02229544 n04540053 n04266014 n03457902 n03425413 n02504013 n02107312 n02177972 n02489166 n04330267 n03791053 n04311004 n02422699 n02319095 n04606251 n04229816 n02101556 n04592741 n03666591 n02088094 n02017213 n03759954 n02128925 n03544143 n03188531 n03459775 n04254680 n03496892 n02483362 n02906734 n07753275 n02879718 n02641379 n02814860 n03400231 n02966687 n09246464 n02114712 n02087046 n02115913 n03424325 n03529860 n01943899 n04238763 n03146219 n02747177 n02233338 n13044778 n03109150 n02112350 n03180011 n02091831 n03134739 n03133878 n01740131 n02125311 n02398521 n02219486 n04086273 n02091244 n02099849 n02119789 n04039381 n02094114 n04562935 n03938244 n07693725 n12998815 n04542943 n02389026 n03417042 n01440764 n02095889 n02090379 n02493509 n02672831 n01534433 n02794156 n02396427 n02117135 n03782006 n04336792 n03042490 n03075370 n02488291 n04332243 n02708093 n02097209 n02356798 n03837869 n04355338 n03584829 n03041632 n06359193 n03041632 n03888257 n03717622 n04235860 n04275548 n01592084 n03388549 n01669191 n07760859 n02090622 n01440764 n01729322 n02480495 n07871810 n04505470 n04418357 n03404251 n03676483 n02165105 n04008634 n03958227 n02480855 n02823750 n07579787 n02009912 n07734744 n03372029 n01440764 n02102177 n03840681 n07753275 n03026506 n01601694 n03047690 n02086079 n02979186 n02089078 n02397096 n12985857 n02808304 n04118538 n04229816 n09428293 n07880968 n04548280 n03804744 n01622779 n02110063 n02814860 n02128385 n01824575 n01496331 n04286575 n03599486 n03857828 n03866082 n03495258 n02526121 n02098105 n02102973 n03124043 n04357314 n07768694 n03000134 n03970156 n04040759 n02112706 n04008634 n04040759 n06794110 n02086646 n02066245 n03884397 n03967562 n04125021 n02910353 n02236044 n01981276 n07871810 n02099849 n03146219 n04146614 n09193705 n02113023 n02100236 n13044778 n03584829 n03180011 n02027492 n03240683 n02526121 n01494475 n02492660 n01774750 n07768694 n02113712 n03666591 n12998815 n03657121 n02110806 n03717622 n02087394 n02692877 n02497673 n04507155 n02114855 n04332243 n02100877 n04332243 n02110627 n03424325 n02104365 n01943899 n03535780 n02883205 n01667778 n01986214 n02666196 n02966687 n02097658 n03866082 n04239074 n02488702 n01735189 n04090263 n04008634 n03742115 n03877472 n03788195 n03794056 n01768244 n02797295 n02009229 n03085013 n02119789 n04557648 n02099267 n03424325 n03666591 n01667778 n07875152 n01514668 n02492660 n03482405 n04033901 n04044716 n03290653 n12057211 n02981792 n01496331 n02483362 n03314780 n04099969 n02669723 n02113799 n02074367 n02094258 n03866082 n04540053 n02777292 n03782006 n02105251 n03761084 n01955084 n02643566 n02106662 n01580077 n01828970 n02690373 n03063599 n02114548 n03014705 n03724870 n02088364 n07716358 n03724870 n03937543 n02091635 n02106382 n07613480 n13133613 n04591157 n02396427 n03776460 n02108089 n02017213 n04350905 n02107683 n04228054 n01773549 n03888257 n02488291 n04493381 n01817953 n01641577 n02012849 n01797886 n02787622 n02910353 n04067472 n03100240 n02087046 n03733131 n02643566 n02916936 n02480495 n02815834 n02086079 n02814860 n02114712 n07742313 n01728920 n02356798 n13044778 n01798484 n04613696 n02108915 
n02109047 n03272010 n04008634 n02097209 n01843065 n02999410 n04086273 n03888257 n02123394 n04356056 n09468604 n01601694 n03950228 n04344873 n02672831 n12768682 n02110341 n10148035 n02114367 n04409515 n03240683 n04285008 n07831146 n03584254 n01855672 n02489166 n03216828 n03297495 n04086273 n01514859 n01629819 n02643566 n02113023 n02791270 n03983396 n07880968 n02268853 n03970156 n02091831 n02268853 n02167151 n03742115 n03947888 n04591157 n03729826 n02988304 n03717622 n02391049 n02096585 n02219486 n02093647 n02002556 n02504458 n01665541 n03938244 n03776460 n02093256 n02056570 n02096051 n02488702 n07693725 n01796340 n02950826 n01828970 n03534580 n03394916 n04404412 n03895866 n01944390 n04554684 n02444819 n03623198 n04263257 n04099969 n02105855 n03584829 n04442312 n01514668 n02088364 n01943899 n02091831 n02071294 n03461385 n04485082 n01630670 n01873310 n02011460 n02113978 n01629819 n07711569 n04023962 n01631663 n02815834 n01797886 n03662601 n02704792 n02494079 n02124075 n03530642 n03424325 n02974003 n01685808 n02086910 n04004767 n03720891 n04200800 n01755581 n04118776 n02058221 n03124170 n03584829 n01978455 n02100583 n03131574 n03467068 n02490219 n02978881 n02096051 n04254120 n03028079 n04371774 n02105641 n02397096 n04258138 n03297495 n02108000 n02096585 n02090721 n02786058 n02025239 n01784675 n03393912 n01755581 n02437616 n02219486 n03388549 n02769748 n03384352 n03998194 n02699494 n04277352 n03637318 n02415577 n03788365 n01943899 n02009229 n04325704 n04532670 n01498041 n03793489 n04141076 n04525038 n04548362 n02012849 n02093754 n03534580 n04532670 n02859443 n02027492 n04070727 n03673027 n11879895 n02643566 n04606251 n04613696 n03680355 n01860187 n04251144 n01739381 n02098413 n04019541 n02101556 n03201208 n04532106 n02879718 n02951585 n04604644 n04275548 n02097474 n03482405 n07734744 n03868242 n04332243 n04589890 n03788365 n03649909 n02090721 n02672831 n02109525 n02112018 n07615774 n02102480 n03125729 n01632458 n04252225 n01824575 n02666196 n03832673 n02105641 n07768694 n03871628 n03127925 n03344393 n02096177 n03887697 n03424325 n03014705 n03796401 n03617480 n04065272 n03982430 n04479046 n03763968 n02486410 n07742313 n02687172 n03794056 n04254680 n03661043 n02837789 n02454379 n01560419 n04443257 n07613480 n02110806 n01818515 n02099712 n03384352 n04366367 n03676483 n02892767 n02110627 n02096294 n01667778 n02870880 n03425413 n01751748 n04275548 n03187595 n02437312 n03623198 n01796340 n09472597 n04523525 n02486261 n01531178 n02493509 n02979186 n03584829 n03924679 n02099601 n03259280 n04229816 n01872401 n04579432 n01855672 n01622779 n02509815 n04525305 n04131690 n02484975 n09193705 n02097658 n02877765 n02749479 n06596364 n01806567 n02093428 n01773157 n03207941 n03947888 n01818515 n02092339 n02276258 n03207743 n02794156 n02106166 n03529860 n04493381 n02086079 n02011460 n03961711 n03680355 n04263257 n01819313 n02102177 n04254120 n03888257 n03729826 n04136333 n04346328 n02107908 n02447366 n03125729 n03476684 n02443114 n03788195 n03710637 n03657121 n03633091 n03141823 n07802026 n02113978 n01665541 n01744401 n02834397 n03633091 n04335435 n02011460 n02099712 n03527444 n03180011 n02408429 n02123394 n03980874 n04070727 n03445777 n04465501 n03530642 n03291819 n04252077 n01689811 n02058221 n02112137 n01950731 n01682714 n02231487 n07684084 n03481172 n02963159 n07768694 n03977966 n02165456 n02939185 n04258138 n02123045 n02128757 n02037110 n02128925 n02483362 n03483316 n04273569 n04208210 n03942813 n03291819 n03467068 n02091467 n02113624 n03950228 n03786901 n04228054 n03649909 n01629819 n02104365 n02865351 
n02097047 n03902125 n02231487 n04033995 n02172182 n01632777 n02494079 n02391049 n02093256 n03992509 n03710721 n03272010 n03124043 n02422699 n02492035 n02410509 n04120489 n02793495 n03594734 n03841143 n03124043 n04265275 n02088466 n02123159 n03461385 n01675722 n02965783 n07753113 n07614500 n04154565 n03590841 n02361337 n07720875 n01843383 n04162706 n02134418 n03271574 n01494475 n01729977 n01689811 n01582220 n02655020 n03594945 n02099712 n02110627 n02441942 n02791124 n02007558 n03891332 n02791270 n02037110 n02127052 n01910747 n01829413 n04523525 n02417914 n04465501 n01860187 n03935335 n03908714 n02018207 n02006656 n07802026 n03950228 n07590611 n02092002 n04423845 n02790996 n04252225 n03666591 n02109961 n03930630 n02860847 n04552348 n02092339 n09229709 n02791270 n07579787 n03196217 n02500267 n02790996 n01622779 n02484975 n02669723 n02280649 n11879895 n03769881 n02167151 n02403003 n03717622 n02093991 n03942813 n04254680 n04443257 n01860187 n09229709 n02028035 n02087394 n01986214 n02115641 n02640242 n04328186 n03908618 n04154565 n02797295 n02097209 n02125311 n07932039 n02102973 n03529860 n01980166 n02443114 n03733131 n07718472 n03255030 n02009912 n02087394 n03218198 n02106550 n03888605 n01704323 n02091635 n03710721 n02325366 n02112350 n03207743 n03980874 n03042490 n07590611 n02096051 n02408429 n02091244 n03773504 n01491361 n02120505 n02607072 n02487347 n02504458 n04204347 n02037110 n02790996 n02107312 n04044716 n02002556 n02727426 n04606251 n02091831 n03598930 n03089624 n01807496 n07613480 n04404412 n04542943 n09229709 n03467068 n01943899 n11939491 n02086646 n02095314 n02328150 n02992529 n02281787 n04008634 n07697313 n03347037 n02012849 n02099429 n04179913 n02106662 n03841143 n07768694 n07880968 n02111129 n04456115 n04330267 n01629819 n04146614 n03710193 n03250847 n02808304 n03018349 n01943899 n02398521 n03388549 n02097658 n03529860 n02782093 n01592084 n04311174 n02823750 n04067472 n02422699 n03832673 n04367480 n04557648 n02051845 n01882714 n02012849 n03796401 n01735189 n09256479 n03529860 n11939491 n03673027 n01669191 n03742115 n02692877 n02328150 n07715103 n02268443 n02268853 n01770393 n07718747 n07714571 n01695060 n01843065 n03404251 n02823750 n04264628 n03478589 n02643566 n01514859 n02086646 n01692333 n03841143 n03977966 n04136333 n02089973 n02097298 n04311174 n01677366 n01930112 n02128925 n03710721 n02909870 n02027492 n04252077 n03544143 n09332890 n04118776 n04553703 n02488702 n02109525 n04443257 n01728572 n03384352 n04136333 n07718472 n03773504 n04273569 n02730930 n02259212 n03125729 n01748264 n03095699 n02504458 n04579432 n02231487 n04442312 n03447447 n02939185 n02110341 n04458633 n03492542 n02841315 n04285008 n02787622 n01514668 n03877472 n04486054 n04238763 n02480495 n07871810 n01968897 n03954731 n03584829 n03379051 n02123394 n03259280 n07920052 n02113712 n02092002 n02727426 n04149813 n01775062 n03457902 n03791053 n02106550 n09288635 n01742172 n02219486 n04332243 n02490219 n04033901 n03590841 n04344873 n07753592 n02085936 n03447721 n01580077 n02120505 n02504458 n03633091 n02113023 n02109525 n11879895 n03445924 n01882714 n02089867 n04604644 n03697007 n02814533 n02094114 n01631663 n02105251 n02948072 n04200800 n01820546 n03125729 n03290653 n02102480 n04525038 n03347037 n03950228 n02319095 n03160309 n03787032 n02107574 n04487394 n04548280 n07697537 n01580077 n03599486 n04599235 n01735189 n04612504 n02786058 n03000247 n02906734 n13054560 n02132136 n02939185 n02101006 n04141975 n04127249 n07565083 n01641577 n02017213 n02095889 n02096585 n03461385 n02231487 n04493381 n02092339 n04332243 
n02497673 n02119022 n02099601 n04311004 n03920288 n02704792 n02091032 n03240683 n03538406 n04560804 n01440764 n02776631 n02013706 n02099849 n01532829 n02110341 n01944390 n03218198 n02099712 n04429376 n03249569 n02422106 n04254777 n04009552 n03617480 n03337140 n01692333 n02493509 n12144580 n03095699 n03781244 n03782006 n02099429 n09428293 n04179913 n02105251 n07716358 n04357314 n03895866 n02948072 n03888257 n03447447 n07248320 n01537544 n02487347 n03982430 n02910353 n07892512 n09468604 n03857828 n03290653 n03388043 n03843555 n04423845 n04404412 n04347754 n01537544 n02992529 n02101388 n02056570 n02093859 n02105412 n03933933 n02704792 n03063599 n12267677 n04482393 n01443537 n03670208 n04590129 n07565083 n04111531 n03188531 n02114712 n04409515 n03272010 n02107312 n02112018 n03676483 n03770439 n13133613 n04259630 n02105641 n04049303 n02807133 n03249569 n02099267 n04065272 n07716906 n02087394 n01669191 n04376876 n01847000 n02123597 n04131690 n02033041 n04357314 n01530575 n02841315 n01698640 n04179913 n01824575 n02092002 n02058221 n03617480 n04146614 n02097130 n09399592 n02892201 n02116738 n04204347 n04522168 n04136333 n01531178 n02346627 n02168699 n01980166 n07711569 n03347037 n04208210 n02823750 n02124075 n02509815 n03404251 n02088364 n01798484 n02009912 n03814639 n02172182 n03840681 n02002556 n03888257 n03065424 n03325584 n02317335 n02281406 n03658185 n02095570 n03920288 n03710637 n02123597 n03877472 n04357314 n07802026 n04067472 n02437616 n03482405 n01532829 n04553703 n03065424 n02058221 n07718472 n04252225 n02096585 n02097658 n04525305 n12057211 n04259630 n02490219 n04285008 n01534433 n01622779 n04067472 n04557648 n03888257 n02096051 n01632458 n02808304 n12985857 n01756291 n02111500 n02963159 n02790996 n03630383 n07714990 n04589890 n02128757 n02786058 n02951358 n03763968 n02356798 n01818515 n02607072 n07717410 n03877472 n04069434 n02483362 n04479046 n02268853 n10148035 n02815834 n02116738 n04501370 n03131574 n02099712 n02108915 n04209239 n03770439 n02226429 n12144580 n02906734 n02783161 n02667093 n04239074 n02110063 n01582220 n07768694 n01774750 n03787032 n12057211 n03764736 n01795545 n03623198 n01443537 n02892201 n03868242 n03384352 n02403003 n03658185 n03485794 n02085782 n04328186 n03388183 n04344873 n07716358 n02097047 n01737021 n01695060 n02098286 n04258138 n03127747 n07565083 n01667114 n03929660 n03476684 n03785016 n04041544 n02100236 n03854065 n03529860 n02097209 n02100236 n04540053 n02002556 n03495258 n02834397 n04346328 n03485407 n02835271 n01729977 n02802426 n03781244 n02793495 n02892767 n02086240 n02490219 n02119022 n06359193 n03207743 n01980166 n04467665 n04332243 n03598930 n04523525 n03877472 n03976657 n02256656 n02097130 n02606052 n04037443 n02793495 n03929855 n04118776 n02727426 n01833805 n02536864 n03710721 n03459775 n04311004 n02113712 n02480495 n03041632 n02966193 n03476684 n07716358 n04310018 n07579787 n02493793 n02094433 n07734744 n01744401 n03770679 n04523525 n02364673 n03355925 n07715103 n02403003 n01644900 n01518878 n02815834 n04251144 n02690373 n02124075 n04553703 n04081281 n02408429 n01704323 n02640242 n03478589 n04447861 n07875152 n04209133 n07734744 n04487081 n02177972 n02892767 n02113624 n03016953 n07753275 n02319095 n07745940 n02108000 n02028035 n02504458 n02106550 n07754684 n03063599 n03787032 n02098105 n03467068 n02089867 n02093428 n07718747 n07831146 n03496892 n03961711 n01924916 n01883070 n01704323 n03733281 n03791053 n02930766 n03478589 n01980166 n01985128 n09472597 n03967562 n02087394 n01914609 n02497673 n03924679 n03706229 n02108089 n15075141 n03977966 
n07715103 n03187595 n02236044 n04599235 n03529860 n04023962 n02092339 n02977058 n07584110 n07730033 n03272010 n03676483 n02493509 n09468604 n02091467 n03534580 n03125729 n04467665 n01665541 n04330267 n02917067 n03196217 n02009229 n03042490 n01632458 n03100240 n02965783 n02172182 n03920288 n03109150 n07747607 n02093859 n02655020 n03658185 n03584254 n02110806 n04596742 n02113799 n01530575 n03345487 n02917067 n03788195 n02105162 n15075141 n04317175 n04251144 n02112018 n04326547 n03838899 n01955084 n02417914 n02099849 n02317335 n03095699 n02699494 n04554684 n03729826 n04005630 n02108422 n03127925 n02123045 n03832673 n02504013 n01806567 n04069434 n04023962 n04111531 n02097209 n02105056 n02097209 n03376595 n02095314 n01756291 n03773504 n01980166 n06794110 n04074963 n02747177 n02108551 n03255030 n03891251 n03935335 n03673027 n02111277 n03188531 n02100236 n02992529 n02607072 n02095889 n02002556 n02834397 n02134084 n07716906 n02804414 n02134084 n04008634 n02509815 n04254120 n04147183 n04204238 n03908714 n04162706 n03197337 n11879895 n03787032 n04111531 n02978881 n02102177 n03379051 n04371774 n01704323 n03710721 n01518878 n03016953 n02106382 n04540053 n01558993 n02105412 n02981792 n03028079 n03782006 n02086079 n04192698 n02233338 n03649909 n03496892 n02276258 n03832673 n04070727 n03899768 n03017168 n03485794 n04591157 n02493509 n02093754 n02107683 n04208210 n02992529 n03124043 n03876231 n03691459 n01667778 n07730033 n04252225 n04208210 n02860847 n01742172 n02094114 n03000134 n07860988 n01775062 n03958227 n03045698 n03759954 n02086240 n03676483 n04532670 n02100583 n02793495 n01855032 n04275548 n04409515 n03733131 n03710193 n07760859 n03854065 n01629819 n02840245 n03691459 n03452741 n03297495 n03877472 n02125311 n04037443 n02526121 n01698640 n04591713 n02860847 n02412080 n01728572 n04152593 n02879718 n02699494 n02115913 n03000134 n02326432 n02966193 n04326547 n04049303 n04501370 n07590611 n02088466 n01665541 n03141823 n02037110 n02110958 n03481172 n07860988 n02509815 n02869837 n03930313 n03492542 n02480855 n02486261 n03495258 n03478589 n03063599 n04525038 n02109525 n02787622 n01592084 n02437616 n13040303 n04118776 n02104365 n02927161 n03532672 n03814639 n01910747 n01737021 n03877845 n07579787 n09288635 n01981276 n03133878 n02667093 n02747177 n02500267 n04370456 n01601694 n03769881 n04372370 n02114712 n02326432 n03134739 n03041632 n01685808 n02233338 n01614925 n03982430 n03929855 n04069434 n04367480 n03961711 n03201208 n02092002 n04370456 n04376876 n02395406 n03717622 n04317175 n02088094 n02950826 n01697457 n04591157 n01784675 n03930630 n04251144 n02802426 n07697537 n01689811 n12998815 n04550184 n04486054 n01667778 n03916031 n01795545 n02790996 n01910747 n02085936 n03938244 n03976467 n02325366 n03527444 n02268443 n03290653 n03444034 n02105056 n02096437 n03457902 n03843555 n02500267 n02088094 n02769748 n04525038 n02606052 n04487081 n02486261 n03492542 n03733131 n02120505 n07745940 n02112137 n07579787 n02105505 n03452741 n10148035 n04125021 n04026417 n02089867 n03995372 n02177972 n03903868 n04409515 n01943899 n02100236 n03124170 n03197337 n02361337 n04325704 n03920288 n03825788 n02101388 n11879895 n03443371 n02071294 n07880968 n03769881 n03902125 n02110806 n03637318 n04019541 n03840681 n02342885 n03476684 n02094114 n04023962 n03706229 n02730930 n02877765 n04548362 n02088632 n04285008 n07873807 n03903868 n04501370 n04118538 n02025239 n03530642 n02018207 n03476684 n03602883 n02948072 n02102040 n02123394 n01944390 n02268853 n04590129 n01530575 n02117135 n03691459 n02504013 n03179701 n04357314 n04399382 
n03218198 n02865351 n03598930 n02113978 n03697007 n01843383 n02074367 n02264363 n01742172 n02123045 n02795169 n03721384 n02129165 n03544143 n04522168 n12985857 n02814860 n02110958 n02100735 n13044778 n02817516 n07730033 n04429376 n04033995 n04367480 n03729826 n02493793 n04141975 n01740131 n01914609 n02134418 n01739381 n02687172 n02483362 n13037406 n01742172 n02396427 n02397096 n01689811 n09399592 n04347754 n02865351 n04344873 n02111889 n02939185 n04033995 n02037110 n01773157 n03599486 n02093647 n01532829 n02097209 n02492660 n04009552 n04033901 n02099429 n02056570 n02098413 n02992211 n03788195 n03207743 n03444034 n03814639 n04485082 n01981276 n01978455 n03461385 n01688243 n02277742 n03388043 n02871525 n02101556 n03131574 n02236044 n07248320 n03041632 n02095314 n04344873 n02119022 n02172182 n13054560 n01978287 n03532672 n04536866 n02105412 n04118538 n02443484 n01695060 n02909870 n02441942 n02017213 n02799071 n04147183 n04589890 n02056570 n02486261 n03345487 n04328186 n02328150 n04476259 n04346328 n04273569 n03290653 n03627232 n02791124 n02012849 n02259212 n02090379 n03627232 n03764736 n02817516 n04326547 n03065424 n02909870 n01675722 n04522168 n13133613 n02655020 n04209133 n02783161 n03796401 n03250847 n01872401 n01682714 n01873310 n01631663 n04005630 n02843684 n02769748 n02804610 n03782006 n01978455 n02097298 n02787622 n07716906 n02111129 n02123045 n02279972 n02497673 n02980441 n02111129 n03297495 n04487081 n04370456 n01667778 n03710193 n02096294 n03876231 n03938244 n02950826 n04311174 n04081281 n01687978 n04371774 n06794110 n02281406 n04326547 n02395406 n02096051 n02113186 n04070727 n02206856 n02690373 n01729977 n03000684 n01514859 n03197337 n03445924 n04604644 n02280649 n02090379 n02012849 n01534433 n07734744 n03838899 n02177972 n04423845 n03899768 n02098105 n03633091 n02701002 n04371430 n02114367 n03947888 n01820546 n02088238 n03929855 n04612504 n02963159 n02966193 n02037110 n03982430 n02107574 n02966193 n04355933 n03372029 n02113978 n04398044 n02087046 n02106166 n04465501 n03179701 n10565667 n03492542 n01735189 n02120079 n02105251 n01873310 n02110063 n03388183 n02444819 n02687172 n01871265 n02445715 n04590129 n12985857 n01819313 n03938244 n02443114 n04380533 n04277352 n02444819 n02536864 n02111277 n02948072 n03938244 n07753113 n01440764 n09193705 n02509815 n01770393 n01828970 n03794056 n03902125 n02097474 n07714571 n02107908 n01698640 n04590129 n02481823 n04418357 n02504013 n02815834 n01530575 n03131574 n02104365 n04204238 n02454379 n04147183 n02077923 n02488291 n02342885 n02097474 n07716358 n03337140 n04417672 n01694178 n04311004 n06785654 n07768694 n04149813 n01560419 n03970156 n04125021 n09428293 n04258138 n03720891 n04086273 n02804610 n03642806 n03133878 n02974003 n01629819 n03983396 n04154565 n02483362 n04019541 n03065424 n04040759 n06596364 n04131690 n01770393 n04550184 n02120079 n03255030 n02326432 n03344393 n12985857 n01675722 n01729322 n02112137 n04398044 n02013706 n04162706 n04069434 n03630383 n02840245 n01644900 n03680355 n04229816 n09193705 n02788148 n04462240 n03775546 n06596364 n02090721 n03388183 n04252077 n03042490 n01843065 n02111129 n01616318 n04409515 n10148035 n01677366 n02655020 n02107683 n02105162 n03888257 n02128925 n03868863 n04069434 n01773797 n03792782 n03792782 n01560419 n07742313 n13054560 n02981792 n03916031 n03623198 n04146614 n11879895 n01675722 n02097130 n04423845 n02089973 n04592741 n01968897 n07718747 n02992529 n07753275 n07745940 n02108422 n02804414 n02342885 n03379051 n02457408 n02437312 n03787032 n02091032 n02002556 n03666591 n03717622 n07831146 
n03208938 n02840245 n03891332 n04589890 n03887697 n04141076 n03770439 n02113023 n02009912 n02823750 n04252077 n02396427 n02099601 n02279972 n01843383 n02749479 n04228054 n04590129 n01773797 n02027492 n02093428 n02259212 n01910747 n02088364 n02093754 n07860988 n02093428 n01494475 n03888605 n04589890 n02092339 n07584110 n02190166 n02096051 n04023962 n02484975 n03980874 n02870880 n01807496 n02090721 n02011460 n02033041 n01514668 n02094114 n02687172 n02013706 n04523525 n07718747 n02361337 n07720875 n04005630 n04509417 n07613480 n01622779 n03131574 n01631663 n02701002 n03014705 n02607072 n01560419 n03197337 n09193705 n02099849 n03000134 n02480495 n03733805 n07802026 n01749939 n03956157 n01955084 n03445777 n02927161 n02105162 n02088238 n06794110 n09332890 n02823428 n03773504 n03657121 n04044716 n07760859 n03207941 n07717410 n01664065 n03291819 n01580077 n02132136 n01687978 n09332890 n04590129 n04487081 n03838899 n01981276 n03899768 n04004767 n03207743 n02106166 n07873807 n04039381 n03388549 n03977966 n03384352 n02114367 n07695742 n02105412 n04591157 n01729322 n02066245 n03938244 n03240683 n07880968 n03782006 n02086646 n01632777 n02793495 n02281406 n02443484 n03208938 n04350905 n03179701 n03658185 n02480855 n01737021 n09256479 n04357314 n03424325 n02807133 n01855032 n01828970 n03980874 n02107683 n03895866 n07768694 n02090721 n02110958 n02669723 n04599235 n02105641 n02692877 n02927161 n01582220 n02325366 n04039381 n02790996 n07760859 n02114712 n02099712 n04275548 n04366367 n02687172 n02113624 n02454379 n04120489 n03785016 n02279972 n04209239 n01677366 n01682714 n01601694 n02483708 n07718747 n04344873 n02483362 n07717556 n01981276 n02699494 n03160309 n02123597 n03970156 n01669191 n01756291 n02606052 n02795169 n03478589 n02259212 n06785654 n02114712 n04311174 n03891332 n04443257 n01687978 n04259630 n02128925 n02526121 n03447721 n04239074 n03877472 n03710637 n07711569 n04153751 n01682714 n03598930 n04131690 n01819313 n02085620 n02113023 n03133878 n07768694 n04579432 n04532670 n03976467 n04326547 n02951358 n02279972 n03000247 n03837869 n09288635 n03196217 n03733805 n02111889 n04286575 n01985128 n02105056 n02783161 n03902125 n02643566 n04553703 n03787032 n02799071 n02137549 n03445777 n03240683 n02093256 n01847000 n01978455 n02089973 n03482405 n06874185 n02280649 n02129604 n02892767 n02480495 n02106662 n12144580 n03599486 n02066245 n02454379 n01873310 n03690938 n02389026 n02264363 n02966193 n02500267 n03538406 n01843065 n04254680 n04346328 n03961711 n03970156 n03207941 n03791053 n02085936 n03954731 n03857828 n02807133 n02443114 n02219486 n03670208 n04263257 n03110669 n01795545 n03467068 n02115913 n02119789 n04487081 n02791124 n04201297 n04265275 n01784675 n02814533 n02417914 n07932039 n02606052 n01768244 n04311004 n03662601 n02607072 n01773549 n02085620 n02730930 n04347754 n02051845 n01914609 n03729826 n02129165 n01537544 n03888605 n03764736 n04579145 n01630670 n01950731 n03599486 n03786901 n04243546 n04040759 n03594945 n01632458 n02823750 n04442312 n02859443 n01629819 n04254777 n04039381 n01641577 n04553703 n03443371 n04467665 n03991062 n02219486 n02799071 n04026417 n03930313 n02096585 n03534580 n07753113 n03868863 n01773549 n03720891 n02727426 n02096177 n03272562 n02100236 n03450230 n03697007 n02927161 n01798484 n02865351 n01631663 n02100236 n03871628 n03394916 n03983396 n03908714 n02641379 n07892512 n01877812 n01824575 n02106030 n02100583 n03424325 n02106166 n01682714 n04456115 n01784675 n03868242 n02100877 n04033901 n04266014 n04332243 n02443114 n04487081 n01774750 n02129165 n01984695 n03769881 
n02422106 n04328186 n02108915 n02088364 n02795169 n01773157 n03063689 n04326547 n01644900 n09229709 n02133161 n03016953 n02085620 n07565083 n02317335 n04485082 n02125311 n04591157 n02396427 n04347754 n02129604 n02422699 n02123597 n03388183 n03590841 n02807133 n03676483 n03255030 n02174001 n04536866 n02104029 n02817516 n02087046 n02085782 n02115641 n02086910 n02834397 n03201208 n02086240 n02454379 n02422699 n02106662 n04560804 n02699494 n02871525 n04591157 n04149813 n03920288 n02099267 n02105412 n01667778 n03535780 n02085936 n03344393 n03871628 n02268853 n02276258 n03773504 n04505470 n02895154 n01740131 n02101388 n01847000 n04111531 n02280649 n04509417 n01496331 n02264363 n02109525 n03372029 n03903868 n01796340 n02988304 n02486261 n07932039 n03841143 n02089867 n02099429 n03062245 n02799071 n03485794 n03944341 n02090379 n04370456 n04125021 n03929855 n02110063 n02794156 n04141076 n02085936 n04606251 n02099712 n01773549 n02992529 n03347037 n02120505 n02727426 n03483316 n04479046 n03544143 n03888605 n04548362 n13037406 n04044716 n02259212 n02835271 n01797886 n02823428 n04086273 n02127052 n03133878 n03733281 n02676566 n02667093 n04026417 n07932039 n04252077 n03976467 n04366367 n03443371 n04346328 n02112018 n03781244 n03459775 n03876231 n01534433 n03017168 n02808304 n07730033 n02169497 n02514041 n04458633 n02002556 n03980874 n03131574 n01807496 n04330267 n01773549 n02123159 n04204347 n02395406 n02321529 n03124043 n03617480 n01910747 n01784675 n03733131 n07875152 n04599235 n09428293 n07565083 n02206856 n03127747 n02086240 n04146614 n04532670 n03259280 n02104365 n01855032 n04366367 n02977058 n02444819 n02088632 n04562935 n03891251 n07718747 n02783161 n03929855 n01872401 n07693725 n02859443 n04370456 n02259212 n02231487 n04065272 n02361337 n02395406 n02094433 n01833805 n02097474 n03868242 n04041544 n02493793 n02174001 n02085620 n12620546 n02412080 n02808440 n02489166 n04069434 n03763968 n03721384 n04522168 n03527444 n04147183 n02277742 n03743016 n02490219 n01443537 n01534433 n02965783 n02106382 n02007558 n03908618 n04357314 n02108089 n01980166 n03642806 n04090263 n02093256 n02841315 n01695060 n04152593 n04532670 n04201297 n03476684 n02236044 n02769748 n03187595 n02841315 n04081281 n07873807 n04548362 n03595614 n04532670 n03047690 n04552348 n01806143 n04542943 n07717556 n03782006 n02107574 n04118776 n04523525 n04141327 n03000684 n02124075 n02667093 n03976467 n02965783 n06785654 n04548280 n03840681 n04243546 n03447721 n03720891 n03825788 n02791270 n02870880 n03535780 n02165456 n02132136 n04044716 n03970156 n03692522 n01744401 n04418357 n02167151 n02790996 n03903868 n02860847 n02417914 n01985128 n02281787 n10148035 n02974003 n03777754 n03445777 n04532106 n02085782 n03452741 n03670208 n03866082 n02105162 n03220513 n03529860 n04376876 n01440764 n03498962 n02687172 n01665541 n04344873 n02489166 n03384352 n02443484 n03976657 n04540053 n01817953 n02098105 n02655020 n01756291 n02099267 n04141327 n07734744 n03690938 n02133161 n10148035 n03461385 n03840681 n02099267 n03908618 n02483708 n03710637 n02804610 n02906734 n07836838 n03930313 n02786058 n01795545 n02804610 n02095570 n03447721 n04311004 n04229816 n04208210 n03710193 n03584829 n04355338 n03146219 n02085620 n04522168 n02106030 n03908618 n02113624 n04429376 n02100877 n02894605 n02088632 n02490219 n02264363 n04204238 n07717556 n02699494 n13040303 n02782093 n04238763 n03935335 n02111889 n04147183 n02089078 n03598930 n04131690 n01534433 n04039381 n02113023 n03649909 n02804610 n02950826 n07695742 n03899768 n03662601 n02100877 n06359193 n04270147 n03527444 
n04023962 n03207743 n03691459 n02086646 n04456115 n04335435 n04493381 n03355925 n02128757 n03710637 n02749479 n04111531 n02669723 n04591157 n02106550 n04069434 n01669191 n03496892 n01855672 n03803284 n04371774 n02965783 n01955084 n03710637 n04147183 n03792782 n04597913 n04266014 n02790996 n02099601 n03627232 n02219486 n07760859 n02877765 n07715103 n02259212 n07747607 n04376876 n01748264 n04317175 n02687172 n13037406 n02321529 n02981792 n02992211 n03891332 n01944390 n02398521 n07753275 n01687978 n03325584 n01806143 n01795545 n02256656 n13133613 n06785654 n02236044 n04033901 n02892767 n03792972 n07753592 n01580077 n03535780 n03602883 n02423022 n03599486 n02279972 n02655020 n03637318 n02108000 n03355925 n04486054 n01986214 n03014705 n04599235 n02107312 n04522168 n03782006 n02091244 n04238763 n01641577 n02268853 n07711569 n03662601 n02102318 n01677366 n02097209 n03763968 n03786901 n02509815 n02086910 n06794110 n07920052 n03379051 n02346627 n02018795 n02480495 n07711569 n04532670 n02099712 n02110806 n03759954 n02123597 n04154565 n03347037 n02077923 n02514041 n01616318 n02641379 n04086273 n02097298 n02930766 n01983481 n03995372 n03891332 n03218198 n02058221 n01729322 n02799071 n01820546 n04127249 n02834397 n02097209 n03196217 n03216828 n02096585 n04229816 n11879895 n03977966 n03876231 n03908618 n03255030 n02106662 n02488702 n02978881 n03868242 n03710721 n03494278 n02363005 n02939185 n07768694 n04505470 n02028035 n02894605 n07717410 n07745940 n04429376 n04344873 n02727426 n01753488 n02110806 n03661043 n01806567 n01955084 n03467068 n02110063 n03902125 n03450230 n01692333 n02114855 n01644900 n07742313 n07565083 n04505470 n02088364 n03733131 n02105056 n02606052 n03179701 n07715103 n02641379 n03259280 n07873807 n04584207 n02110063 n03218198 n02494079 n01644373 n04332243 n02115913 n02120079 n09229709 n02481823 n04235860 n02113799 n02823428 n04371774 n02442845 n01498041 n03944341 n09332890 n02091134 n02690373 n02788148 n02869837 n04204238 n01675722 n02236044 n02280649 n12144580 n01882714 n04120489 n02999410 n03692522 n01729322 n04532670 n03337140 n02966193 n07742313 n03793489 n04355933 n03220513 n02445715 n04443257 n04026417 n02823428 n03976467 n02102177 n03773504 n04487394 n02085936 n07614500 n02089078 n02206856 n04147183 n04501370 n02422699 n02085782 n02097130 n03929660 n01751748 n02099849 n01924916 n01692333 n04275548 n03991062 n01824575 n03218198 n02018207 n03530642 n03782006 n03697007 n07734744 n01820546 n02280649 n02115913 n04325704 n02104029 n03250847 n11879895 n03709823 n03271574 n04483307 n04525038 n02835271 n02102318 n04285008 n01491361 n01742172 n02077923 n01728572 n01914609 n03388549 n03085013 n02395406 n03868863 n04033901 n02011460 n02123159 n02391049 n04039381 n01695060 n02129165 n03944341 n04462240 n02403003 n03920288 n03649909 n04515003 n03372029 n02091467 n04372370 n02129165 n01753488 n02113712 n03445777 n04525305 n01768244 n02493509 n03743016 n12998815 n03770439 n02777292 n02097298 n01687978 n04179913 n02749479 n03627232 n03207743 n03476991 n07745940 n01883070 n03792972 n03769881 n02011460 n02870880 n02123045 n04040759 n07684084 n02111277 n01877812 n04019541 n03197337 n02494079 n03187595 n02687172 n02883205 n07754684 n09399592 n02791270 n03063689 n03902125 n02415577 n02086240 n02093991 n02802426 n03782006 n03478589 n02128385 n02894605 n02115641 n02011460 n02951358 n02128757 n02871525 n02346627 n03450230 n09229709 n02417914 n01796340 n02128925 n04486054 n02749479 n02346627 n01930112 n02091032 n02963159 n01944390 n02793495 n02018207 n04153751 n02790996 n02129165 n03538406 n02965783 
n03179701 n03160309 n01644373 n01770393 n02109961 n01873310 n03085013 n01735189 n04370456 n02018207 n02018795 n02110627 n03804744 n03534580 n07760859 n01631663 n04482393 n02917067 n07753592 n03447447 n02112706 n03947888 n02927161 n04228054 n03259280 n07753275 n07753592 n02948072 n07697313 n01984695 n11879895 n02125311 n12998815 n03976657 n02096294 n04264628 n04548362 n02276258 n03891251 n03127925 n02834397 n03854065 n02979186 n07920052 n02110627 n02095314 n04049303 n02965783 n02895154 n02013706 n04044716 n03709823 n02138441 n02777292 n01943899 n07892512 n02091831 n03743016 n01514668 n04243546 n02105251 n03032252 n01855032 n04612504 n03770679 n03866082 n02091134 n03443371 n03777568 n03773504 n02480855 n07745940 n02391049 n01910747 n02277742 n03938244 n02788148 n01440764 n03425413 n03895866 n03950228 n02133161 n01843065 n02992211 n02834397 n02066245 n03337140 n07716358 n03584829 n02095314 n02093991 n02974003 n02025239 n04596742 n02916936 n01768244 n03720891 n02056570 n02102177 n04557648 n02268853 n02098105 n01514859 n04141975 n02071294 n03188531 n04254777 n03709823 n03095699 n04517823 n03733131 n07693725 n03476684 n03724870 n03983396 n02342885 n02510455 n03874293 n02823428 n04356056 n01494475 n04251144 n02894605 n02097658 n04273569 n02123045 n03250847 n01687978 n02012849 n03733131 n02096294 n02279972 n01641577 n03804744 n02871525 n04479046 n07697313 n02786058 n01924916 n07932039 n02099712 n03271574 n02488702 n02927161 n02815834 n02877765 n04560804 n03297495 n04590129 n03944341 n03980874 n02105056 n01734418 n03947888 n02363005 n06596364 n07753275 n02930766 n02093859 n03207941 n01818515 n03657121 n01629819 n03063689 n03255030 n02808440 n02981792 n09246464 n04591713 n03492542 n04517823 n03240683 n07716358 n07717556 n02814533 n01843383 n03691459 n02134418 n02110185 n02093754 n02807133 n07684084 n02091244 n03873416 n02113624 n02094433 n02917067 n03450230 n03888605 n01616318 n04435653 n02111277 n02006656 n02363005 n02497673 n07753592 n07711569 n01693334 n03954731 n04033995 n04208210 n02817516 n07754684 n02256656 n13052670 n04417672 n11939491 n02443114 n03445777 n02093859 n07684084 n03026506 n04081281 n02002724 n02317335 n03584829 n04039381 n03062245 n02091134 n07745940 n02092002 n03991062 n02843684 n03961711 n04069434 n01558993 n07745940 n04486054 n04347754 n02011460 n02808304 n02109961 n04229816 n04409515 n04116512 n03857828 n02445715 n03920288 n02488702 n03126707 n07932039 n02835271 n03445924 n01797886 n03476684 n03658185 n01943899 n02951358 n03532672 n02966193 n02988304 n02229544 n02095570 n02841315 n04536866 n02268853 n03445924 n03803284 n04254777 n02443484 n03133878 n02799071 n13133613 n02102040 n02107908 n03947888 n04487394 n03599486 n03452741 n02097298 n04417672 n02493793 n02325366 n07747607 n03188531 n04482393 n02088632 n04461696 n03249569 n07693725 n02096437 n01773797 n02105162 n02843684 n02950826 n02492660 n04366367 n01981276 n03207941 n02966193 n03534580 n02112018 n01688243 n04584207 n02415577 n01847000 n02514041 n02488291 n02749479 n04380533 n02510455 n02526121 n07745940 n03930313 n03877845 n01755581 n01667114 n02108000 n02699494 n02363005 n02100877 n03770439 n02114712 n02100735 n02108000 n02028035 n02108551 n02484975 n07718747 n03498962 n01665541 n02894605 n04118776 n02119022 n04258138 n04604644 n02115641 n07768694 n12267677 n03908714 n03876231 n07717556 n11879895 n01688243 n03208938 n12267677 n02669723 n02965783 n02276258 n01631663 n04487394 n02825657 n01749939 n04037443 n04041544 n03376595 n04532670 n02104365 n02233338 n02793495 n03770439 n01910747 n04154565 n01980166 n03793489 
n02025239 n02480495 n03781244 n04399382 n07871810 n04065272 n02017213 n01943899 n04067472 n03761084 n02094433 n03538406 n02494079 n04147183 n04141076 n04589890 n01601694 n02123394 n06874185 n02114548 n03637318 n03710193 n04536866 n09399592 n03452741 n03594945 n07860988 n03085013 n02814533 n03461385 n04252077 n02859443 n04033901 n01530575 n03476684 n04069434 n02105056 n02128385 n01694178 n01688243 n03372029 n04465501 n02808440 n04235860 n02177972 n13044778 n02096177 n01770081 n01669191 n02481823 n07880968 n03888605 n02117135 n02096437 n02397096 n01592084 n03769881 n03026506 n02107574 n02114367 n03124170 n03733281 n03692522 n02037110 n02167151 n01930112 n03995372 n03355925 n03676483 n03000247 n02966193 n02910353 n01682714 n02910353 n02510455 n02106550 n02120079 n03841143 n04229816 n02447366 n02091467 n04456115 n03937543 n01818515 n04086273 n02865351 n03109150 n02808304 n03483316 n01560419 n07930864 n04392985 n04592741 n04192698 n02089973 n03485794 n07613480 n02951585 n01494475 n01443537 n02097298 n02877765 n02101388 n03271574 n03041632 n03895866 n02865351 n02091134 n02027492 n03201208 n03983396 n02364673 n02134084 n02165105 n01773549 n04127249 n04275548 n01883070 n02112706 n03776460 n02108000 n02397096 n04525305 n02113624 n02268853 n02091134 n03476991 n02815834 n04525305 n03857828 n03272010 n04523525 n04335435 n03595614 n07932039 n03345487 n03877472 n04485082 n02794156 n03877472 n03492542 n02114712 n02883205 n02106662 n03417042 n03617480 n02978881 n02101556 n04039381 n02105641 n02098413 n04552348 n02823750 n07753113 n02110063 n09332890 n09468604 n02457408 n01537544 n02497673 n09229709 n04311004 n02776631 n02692877 n03623198 n04328186 n03697007 n02102177 n01687978 n03207743 n03733131 n02099429 n03769881 n02099601 n02787622 n03000134 n03895866 n02127052 n04136333 n02106662 n13044778 n01981276 n03680355 n03372029 n03908618 n03877472 n04346328 n04557648 n04270147 n04428191 n02870880 n03297495 n02871525 n02391049 n02123045 n01871265 n02071294 n02119022 n04592741 n02509815 n03424325 n02514041 n02101006 n02747177 n01950731 n02172182 n04336792 n04356056 n04252077 n01740131 n04613696 n04023962 n04485082 n02128925 n02086079 n03983396 n02134084 n02133161 n02128925 n04517823 n07875152 n02128385 n04204347 n02077923 n03272010 n02840245 n02105641 n01817953 n04146614 n04554684 n03796401 n04039381 n02788148 n04483307 n02493793 n03692522 n03075370 n03733281 n04238763 n02815834 n03065424 n02672831 n03602883 n04346328 n02066245 n03444034 n03594734 n15075141 n12144580 n07579787 n02992529 n04515003 n02107142 n02117135 n01734418 n01693334 n02105505 n02992211 n02869837 n13133613 n02666196 n04041544 n03857828 n04418357 n02113978 n01744401 n02797295 n02699494 n02489166 n02098286 n04243546 n02134418 n02106662 n03670208 n04090263 n02692877 n03467068 n04238763 n03788365 n03657121 n02906734 n02326432 n02676566 n02607072 n03627232 n02894605 n03538406 n04136333 n01632458 n04125021 n03134739 n01697457 n03924679 n04243546 n09256479 n02493793 n07871810 n02177972 n01917289 n02088466 n04069434 n03891251 n02113799 n07711569 n01833805 n04270147 n04259630 n02859443 n04270147 n02110063 n03042490 n03290653 n02002724 n02100583 n01608432 n03710193 n03777754 n02971356 n04482393 n13037406 n01768244 n03929855 n03016953 n07584110 n02113023 n04447861 n02128925 n02988304 n04201297 n02006656 n01807496 n03658185 n03394916 n07716358 n07579787 n02102177 n01729322 n03775071 n04482393 n02415577 n02607072 n02909870 n03255030 n03344393 n02325366 n02102480 n02102177 n04423845 n02130308 n03785016 n02787622 n04200800 n02087046 n04487394 n04152593 
n04065272 n07831146 n02843684 n07248320 n03498962 n02128757 n04523525 n02999410 n03697007 n02097209 n11939491 n04141327 n07248320 n04461696 n02110185 n02483708 n03902125 n02168699 n02834397 n02108915 n02963159 n03841143 n02120505 n02111129 n02112350 n03793489 n03649909 n04090263 n02727426 n04033995 n01608432 n02364673 n02895154 n07730033 n02423022 n02999410 n07579787 n02086079 n01631663 n02494079 n04118776 n03467068 n03476684 n03954731 n03775546 n02981792 n01873310 n01980166 n04049303 n04099969 n02965783 n02281787 n02823750 n02655020 n02403003 n02951358 n02028035 n02504458 n03814639 n02085620 n04486054 n03761084 n07930864 n04522168 n04347754 n01644373 n02992211 n04483307 n02102973 n04467665 n03026506 n03026506 n07697537 n01532829 n04442312 n02108551 n01824575 n04254777 n03109150 n01728920 n04380533 n02795169 n04493381 n03141823 n01817953 n04026417 n02909870 n01601694 n02834397 n03376595 n02909870 n07711569 n03891251 n01806567 n03854065 n03814906 n02808304 n04153751 n07768694 n04532106 n02102973 n02346627 n13133613 n02129604 n02443484 n03792972 n02804414 n02097298 n02708093 n01748264 n03992509 n04591713 n02105162 n03840681 n02276258 n02100583 n02408429 n03770679 n07717556 n02280649 n02006656 n04560804 n04285008 n03868863 n02088238 n02799071 n04560804 n02108551 n02487347 n01614925 n04505470 n04090263 n03661043 n01675722 n01531178 n01632458 n01695060 n04254777 n04355933 n03743016 n04259630 n01534433 n02110958 n02112350 n02488702 n02687172 n09246464 n02071294 n02497673 n03871628 n07717556 n02105412 n02999410 n02105412 n04208210 n04589890 n03379051 n03404251 n03014705 n04146614 n03938244 n02107142 n03452741 n01667114 n04311174 n01667778 n03127747 n02105412 n09399592 n07716906 n03673027 n03197337 n03450230 n02113186 n01775062 n04380533 n06359193 n03483316 n02172182 n03496892 n03843555 n04476259 n02110806 n04467665 n04548280 n01518878 n02281787 n02093647 n04404412 n04356056 n03840681 n03995372 n02326432 n02777292 n01776313 n03220513 n02795169 n02074367 n01968897 n07693725 n02906734 n03777754 n02497673 n03126707 n04259630 n03729826 n04026417 n01855032 n02808440 n04346328 n03930313 n04560804 n03127925 n07684084 n04417672 n02172182 n02325366 n03899768 n01644900 n02113186 n03710637 n03857828 n02114548 n04326547 n02643566 n02092002 n03124170 n02281406 n01806567 n04254680 n03344393 n01532829 n02116738 n02116738 n02094258 n03690938 n03272562 n03110669 n03786901 n07920052 n04355933 n01978455 n01806143 n01944390 n03450230 n02088364 n03956157 n02437312 n03590841 n04344873 n02277742 n02111277 n01784675 n04483307 n02132136 n04019541 n01693334 n01608432 n01667114 n02236044 n03775546 n01739381 n02100583 n02090622 n01729322 n04350905 n02056570 n04612504 n04505470 n12057211 n03837869 n01531178 n04376876 n02454379 n02124075 n02395406 n02114367 n03481172 n02109047 n07715103 n04154565 n02423022 n01756291 n02108089 n02493793 n03602883 n02168699 n01978455 n02097298 n02447366 n04229816 n07583066 n03207743 n07248320 n02100583 n02823750 n01608432 n04418357 n01833805 n03930630 n03425413 n02788148 n03637318 n04265275 n02281787 n04335435 n02093428 n06359193 n03944341 n04041544 n04515003 n02106550 n02097130 n02837789 n07753275 n04026417 n03673027 n03887697 n03110669 n03769881 n01532829 n02006656 n04296562 n04347754 n01828970 n03125729 n03877472 n02096051 n04483307 n02398521 n03770679 n02106662 n03775546 n04347754 n02676566 n03690938 n07831146 n04398044 n01985128 n02109047 n03785016 n03494278 n03792972 n02114367 n03777754 n04090263 n02132136 n03134739 n01491361 n09332890 n03803284 n02120079 n03075370 n02104365 n03884397 
n02790996 n01751748 n07695742 n02123045 n03759954 n03733131 n12998815 n03223299 n07745940 n04532106 n02111889 n02708093 n01944390 n01534433 n02361337 n02113624 n02090721 n02093256 n02025239 n04355933 n03452741 n01530575 n01443537 n04209239 n02037110 n04154565 n03594945 n04465501 n07714990 n03868863 n01819313 n04026417 n04553703 n02112706 n01980166 n02797295 n03888257 n02342885 n03216828 n03388043 n03804744 n02138441 n01689811 n04553703 n02231487 n04208210 n03372029 n02096177 n04429376 n03272010 n02493509 n03127747 n02786058 n03777568 n04238763 n03535780 n03938244 n02408429 n02097658 n02123159 n03891251 n02165105 n02437312 n02114712 n04540053 n04270147 n02113186 n02281406 n03899768 n04442312 n04023962 n02963159 n02102973 n01860187 n03297495 n03733805 n03980874 n04336792 n04366367 n02412080 n02966687 n03763968 n02098286 n01756291 n03929855 n03944341 n03271574 n04026417 n07754684 n01985128 n07753113 n01675722 n02106166 n02116738 n03916031 n04065272 n03110669 n07747607 n02009912 n03950228 n03483316 n07716358 n03216828 n09835506 n03393912 n02526121 n03770439 n02002724 n02871525 n01776313 n04355933 n03450230 n02025239 n02107312 n04606251 n03063599 n01795545 n04254777 n02120079 n01833805 n02099601 n13052670 n02676566 n03457902 n03720891 n03793489 n01775062 n01978287 n10565667 n02916936 n03599486 n02110958 n01443537 n04204238 n02672831 n07717410 n04209239 n01491361 n02963159 n03424325 n03697007 n03344393 n03445777 n02999410 n02441942 n04525038 n02403003 n07684084 n03125729 n02095570 n01796340 n03599486 n07747607 n04507155 n07768694 n04501370 n07734744 n02676566 n01871265 n03680355 n02088466 n10565667 n02110958 n02096437 n01498041 n02130308 n07836838 n03884397 n04065272 n02033041 n02607072 n13040303 n02808304 n03095699 n03485407 n02395406 n04560804 n02676566 n04589890 n02110958 n02837789 n01669191 n02123045 n07579787 n01667778 n12998815 n04613696 n02951585 n03623198 n03764736 n02892767 n02102318 n04040759 n02123045 n03062245 n02701002 n03201208 n04266014 n01873310 n04597913 n03595614 n07716906 n02988304 n03445924 n02860847 n02095889 n02115913 n01756291 n02114548 n02457408 n03995372 n01614925 n02107312 n03930630 n03017168 n03535780 n01985128 n02177972 n03045698 n13133613 n04398044 n02099267 n01829413 n02114712 n02104029 n01440764 n04263257 n04251144 n03584254 n03874599 n06359193 n04070727 n04209133 n04065272 n01748264 n02980441 n02093754 n02097658 n03187595 n01742172 n04590129 n03188531 n02504013 n02017213 n02979186 n02843684 n04040759 n01667778 n01820546 n02116738 n04243546 n04090263 n03888605 n01985128 n02823750 n04141975 n03376595 n02108915 n03372029 n02423022 n01728920 n02102973 n01580077 n02492660 n07716906 n02096294 n03259280 n03884397 n02102973 n03666591 n02486410 n02102480 n02105162 n09246464 n02823750 n04152593 n03196217 n01818515 n04591157 n04328186 n01742172 n01753488 n02971356 n09428293 n02927161 n03180011 n04099969 n02795169 n02895154 n03929660 n01910747 n03854065 n02747177 n03803284 n02123394 n04264628 n04243546 n02123159 n01983481 n02526121 n12267677 n06785654 n04606251 n01855672 n02281406 n04296562 n01773549 n02127052 n02090622 n02088094 n04125021 n01728920 n03595614 n02090622 n04285008 n03874293 n02823428 n02028035 n02077923 n02017213 n03903868 n02127052 n04317175 n02107683 n01984695 n03995372 n02090721 n02089867 n10148035 n01737021 n01883070 n01819313 n03958227 n03841143 n03459775 n03777568 n03417042 n02110185 n03388549 n03924679 n02672831 n02165456 n03207743 n04136333 n02971356 n04039381 n04162706 n02791124 n03124170 n01843065 n04428191 n03874599 n02102480 n04487394 n01883070 
n02966193 n01494475 n02110341 n07716358 n07248320 n02814860 n04133789 n02443114 n02110063 n04509417 n02108089 n04548362 n01748264 n03710637 n02091467 n02110341 n02113624 n01819313 n02939185 n03272562 n02787622 n12267677 n04141327 n02110958 n01687978 n04429376 n01729322 n02093647 n07920052 n01910747 n02107908 n03895866 n02086079 n02895154 n13037406 n03876231 n04590129 n01692333 n03717622 n02109525 n04355338 n03777568 n03314780 n03887697 n04141975 n01978287 n04597913 n04141975 n02782093 n03868242 n02002724 n03196217 n04153751 n01629819 n02808440 n02058221 n01531178 n02114712 n03494278 n04204347 n03793489 n03483316 n04209239 n03776460 n04336792 n02114548 n02667093 n02834397 n04456115 n03394916 n04346328 n01776313 n02124075 n02356798 n03895866 n02963159 n01883070 n03355925 n02226429 n03417042 n02106550 n02101388 n04200800 n02011460 n02112706 n04326547 n01985128 n03110669 n03804744 n04141327 n11939491 n02105251 n03201208 n07754684 n01632777 n04553703 n04149813 n02481823 n03947888 n01534433 n03457902 n02776631 n04209239 n04523525 n04074963 n02233338 n03930313 n03249569 n03884397 n01601694 n04560804 n02514041 n03417042 n07880968 n03594734 n03344393 n02088632 n02106662 n02108551 n01744401 n02483708 n02971356 n02909870 n02841315 n03496892 n02100583 n03476684 n07718472 n01641577 n06596364 n03954731 n04357314 n04259630 n07695742 n04423845 n03249569 n04111531 n02895154 n04149813 n02114712 n04252225 n03770679 n02837789 n04428191 n02361337 n02100236 n01728920 n03594945 n02268443 n07875152 n07695742 n02108551 n01531178 n01980166 n02106382 n03658185 n02988304 n04141076 n02906734 n02012849 n02786058 n01614925 n02206856 n01631663 n03100240 n03047690 n03180011 n02895154 n02782093 n03595614 n09332890 n07749582 n04258138 n03095699 n02096177 n01728920 n03538406 n01806143 n02088238 n04501370 n09229709 n04423845 n02397096 n02133161 n02088238 n02264363 n02101006 n04515003 n02870880 n04548280 n04461696 n03028079 n02268853 n03874599 n01877812 n02699494 n12985857 n02454379 n04326547 n02089867 n01560419 n02093256 n04204347 n04347754 n02086240 n04286575 n04482393 n03840681 n04065272 n02480855 n02749479 n03492542 n02096437 n02317335 n02174001 n04525305 n04039381 n07753592 n13037406 n02494079 n04258138 n02229544 n01843383 n01728920 n04330267 n02325366 n02808304 n04462240 n03874293 n03482405 n01629819 n03781244 n04392985 n04258138 n03160309 n02096585 n01614925 n02017213 n04133789 n04277352 n02106030 n04428191 n03400231 n03249569 n01514668 n10148035 n02397096 n07697313 n07802026 n03887697 n07248320 n01855032 n03908618 n02086910 n04254680 n02104365 n03445777 n02011460 n07695742 n04344873 n01667778 n02091244 n01534433 n02097474 n02701002 n03208938 n03676483 n03770439 n01755581 n02108915 n01753488 n02102480 n03633091 n03662601 n01770393 n07590611 n04264628 n03998194 n02396427 n02102040 n01770393 n04162706 n02281406 n12768682 n01945685 n03483316 n01978287 n02119022 n02169497 n03991062 n04465501 n07614500 n01990800 n01534433 n03770679 n09288635 n03188531 n09256479 n04259630 n02110627 n04560804 n02113978 n02095889 n04599235 n03259280 n02111277 n02794156 n04328186 n04254680 n03661043 n03599486 n02097130 n02033041 n02071294 n03937543 n09288635 n03709823 n02489166 n03673027 n01828970 n04532106 n03496892 n01924916 n04548280 n02319095 n02395406 n02782093 n04554684 n02086240 n03916031 n02791270 n07717410 n04238763 n02730930 n01514859 n01748264 n02988304 n03461385 n03272562 n04330267 n07860988 n02276258 n07871810 n02097474 n02999410 n04037443 n01614925 n04033901 n03944341 n02655020 n01608432 n03874599 n03594945 n04252225 n07892512 
n03717622 n03763968 n02110627 n02795169 n03000134 n02494079 n03042490 n03100240 n07875152 n02802426 n02484975 n09229709 n02747177 n06596364 n04557648 n02123394 n02002724 n02167151 n02504013 n01616318 n03770439 n04428191 n02051845 n04579145 n02093754 n12267677 n01641577 n02963159 n02807133 n04590129 n03467068 n01629819 n02443484 n02088238 n02412080 n03532672 n04591157 n04486054 n02692877 n02727426 n04371774 n04273569 n03733131 n03544143 n02104365 n02109961 n03447447 n01872401 n03961711 n02116738 n01688243 n01749939 n03141823 n02509815 n12985857 n01829413 n02109047 n02526121 n02097658 n03216828 n02870880 n04266014 n04355338 n03633091 n01910747 n02006656 n03445924 n02906734 n04099969 n02099712 n02229544 n04443257 n02687172 n04273569 n02489166 n03924679 n12985857 n02167151 n02321529 n02102040 n02870880 n01693334 n02097298 n01882714 n04040759 n03791053 n02979186 n02454379 n03131574 n04141327 n02981792 n02974003 n02090721 n04131690 n02106030 n02493793 n02963159 n04596742 n11879895 n03457902 n02823750 n01774750 n03788365 n02389026 n02823750 n02493509 n07583066 n01682714 n03899768 n02279972 n07747607 n01692333 n04243546 n04317175 n02106550 n01664065 n01677366 n02093754 n04346328 n02106550 n02127052 n03666591 n03877845 n03125729 n03786901 n03775071 n02412080 n01518878 n03720891 n01735189 n02356798 n02110806 n03047690 n04462240 n02951585 n01558993 n03065424 n02860847 n02486410 n02398521 n04346328 n02106030 n02445715 n04153751 n02509815 n01828970 n04069434 n07714571 n13044778 n01955084 n03662601 n01664065 n02708093 n02408429 n03920288 n02190166 n02091635 n04229816 n01773549 n02106662 n02009912 n01558993 n02127052 n02843684 n02174001 n03345487 n01990800 n03584254 n02389026 n02389026 n04069434 n03032252 n07749582 n02110627 n02807133 n02012849 n03208938 n02107142 n03995372 n02927161 n03888257 n02802426 n09193705 n07716906 n03345487 n02088094 n03297495 n02871525 n02363005 n02206856 n02445715 n02783161 n02948072 n09421951 n02410509 n02808304 n03903868 n02110063 n03724870 n07836838 n04141975 n02487347 n02112137 n02804610 n07734744 n04462240 n03372029 n02177972 n02085620 n01917289 n04070727 n02823428 n02860847 n04392985 n02791124 n01847000 n01784675 n02093991 n03457902 n02939185 n04493381 n03271574 n02509815 n03793489 n02690373 n03983396 n02927161 n03018349 n03908618 n02110341 n03776460 n02124075 n04335435 n03127747 n02948072 n03085013 n02442845 n02916936 n01688243 n02879718 n02097298 n04589890 n02607072 n02948072 n04525038 n02100735 n02814533 n03000134 n03478589 n02037110 n04235860 n02112137 n04435653 n04273569 n03794056 n01910747 n01748264 n01883070 n04200800 n04590129 n03443371 n02791124 n03075370 n03673027 n01742172 n03476684 n01484850 n01675722 n02978881 n03938244 n02106166 n01729977 n04118776 n04209239 n03376595 n04008634 n02095889 n01855032 n03376595 n04456115 n02879718 n04238763 n02268443 n02794156 n02105505 n01914609 n03899768 n02676566 n02099601 n02106382 n04264628 n04501370 n03594734 n03895866 n04332243 n04008634 n02492035 n01773797 n04228054 n02110958 n06359193 n02403003 n04409515 n03337140 n02483708 n02106166 n04209133 n02114367 n03743016 n03201208 n03207941 n02804414 n04487081 n01945685 n02606052 n03388043 n03661043 n02804610 n04235860 n02795169 n03476991 n03444034 n03942813 n04026417 n03337140 n02108422 n04033995 n03041632 n02134418 n04554684 n03733131 n02116738 n03786901 n03937543 n04147183 n04131690 n03400231 n02125311 n02410509 n01775062 n02814533 n02110185 n04008634 n04597913 n01883070 n07714990 n02112350 n02437616 n03662601 n02074367 n04239074 n03063689 n07831146 n02869837 n03920288 
n13052670 n03016953 n02788148 n04613696 n02113023 n03866082 n02992529 n04479046 n04467665 n04540053 n02927161 n03992509 n04347754 n03495258 n03633091 n02105251 n02231487 n02102318 n02667093 n01749939 n02133161 n03372029 n02486261 n04004767 n02088466 n07579787 n02791270 n03131574 n02391049 n01664065 n02099429 n01776313 n03920288 n02109047 n02317335 n04612504 n03584254 n03457902 n02051845 n03047690 n04507155 n02704792 n01748264 n02017213 n03450230 n02841315 n04070727 n02992211 n03404251 n02092339 n12768682 n07873807 n03041632 n03379051 n04435653 n04146614 n02012849 n03443371 n04152593 n04507155 n03447447 n04252225 n03770439 n13037406 n01748264 n04550184 n03207941 n07716906 n03595614 n07875152 n04560804 n04479046 n03127925 n07248320 n02342885 n02088466 n03485407 n09399592 n04039381 n04548280 n02099267 n04254777 n06785654 n02190166 n03868242 n04141076 n02980441 n03868863 n02437312 n02096177 n02701002 n03259280 n02834397 n15075141 n07880968 n02096585 n09256479 n02091032 n03457902 n02099849 n02398521 n02129165 n03404251 n01774384 n03977966 n02980441 n02137549 n03920288 n01770081 n03891332 n03196217 n02782093 n02510455 n03535780 n04263257 n02790996 n03146219 n01601694 n03379051 n03188531 n02790996 n04596742 n01560419 n03376595 n12768682 n02504013 n03388043 n02231487 n03134739 n03775071 n02509815 n07695742 n02325366 n09835506 n04418357 n04483307 n04069434 n03991062 n02487347 n03223299 n02817516 n03207743 n02110627 n04604644 n02112350 n02109961 n03534580 n03208938 n03125729 n03947888 n04154565 n01860187 n02328150 n02777292 n02112018 n02113978 n02033041 n07871810 n10148035 n01981276 n07860988 n03492542 n04005630 n02093428 n04355933 n02108089 n03841143 n02704792 n02277742 n03874599 n04371774 n01775062 n03461385 n02096585 n02093754 n02011460 n02814533 n02787622 n02114367 n01641577 n03992509 n04265275 n02096051 n07745940 n02422106 n01496331 n03188531 n07614500 n02101006 n02101006 n13040303 n02085936 n03961711 n02093991 n07714571 n01986214 n01669191 n01984695 n03297495 n02108422 n03249569 n04398044 n03775546 n01986214 n04579432 n07714571 n01945685 n02640242 n06785654 n04116512 n02099429 n09229709 n01682714 n01749939 n02007558 n01498041 n04507155 n02124075 n02101006 n02104029 n02676566 n02606052 n04238763 n02101388 n02107312 n03347037 n02493509 n02396427 n04065272 n03840681 n04515003 n02091635 n02325366 n04033901 n01675722 n03788365 n13037406 n03527444 n01695060 n04328186 n07590611 n01728572 n02119022 n02974003 n02410509 n07892512 n07730033 n04330267 n03868863 n02018207 n02500267 n02980441 n01843065 n02093859 n02094114 n07768694 n04154565 n02123394 n03843555 n02123159 n02107574 n01795545 n02917067 n02071294 n03895866 n03179701 n03950228 n04259630 n02165105 n02120079 n02804610 n02279972 n01728920 n02978881 n03710637 n01872401 n03160309 n02442845 n09256479 n02950826 n02841315 n04357314 n02865351 n04111531 n07747607 n03594945 n03763968 n04606251 n03895866 n02113978 n04554684 n04344873 n04254120 n01740131 n03976467 n07753275 n02443484 n02939185 n02977058 n13037406 n07747607 n04467665 n01784675 n04536866 n02123159 n02119789 n04548362 n02111129 n06794110 n04239074 n03733805 n02088466 n03764736 n01914609 n02105505 n02412080 n04254680 n04523525 n07697537 n01728920 n02794156 n02113978 n13040303 n01514859 n04398044 n02364673 n01924916 n02007558 n03803284 n02795169 n03916031 n02088238 n02086646 n03063689 n01806143 n04366367 n03109150 n04523525 n04208210 n01978287 n03272010 n03146219 n03933933 n04525305 n03124043 n02510455 n01687978 n01824575 n04613696 n06359193 n03110669 n03388183 n03691459 n02280649 n03133878 
n02085782 n02087046 n02090721 n02497673 n04344873 n04330267 n01514859 n02488702 n04525038 n07711569 n01978455 n01768244 n02105855 n04604644 n02281406 n01739381 n01693334 n02113978 n07749582 n03786901 n01883070 n09246464 n03841143 n03482405 n12998815 n03938244 n04238763 n03929855 n02892201 n02486261 n02676566 n01843065 n01728920 n03379051 n02823750 n02776631 n02488291 n02317335 n02002724 n01755581 n03110669 n04019541 n03095699 n04004767 n03877845 n02120505 n02113624 n07695742 n03127747 n03041632 n01744401 n02098286 n02100735 n02264363 n04456115 n02219486 n02129165 n04275548 n03874599 n03706229 n01770081 n02988304 n02105505 n02130308 n02113799 n06596364 n02028035 n01784675 n04266014 n02422106 n03271574 n01622779 n04229816 n02988304 n02977058 n03594734 n03196217 n04008634 n03947888 n03032252 n02037110 n03424325 n03873416 n03379051 n02096437 n03887697 n04154565 n03803284 n06794110 n03956157 n03297495 n03444034 n09256479 n02317335 n03871628 n04192698 n07873807 n02793495 n03764736 n02483362 n01773797 n03788195 n03032252 n04311174 n02111889 n03970156 n04447861 n02018795 n03666591 n03314780 n02229544 n02172182 n02486410 n02607072 n02276258 n04254777 n02403003 n02094114 n09246464 n02114367 n03788365 n03297495 n02492660 n04326547 n03201208 n04286575 n03492542 n03877472 n01910747 n01608432 n02490219 n03710637 n04344873 n02951358 n01498041 n01729322 n04409515 n04146614 n03873416 n02090721 n04081281 n03976467 n02837789 n04409515 n03759954 n02168699 n03127925 n03970156 n01665541 n03160309 n04251144 n04311174 n02098413 n02480855 n01773549 n02489166 n03494278 n02229544 n01729977 n04552348 n04033995 n01882714 n04366367 n03271574 n03666591 n02093428 n02791124 n03384352 n03498962 n03709823 n02422699 n02085782 n04133789 n02486261 n12985857 n04372370 n03857828 n04367480 n04612504 n04399382 n01632458 n03717622 n02514041 n02018207 n07615774 n02098413 n03691459 n02108915 n07920052 n04228054 n04493381 n04081281 n03832673 n13052670 n04584207 n04252225 n01608432 n02708093 n04398044 n02087046 n04599235 n02177972 n02326432 n02490219 n03761084 n02101556 n04599235 n04467665 n02097658 n01978287 n04612504 n02397096 n03018349 n02391049 n07584110 n02457408 n01776313 n02120079 n02727426 n02791270 n04590129 n02058221 n03599486 n03788365 n02098105 n02097047 n03794056 n02966193 n01494475 n02514041 n01773157 n07613480 n09332890 n02086910 n02071294 n02105412 n02966193 n02481823 n04228054 n02825657 n03775071 n02096177 n02328150 n01768244 n03028079 n03534580 n01484850 n09428293 n03788365 n02106550 n03782006 n04258138 n03710637 n02097298 n03721384 n02391049 n02013706 n02840245 n03249569 n02454379 n02865351 n02206856 n02093991 n01877812 n03485407 n02101388 n03014705 n04456115 n03976657 n03188531 n02342885 n02096437 n02102318 n03376595 n03271574 n02177972 n03594945 n03126707 n02099712 n01692333 n02966687 n03930313 n01667778 n07716906 n01580077 n03804744 n02111277 n03100240 n04548280 n02814533 n04204347 n04141327 n02066245 n02096585 n02102480 n03125729 n03272010 n03980874 n07753592 n02105412 n02443114 n04579432 n02101556 n03995372 n02950826 n01534433 n02088238 n07715103 n02795169 n01484850 n01753488 n02607072 n01530575 n01692333 n04153751 n02111500 n03131574 n03803284 n02437312 n02974003 n02776631 n04125021 n09428293 n02843684 n03047690 n02417914 n03998194 n03110669 n02445715 n04525305 n03998194 n01514668 n02321529 n02088466 n01644373 n07714571 n04357314 n03991062 n02088094 n02687172 n02110185 n02089078 n09468604 n02408429 n04389033 n03706229 n02488702 n03992509 n02417914 n04086273 n07613480 n04270147 n03887697 n01601694 n02123159 
n01518878 n07836838 n04443257 n01592084 n03109150 n02264363 n02808304 n04252225 n01630670 n04507155 n03047690 n03344393 n02981792 n03680355 n07579787 n02526121 n01984695 n04485082 n03814639 n02977058 n03866082 n04404412 n04116512 n03100240 n03127925 n01847000 n02051845 n02177972 n02106030 n03770679 n03535780 n03676483 n01843383 n01873310 n02085936 n02328150 n03089624 n02102318 n02500267 n04040759 n04552348 n02101006 n07749582 n03884397 n02111129 n03662601 n03250847 n02129604 n03461385 n03970156 n04317175 n03958227 n07714990 n01980166 n03929660 n03314780 n01855032 n03630383 n01817953 n02095889 n04505470 n02727426 n03598930 n02105855 n02115913 n03110669 n10148035 n02106550 n02086079 n04380533 n10565667 n03249569 n02095889 n02492660 n07873807 n02797295 n04209239 n02786058 n02837789 n02841315 n02704792 n03935335 n04562935 n02099429 n02112137 n03325584 n04442312 n04033995 n07614500 n02108089 n03710721 n03100240 n02093859 n02906734 n04254777 n07871810 n02422106 n04049303 n03961711 n02777292 n04443257 n04597913 n02927161 n03424325 n03032252 n02795169 n02123394 n01498041 n01751748 n03793489 n03345487 n02091635 n02123159 n02107142 n02484975 n03666591 n03085013 n04325704 n03208938 n04562935 n04152593 n09472597 n07875152 n04597913 n04099969 n03976657 n02028035 n03796401 n02917067 n02110958 n02730930 n02802426 n02917067 n02704792 n07760859 n02123597 n01981276 n01688243 n03400231 n02088238 n07753275 n02100583 n01955084 n02777292 n01534433 n03908714 n02120079 n04465501 n02641379 n02098286 n01534433 n02917067 n04371774 n02110958 n03538406 n03443371 n03902125 n03075370 n04336792 n02091831 n02510455 n02097047 n03908618 n02817516 n02111889 n01531178 n02481823 n03110669 n02095570 n03982430 n03444034 n07714571 n07932039 n01768244 n02837789 n03637318 n04141975 n01910747 n03873416 n03018349 n02114548 n07717556 n03494278 n03924679 n02012849 n02361337 n02398521 n03443371 n07615774 n02009912 n02395406 n02777292 n02783161 n02445715 n03743016 n03891332 n04542943 n15075141 n02091244 n02114367 n03404251 n03000134 n01667114 n03763968 n02233338 n09428293 n03793489 n04258138 n04023962 n01667778 n03899768 n13133613 n03599486 n03042490 n04467665 n03633091 n02437616 n02835271 n03791053 n04486054 n07717410 n07613480 n01728920 n03400231 n02790996 n02676566 n04562935 n02264363 n04141975 n03089624 n03954731 n03467068 n02690373 n02102040 n01985128 n04116512 n02497673 n04392985 n03937543 n02006656 n01773549 n02704792 n02999410 n07930864 n02011460 n02107312 n02910353 n01795545 n04111531 n02894605 n01614925 n02793495 n02100877 n03761084 n02504013 n02408429 n07583066 n01744401 n03447447 n03125729 n01978287 n04346328 n03742115 n02483708 n13054560 n02096177 n03920288 n02837789 n03877472 n02165105 n03937543 n03982430 n03787032 n07880968 n04371774 n04146614 n03394916 n03903868 n02687172 n01494475 n02536864 n02129165 n07920052 n01496331 n02009912 n02692877 n02101006 n03271574 n04371774 n01496331 n04557648 n02027492 n02125311 n03376595 n01872401 n04346328 n02091134 n04238763 n01776313 n01796340 n01770081 n03141823 n01665541 n04133789 n02096437 n02096051 n10565667 n04542943 n03447447 n09421951 n02113624 n03160309 n02504458 n01774750 n03871628 n04590129 n12057211 n03481172 n03000247 n04090263 n04141076 n01914609 n03775071 n02869837 n04509417 n04371430 n02097209 n04613696 n02669723 n02883205 n01748264 n01955084 n04204238 n03743016 n02177972 n03868863 n04133789 n02168699 n04041544 n02115913 n02259212 n02096177 n02277742 n04493381 n02093859 n03160309 n04120489 n09246464 n04005630 n03938244 n03208938 n04033901 n02835271 n04049303 n02951585 
n04229816 n01755581 n01734418 n01843065 n02114367 n09288635 n04147183 n03196217 n04367480 n03467068 n01491361 n02091831 n04154565 n07875152 n07873807 n02690373 n02730930 n04389033 n02879718 n03223299 n01784675 n03447721 n01742172 n01728572 n12985857 n03376595 n03089624 n03887697 n04270147 n01930112 n02814533 n07802026 n07920052 n03425413 n06596364 n03134739 n02108422 n12998815 n07753113 n02056570 n09256479 n04238763 n02951585 n04033901 n01833805 n01737021 n01694178 n06785654 n02500267 n02085782 n03825788 n03899768 n01843383 n02782093 n01855672 n04239074 n04604644 n07583066 n03041632 n02777292 n03627232 n03884397 n02328150 n04005630 n02093859 n01749939 n03000134 n04037443 n03888257 n01824575 n07875152 n02526121 n07920052 n02102040 n02869837 n02099849 n04356056 n01749939 n02442845 n04487081 n02087046 n04201297 n02094433 n02480495 n02096585 n01518878 n04141975 n02981792 n01632458 n02093647 n02018207 n04040759 n01820546 n03840681 n03832673 n02051845 n01883070 n03534580 n02028035 n03857828 n01682714 n04049303 n02096585 n04254120 n02071294 n03868863 n02206856 n04086273 n02177972 n02085782 n03942813 n01496331 n04355933 n02790996 n04265275 n03976467 n02279972 n02086240 n01824575 n09421951 n02123159 n02086079 n07717410 n02422106 n02236044 n01608432 n03062245 n07734744 n01983481 n04542943 n01773797 n02526121 n01688243 n01990800 n02169497 n01768244 n01770393 n03977966 n02096585 n03532672 n07711569 n01734418 n04326547 n09332890 n04584207 n02114712 n02093754 n03495258 n01616318 n02326432 n04507155 n03527444 n01981276 n02097298 n03958227 n02165105 n07718472 n04591157 n04286575 n04208210 n02120505 n04265275 n04147183 n03271574 n02128385 n02110958 n03888257 n02730930 n01978455 n02843684 n03590841 n03065424 n03854065 n01739381 n01773797 n03976657 n04116512 n02092339 n01817953 n02119789 n01748264 n02169497 n03125729 n02091467 n07714571 n02704792 n02085936 n02108915 n03314780 n02086646 n07697537 n03584829 n03773504 n04204347 n01796340 n03930313 n02033041 n02236044 n02895154 n02708093 n02115641 n04209239 n01735189 n03201208 n09468604 n03047690 n04254777 n06596364 n03627232 n01532829 n01694178 n04081281 n03495258 n02788148 n01775062 n04355933 n03017168 n04599235 n03785016 n07871810 n03980874 n02071294 n04493381 n04372370 n02087046 n04584207 n04086273 n02092339 n02817516 n03240683 n12998815 n03075370 n02804414 n01833805 n01695060 n04596742 n04398044 n02106382 n04204238 n02219486 n02437312 n04335435 n01531178 n04201297 n03920288 n03759954 n03792782 n02412080 n04536866 n03874293 n02708093 n02437312 n04509417 n01990800 n04579145 n02480495 n04371430 n02105056 n03930630 n03481172 n02808440 n07932039 n04428191 n02971356 n02090379 n03857828 n02988304 n02115913 n04599235 n04033901 n11879895 n03014705 n02002724 n02445715 n02870880 n02951585 n02129604 n02123394 n01860187 n03788195 n03729826 n01665541 n01531178 n04442312 n02777292 n13044778 n07720875 n02027492 n02480855 n04447861 n02403003 n03874599 n01622779 n02860847 n03884397 n13040303 n03796401 n03388549 n03970156 n02112137 n03775071 n01601694 n02093991 n01664065 n02077923 n02487347 n02444819 n02480855 n04505470 n03980874 n03447447 n01955084 n02056570 n03127747 n02692877 n06596364 n03400231 n03482405 n03920288 n03871628 n03496892 n12267677 n04310018 n02865351 n01924916 n03000247 n03393912 n02825657 n06785654 n02097474 n04179913 n02112350 n03444034 n03133878 n02132136 n02843684 n01770393 n01871265 n03290653 n03207941 n03476991 n03481172 n04590129 n01532829 n03642806 n03388183 n02094258 n03496892 n04467665 n02963159 n02328150 n02101388 n09256479 n03777568 n02165456 
n03042490 n02363005 n13054560 n02808440 n04532670 n01688243 n03602883 n02206856 n03400231 n02346627 n01871265 n01806567 n02727426 n04067472 n02088094 n04553703 n13037406 n07718472 n04252077 n04258138 n02808440 n02328150 n03325584 n01774750 n02123159 n02111277 n04591157 n03871628 n03775071 n04136333 n03976467 n03908618 n03483316 n04487394 n02769748 n04523525 n12998815 n04553703 n04152593 n02346627 n02007558 n03110669 n01440764 n09472597 n02730930 n02782093 n04483307 n02028035 n04040759 n03372029 n02808440 n02120505 n03141823 n02100236 n01770393 n01739381 n03208938 n03954731 n04536866 n04456115 n03000247 n04612504 n02837789 n03538406 n02699494 n03967562 n04398044 n03710721 n04356056 n04033995 n02415577 n04270147 n03866082 n03271574 n02133161 n03483316 n01514668 n03770679 n04532670 n03720891 n02096437 n03444034 n02088632 n02328150 n02787622 n12998815 n07716358 n02817516 n03961711 n02823428 n01753488 n02443114 n04370456 n04542943 n03876231 n02509815 n04371430 n04141975 n02112350 n02321529 n02097474 n04461696 n03804744 n02786058 n12768682 n01855032 n03992509 n01773797 n02443484 n02101006 n09421951 n03837869 n04356056 n01744401 n02701002 n03977966 n02105056 n02102318 n03095699 n01728572 n01873310 n03930313 n03930630 n06359193 n02033041 n04604644 n03781244 n04599235 n02114548 n02356798 n03271574 n07932039 n02100735 n04069434 n04346328 n09332890 n12768682 n02795169 n04049303 n02403003 n04239074 n02493793 n02127052 n04317175 n02363005 n03832673 n04296562 n03630383 n01739381 n02107683 n02012849 n03786901 n04033995 n03782006 n02113624 n02783161 n02134418 n03532672 n02012849 n02415577 n02096437 n03220513 n01945685 n02892201 n04044716 n07742313 n03376595 n02643566 n01735189 n01729977 n02105251 n09421951 n02099712 n03388043 n02174001 n04147183 n02013706 n13054560 n02978881 n09246464 n02699494 n02107312 n03017168 n07745940 n02233338 n02791270 n01950731 n03857828 n02025239 n03452741 n02101388 n03388549 n01484850 n02111277 n01950731 n02174001 n02105162 n02480855 n03325584 n03272562 n03876231 n01644373 n04380533 n07697537 n04380533 n02190166 n07753592 n01630670 n02730930 n03788195 n02669723 n02100735 n03271574 n03179701 n02486261 n02105412 n02417914 n01770081 n02123394 n01855672 n02480495 n02692877 n01532829 n04372370 n01910747 n03400231 n02444819 n04099969 n03498962 n04154565 n02783161 n03124170 n03417042 n04254120 n07717410 n04372370 n07565083 n03661043 n04074963 n02504458 n03720891 n03445924 n03873416 n03775071 n02443114 n03623198 n03000247 n02423022 n03929660 n02782093 n01930112 n01776313 n03388183 n02133161 n02782093 n03393912 n03794056 n09256479 n07920052 n03384352 n02666196 n02894605 n03476684 n02526121 n02123045 n03673027 n03197337 n02114548 n04599235 n02085936 n02963159 n04258138 n03983396 n03187595 n03290653 n03179701 n01531178 n02398521 n02119789 n02089867 n04548362 n02486410 n01704323 n01494475 n04141327 n02790996 n02056570 n02106166 n02018795 n04523525 n03598930 n04118776 n03662601 n04509417 n02606052 n02966193 n03775071 n02317335 n03146219 n03355925 n02229544 n02443114 n03355925 n04590129 n02804414 n02114367 n03379051 n02138441 n03461385 n04200800 n03584829 n01755581 n04335435 n03127747 n04263257 n04192698 n01622779 n02422699 n02107683 n04532670 n02906734 n02804414 n12768682 n02108089 n02909870 n03837869 n02113186 n02112350 n01677366 n03630383 n02526121 n02840245 n01687978 n04515003 n15075141 n02841315 n02422106 n02783161 n02814533 n02102177 n02415577 n03782006 n01770081 n02114548 n03958227 n01728920 n03494278 n01873310 n02894605 n01833805 n03160309 n04458633 n03223299 n12620546 n12998815 
n01496331 n04461696 n01981276 n03595614 n02101388 n03937543 n03100240 n03791053 n04613696 n02134084 n04141975 n02093859 n03125729 n02326432 n03680355 n03998194 n01494475 n02342885 n03976657 n01819313 n04606251 n01740131 n02797295 n02123394 n02169497 n03630383 n01689811 n03950228 n07584110 n04591713 n04127249 n12144580 n07831146 n03791053 n02808440 n02793495 n02437312 n02138441 n02111500 n02109961 n03459775 n03126707 n03388549 n02096294 n03961711 n04209133 n04243546 n02791270 n01685808 n02965783 n03775546 n02074367 n03775546 n03584254 n02119789 n02437312 n03888257 n03187595 n02123045 n03937543 n02412080 n01729322 n03908714 n02125311 n01494475 n02894605 n03908618 n02114855 n02123159 n03598930 n02107142 n03290653 n02791124 n03803284 n03937543 n03388043 n03131574 n02788148 n02106382 n04467665 n02100877 n04330267 n03697007 n03710721 n02403003 n02108089 n03017168 n03733281 n03792972 n02105056 n01806567 n01630670 n03337140 n03467068 n01873310 n02398521 n02013706 n04120489 n02708093 n02110341 n03770679 n02480495 n03450230 n03584254 n02823750 n04127249 n02410509 n04562935 n04019541 n04613696 n01632777 n07836838 n02114855 n02100236 n02102318 n07831146 n03742115 n03662601 n03720891 n02804610 n02107142 n03733131 n03791053 n03991062 n02808304 n03594945 n02749479 n04562935 n02134084 n02342885 n03538406 n02107683 n02012849 n01682714 n02988304 n07932039 n02206856 n03447447 n01753488 n01755581 n02119022 n04597913 n03314780 n02865351 n03459775 n01530575 n04335435 n09288635 n02769748 n02256656 n03131574 n03770439 n02123045 n02096177 n04131690 n02397096 n01798484 n02107574 n02113186 n01855672 n03791053 n03770679 n01983481 n02093256 n01968897 n02692877 n02356798 n07875152 n02107312 n02837789 n03042490 n03188531 n03447721 n02825657 n03868242 n04552348 n01770081 n02095314 n04204347 n02087394 n04065272 n02132136 n02134418 n01632777 n04325704 n03776460 n01955084 n02129604 n01644900 n02101006 n04357314 n12985857 n03670208 n07760859 n04067472 n02099849 n03770679 n02978881 n03623198 n03717622 n04536866 n02835271 n07717410 n04429376 n02869837 n03124170 n01632458 n01531178 n03127925 n02097047 n03950228 n03028079 n02107312 n13052670 n02090721 n07711569 n02091831 n01530575 n04146614 n01667114 n03958227 n02098286 n07871810 n01980166 n02412080 n02500267 n01924916 n04254680 n02480495 n01774384 n03216828 n07711569 n03026506 n01749939 n03344393 n03938244 n02098105 n01986214 n01917289 n04418357 n02058221 n02106030 n02966193 n03032252 n02206856 n03063599 n02107312 n03843555 n02108551 n01855672 n02107142 n02102040 n04357314 n04505470 n03529860 n02437312 n02129604 n03773504 n02100877 n03877472 n04501370 n07880968 n04458633 n02167151 n03721384 n02102480 n07579787 n02123394 n02484975 n03942813 n04270147 n03777568 n02085782 n01729977 n04404412 n04311174 n03160309 n02454379 n02096294 n04065272 n02483362 n02364673 n03100240 n07873807 n03594734 n04344873 n07590611 n01883070 n03770439 n03141823 n02133161 n01689811 n01833805 n02814860 n04367480 n03710637 n07714571 n02071294 n01768244 n03388183 n01847000 n03325584 n01667114 n02236044 n04141327 n03467068 n01687978 n04285008 n03483316 n03447447 n02264363 n02097209 n04501370 n09468604 n02930766 n01917289 n04554684 n02979186 n02442845 n03345487 n02486410 n02841315 n03899768 n09399592 n03344393 n02088364 n03763968 n02105162 n04235860 n03903868 n09428293 n03661043 n03249569 n02268443 n02444819 n02116738 n03902125 n02093991 n02110185 n03832673 n03983396 n07716358 n02113712 n03887697 n03424325 n03958227 n01534433 n02086646 n04591713 n07753113 n03841143 n02790996 n02165456 n02009229 n02814860 
n04462240 n02730930 n02085620 n02098413 n03337140 n02807133 n04263257 n02108422 n02138441 n01630670 n04008634 n02113799 n02643566 n12057211 n01665541 n04404412 n03691459 n01729977 n03290653 n01924916 n02486410 n04332243 n13052670 n03598930 n02437616 n02093991 n01729977 n02115641 n02825657 n02786058 n02788148 n02094258 n02793495 n03388043 n02128757 n02443484 n02088094 n03110669 n01985128 n07714990 n02869837 n03595614 n04592741 n02127052 n07880968 n02643566 n09256479 n02356798 n02509815 n04487394 n03721384 n01728572 n02992211 n03877845 n02231487 n02445715 n02095570 n04579145 n03706229 n02107574 n01833805 n01629819 n03445777 n03710721 n03014705 n04336792 n04311174 n03724870 n03920288 n03063689 n03908618 n02085620 n02699494 n02096437 n03804744 n04209239 n03249569 n11939491 n01882714 n02129165 n03773504 n04346328 n02102040 n12620546 n02177972 n02066245 n03492542 n02090721 n04482393 n01914609 n02174001 n02233338 n01693334 n01665541 n02280649 n01514668 n01641577 n02107683 n04040759 n03355925 n04579432 n02280649 n02361337 n03937543 n03891251 n02492035 n03759954 n03763968 n01582220 n03866082 n04086273 n04330267 n04476259 n04118776 n03180011 n03838899 n03627232 n04264628 n02101006 n02113624 n02395406 n01675722 n04090263 n03785016 n02137549 n02277742 n03642806 n07718472 n03447447 n03792782 n04008634 n04254777 n01631663 n04254680 n02074367 n01744401 n03127747 n02190166 n03623198 n02607072 n02877765 n02790996 n02992529 n02492660 n02117135 n01580077 n03028079 n02102040 n01494475 n04461696 n01917289 n04146614 n04004767 n02906734 n01560419 n02085936 n12267677 n03075370 n01682714 n02669723 n01751748 n02999410 n10148035 n02797295 n03958227 n03134739 n01860187 n02443114 n03028079 n03495258 n03787032 n02108089 n01687978 n01484850 n02098105 n03942813 n02109525 n04613696 n01631663 n09835506 n01784675 n02137549 n09472597 n02895154 n03676483 n04209239 n01784675 n03028079 n03355925 n03483316 n03337140 n03495258 n04311004 n04270147 n03791053 n02488702 n02895154 n02100583 n10565667 n04548280 n02091134 n01806567 n02264363 n02708093 n02111277 n02692877 n03837869 n03240683 n03773504 n03706229 n03742115 n01734418 n12998815 n03452741 n06596364 n03041632 n02096585 n04317175 n07892512 n01755581 n03777568 n03457902 n02106382 n01601694 n03691459 n02114855 n03461385 n02096294 n03498962 n04482393 n02412080 n03857828 n02124075 n02106550 n03950228 n07730033 n02093991 n07768694 n02870880 n02672831 n02268443 n03773504 n09332890 n02025239 n04562935 n07742313 n04192698 n04049303 n01644900 n02769748 n01774384 n02894605 n03127747 n03045698 n03388549 n03724870 n03706229 n03825788 n01775062 n03670208 n02492035 n01983481 n04435653 n03028079 n03445924 n02108000 n01882714 n02346627 n09399592 n12620546 n03047690 n02807133 n03630383 n03325584 n02110063 n07860988 n01443537 n04523525 n02112706 n02815834 n03720891 n03843555 n02992211 n02107908 n03662601 n03207743 n04507155 n02094433 n02791270 n02788148 n02094258 n02105162 n04179913 n07930864 n03873416 n02027492 n02790996 n03924679 n07753275 n03658185 n02444819 n07802026 n01484850 n02113186 n02110341 n02090622 n04366367 n01773157 n03792972 n02690373 n02090622 n06794110 n02101388 n07697313 n03297495 n03032252 n01688243 n02090379 n02017213 n04152593 n02108551 n03658185 n02643566 n04049303 n03544143 n03709823 n01632458 n02111500 n07717556 n01688243 n07747607 n01592084 n03485794 n02443114 n03888257 n07753592 n01930112 n03127747 n01580077 n12057211 n03344393 n03697007 n01601694 n01818515 n04517823 n04584207 n02002724 n03424325 n03895866 n03787032 n02100236 n03110669 n04523525 n01983481 n04465501 
n02090721 n02980441 n02088094 n02492035 n03109150 n02091635 n07695742 n02074367 n07754684 n02783161 n03761084 n02096585 n04099969 n01930112 n03379051 n02105412 n02097298 n04026417 n03866082 n04004767 n01704323 n04286575 n02321529 n04417672 n04389033 n02909870 n01685808 n01806143 n02006656 n03832673 n07697313 n07932039 n02206856 n12144580 n02108422 n07753113 n03777754 n04259630 n02641379 n13052670 n03788365 n02870880 n02799071 n02137549 n02999410 n04317175 n02094114 n03529860 n03188531 n03160309 n03697007 n02091831 n03594734 n04389033 n02799071 n07747607 n02504458 n04277352 n01914609 n02281787 n03868863 n09421951 n03792782 n02102318 n01484850 n04192698 n02089867 n03584254 n01728572 n03062245 n02109047 n02108422 n02088632 n02447366 n02236044 n02910353 n02105056 n03498962 n03250847 n04120489 n02999410 n03467068 n03187595 n03255030 n04004767 n02091635 n04507155 n03782006 n02317335 n02165456 n04243546 n02099849 n04239074 n09246464 n04335435 n03770439 n01978455 n01644373 n02256656 n02509815 n03584254 n03710721 n01795545 n07753592 n02412080 n07892512 n02091032 n04074963 n03197337 n03075370 n02111129 n03930630 n01770081 n04235860 n02132136 n02100735 n01978287 n02097658 n04540053 n04149813 n02105251 n01984695 n03314780 n02115641 n04235860 n02843684 n04311004 n04118776 n02276258 n02909870 n02701002 n02051845 n04599235 n01689811 n03637318 n03344393 n04591713 n02018795 n02795169 n04462240 n03776460 n03404251 n03188531 n07749582 n01631663 n02123597 n02328150 n02110958 n02125311 n04023962 n03133878 n03131574 n02091467 n01484850 n02096177 n01496331 n02058221 n03028079 n02113023 n02480855 n02892201 n04418357 n03042490 n03124170 n12985857 n04141975 n01860187 n02130308 n04037443 n13052670 n07714571 n02391049 n04149813 n04099969 n01729977 n04243546 n02978881 n03131574 n02127052 n04366367 n02229544 n01669191 n02489166 n07716906 n03208938 n02088466 n02093754 n01632777 n04118538 n02363005 n02114855 n09256479 n02787622 n02105412 n03498962 n12768682 n03216828 n03598930 n02643566 n03837869 n07695742 n01817953 n01667778 n04251144 n02231487 n04005630 n03445777 n04597913 n07615774 n02769748 n01833805 n01828970 n01796340 n01694178 n03995372 n03494278 n03271574 n03014705 n02088632 n03788195 n02328150 n02992529 n03498962 n02169497 n02112137 n02483362 n07836838 n02086240 n01739381 n02325366 n03877472 n04589890 n02133161 n01632777 n02105162 n04019541 n01775062 n02107574 n04509417 n01860187 n02088632 n03459775 n03133878 n04254680 n01755581 n02939185 n02091134 n02114712 n07714990 n02484975 n03445924 n03018349 n02802426 n01774384 n03124043 n03355925 n03146219 n03388183 n02226429 n07860988 n03388183 n04009552 n02488291 n03899768 n03649909 n03393912 n02797295 n03014705 n03729826 n01560419 n02114367 n03637318 n02115641 n04517823 n02346627 n02033041 n02804414 n07714990 n04120489 n03481172 n02099267 n10565667 n03825788 n03240683 n02123597 n02097130 n02090721 n02094433 n02667093 n03461385 n02101388 n09399592 n02109047 n04153751 n04479046 n03223299 n13133613 n01688243 n02363005 n04493381 n02445715 n02280649 n03804744 n04596742 n04597913 n01729322 n02793495 n04604644 n04592741 n03425413 n04332243 n04562935 n02494079 n07693725 n07717410 n06874185 n03063689 n02389026 n02110627 n03930630 n01871265 n07716358 n02114712 n03216828 n06596364 n03494278 n07579787 n04548280 n04409515 n02102040 n07753113 n01632777 n02843684 n02395406 n02100583 n03481172 n02099849 n02708093 n01980166 n02096294 n01744401 n03291819 n04004767 n01534433 n03223299 n03773504 n04090263 n02002724 n02422106 n04325704 n01531178 n02948072 n02281787 n04239074 n04399382 
n03400231 n02802426 n02165456 n02256656 n02104029 n06794110 n07932039 n02793495 n02093754 n02834397 n02165456 n03394916 n02138441 n01729977 n02138441 n04311174 n03388043 n03344393 n03445924 n02504013 n13040303 n02363005 n02206856 n03982430 n03661043 n02107574 n03785016 n02231487 n04487394 n04376876 n04277352 n07718472 n04118776 n01914609 n01798484 n01944390 n03355925 n03742115 n02108089 n03924679 n03134739 n02011460 n02974003 n02100583 n01496331 n01860187 n02100236 n04596742 n02119789 n02342885 n04044716 n04099969 n03602883 n07717556 n04548280 n03843555 n04409515 n02093647 n01797886 n04429376 n03063599 n07760859 n02487347 n01697457 n03706229 n02988304 n03134739 n02979186 n02892201 n03840681 n03425413 n13044778 n04330267 n03425413 n02099849 n04044716 n01440764 n02105251 n03599486 n03240683 n02097130 n04162706 n03443371 n02492660 n03793489 n04347754 n04296562 n03666591 n04584207 n04136333 n02123159 n04070727 n02981792 n07718472 n01694178 n10565667 n04532670 n02480495 n07590611 n02111277 n04554684 n01695060 n04311004 n02102480 n04447861 n02807133 n04398044 n04418357 n03690938 n01644373 n03837869 n02493793 n01796340 n02095889 n03781244 n02088466 n02906734 n04596742 n12057211 n02097658 n03954731 n02447366 n03223299 n03710637 n03459775 n04458633 n02397096 n03877472 n07584110 n03393912 n07716906 n07836838 n03720891 n02109961 n04326547 n01753488 n02389026 n07734744 n07745940 n02094114 n02981792 n02097298 n03930630 n02783161 n04346328 n01774750 n01829413 n02910353 n02894605 n02132136 n04372370 n04040759 n02493509 n03788195 n04357314 n02106166 n02168699 n02091831 n02105056 n01986214 n02268443 n01739381 n01774384 n02444819 n02105641 n01687978 n04606251 n03325584 n04596742 n02325366 n02950826 n04067472 n02086646 n02113799 n04557648 n04429376 n01704323 n02056570 n02488291 n07614500 n03089624 n01532829 n03160309 n04550184 n07730033 n02095570 n04367480 n04081281 n04254120 n04443257 n03777568 n03584829 n04201297 n12144580 n02834397 n03127925 n02100735 n02256656 n02092002 n01753488 n04259630 n03197337 n02510455 n02108422 n02013706 n03840681 n02108089 n04485082 n03584829 n02134084 n03814639 n04522168 n04589890 n04252225 n03188531 n03594945 n03691459 n04041544 n04033901 n04090263 n02486410 n03873416 n03871628 n02325366 n02841315 n02037110 n02909870 n01629819 n07565083 n02088094 n03954731 n12998815 n03661043 n04332243 n02167151 n04099969 n04266014 n03733131 n02033041 n02165456 n02109047 n02999410 n02177972 n02033041 n03899768 n01685808 n04023962 n02114712 n03775546 n02092002 n02107142 n02977058 n01582220 n04127249 n03814906 n03769881 n03393912 n03291819 n02497673 n03127925 n09193705 n07831146 n03980874 n07753113 n01558993 n02808304 n03854065 n04483307 n02102040 n04326547 n02443484 n09256479 n03961711 n01641577 n03733131 n04254680 n02099601 n02089078 n03016953 n03216828 n02101388 n02229544 n02606052 n04141076 n01694178 n03063689 n01774384 n02607072 n02091244 n03937543 n04328186 n03532672 n03485407 n07717556 n02006656 n04525305 n02123597 n02708093 n02137549 n07614500 n03947888 n03983396 n03544143 n01440764 n01440764 n03717622 n02085620 n02727426 n03485794 n03825788 n04259630 n02788148 n03930630 n04392985 n02454379 n02100236 n01534433 n02102318 n04044716 n02113186 n02066245 n02127052 n01950731 n03000684 n02843684 n04147183 n02110063 n07590611 n02113712 n04074963 n03871628 n02168699 n09246464 n07802026 n01693334 n03908714 n02130308 n09193705 n02091244 n02111500 n03642806 n04033901 n02999410 n02128925 n06359193 n07717410 n02102318 n04208210 n02086079 n03868863 n03743016 n03062245 n03717622 n04069434 n03598930 
n01978287 n04026417 n01748264 n02096294 n04483307 n01592084 n03787032 n03742115 n01795545 n02807133 n02769748 n02108915 n04509417 n02093754 n02129604 n02090622 n01806567 n04579432 n04542943 n03400231 n07871810 n09399592 n02114367 n04049303 n02979186 n02494079 n03944341 n03535780 n03297495 n07831146 n02457408 n04254680 n03028079 n03498962 n02883205 n02077923 n02090721 n04005630 n02056570 n01775062 n03866082 n02087394 n04336792 n01917289 n04111531 n02007558 n04086273 n02843684 n13037406 n04200800 n03000684 n03991062 n02488702 n02808440 n03887697 n01784675 n02058221 n02841315 n02114367 n03657121 n02787622 n03095699 n03450230 n02123394 n02869837 n03793489 n02094258 n04380533 n02978881 n07584110 n02927161 n02930766 n02093428 n04507155 n03534580 n03857828 n01872401 n03337140 n02980441 n02102177 n02509815 n02097047 n02992529 n02797295 n03866082 n02279972 n03485794 n03530642 n01518878 n04483307 n04033901 n07749582 n02917067 n03623198 n02233338 n03623198 n03594945 n02256656 n02999410 n02093991 n02002724 n03788365 n03623198 n02110063 n01740131 n04346328 n04033995 n02095889 n04311174 n02445715 n03218198 n02640242 n04462240 n03180011 n02093256 n03425413 n02504013 n03877472 n02087046 n03976467 n02091134 n04044716 n02088364 n02009912 n02206856 n03297495 n02871525 n03633091 n02105855 n03075370 n02119789 n01644373 n03216828 n03478589 n03929855 n02939185 n01847000 n02317335 n01983481 n03657121 n02086910 n02088238 n02168699 n03976467 n07697313 n03743016 n04086273 n04200800 n01632777 n03529860 n03404251 n03255030 n03476991 n04311174 n02093991 n03924679 n03478589 n04258138 n01774384 n02277742 n01980166 n02951358 n03983396 n03482405 n02091244 n01592084 n02415577 n02125311 n03888257 n03871628 n02096437 n03743016 n04118776 n02526121 n07711569 n01694178 n01744401 n03424325 n10565667 n02007558 n01860187 n03127925 n04380533 n03637318 n02088238 n04118538 n02101006 n02110958 n01820546 n02106550 n03874293 n02229544 n03937543 n03838899 n04147183 n03697007 n02655020 n01677366 n02415577 n03891332 n03673027 n02328150 n02363005 n04209133 n04065272 n04399382 n02114548 n03724870 n12620546 n04277352 n02105855 n01704323 n01697457 n02094433 n02110958 n02092339 n01734418 n02108915 n02791270 n01534433 n04111531 n03476684 n02708093 n01955084 n01580077 n01592084 n03602883 n02871525 n04037443 n02086910 n13040303 n07749582 n01930112 n13037406 n03792972 n01775062 n02403003 n02974003 n01644373 n02966193 n03481172 n02095570 n03297495 n01614925 n01440764 n02879718 n02105641 n03125729 n03891332 n01697457 n03443371 n03794056 n02231487 n02395406 n02787622 n03425413 n02111889 n01632458 n02110806 n03584829 n03733805 n04613696 n07747607 n02687172 n03792782 n02492035 n02489166 n03393912 n03018349 n03843555 n02769748 n02168699 n03272010 n04532106 n01943899 n01882714 n03127747 n02088632 n04589890 n12768682 n07715103 n02410509 n03995372 n01728920 n02091134 n01820546 n01739381 n02917067 n04591157 n07697313 n01728920 n02835271 n02028035 n03908714 n02096294 n02106030 n03384352 n02174001 n04522168 n03866082 n02817516 n01978287 n04259630 n04399382 n02113978 n03447721 n02749479 n03188531 n02483708 n07693725 n03014705 n01622779 n03642806 n02018207 n09332890 n03670208 n03291819 n02017213 n02098286 n04141327 n02105251 n02447366 n02321529 n03792782 n01443537 n01943899 n04522168 n13133613 n03891251 n02106166 n04592741 n04179913 n03216828 n04467665 n01883070 n07614500 n02105162 n04456115 n04332243 n04049303 n07615774 n01616318 n07802026 n03291819 n01688243 n02396427 n09229709 n09399592 n02027492 n04517823 n03325584 n02165456 n03803284 n02802426 n09428293 
n02168699 n02106662 n03259280 n03733131 n04258138 n01924916 n01945685 n09428293 n02871525 n02786058 n03721384 n04285008 n03485794 n01784675 n04428191 n02092002 n04372370 n04099969 n03026506 n02971356 n02106030 n04131690 n01847000 n03794056 n12985857 n02488702 n01872401 n03372029 n01806567 n01917289 n03444034 n01776313 n02814533 n02672831 n03637318 n02113978 n02165456 n04548280 n02917067 n01560419 n02825657 n04552348 n02999410 n02190166 n03065424 n02825657 n07716358 n02877765 n09421951 n12267677 n01819313 n04264628 n03344393 n02002724 n01641577 n02256656 n01532829 n03854065 n02791270 n02951585 n03014705 n01592084 n01728572 n01774750 n03868242 n04370456 n03337140 n03124043 n03290653 n02488291 n04505470 n04553703 n02107574 n01692333 n12620546 n04086273 n03657121 n01582220 n03485407 n03840681 n07768694 n03782006 n02114548 n11939491 n04552348 n03208938 n02006656 n03764736 n07695742 n01820546 n02326432 n02009229 n02408429 n03018349 n03018349 n02504458 n02089973 n01917289 n01739381 n02130308 n04099969 n02102040 n03788195 n03764736 n02422699 n01978287 n02860847 n02749479 n03877845 n03404251 n04209133 n07695742 n04090263 n03720891 n04311174 n03642806 n03933933 n04005630 n02093991 n02977058 n09835506 n03417042 n01742172 n03888257 n02782093 n07802026 n03208938 n02130308 n02090622 n04040759 n02422699 n03594945 n02437616 n03337140 n09399592 n02129604 n02488291 n04597913 n03089624 n03710193 n02930766 n04435653 n01806567 n03100240 n01582220 n03871628 n02422106 n02494079 n04372370 n07716358 n04277352 n02236044 n03891332 n03814639 n02396427 n02793495 n02096437 n02504458 n02085936 n01978287 n04239074 n03532672 n02869837 n02127052 n03680355 n02206856 n03602883 n01817953 n03733805 n03938244 n03450230 n04044716 n02965783 n03938244 n01592084 n03290653 n04479046 n07831146 n01735189 n04525305 n02870880 n02776631 n02172182 n04081281 n03876231 n01985128 n01917289 n10148035 n04286575 n03598930 n02085782 n02699494 n04009552 n03492542 n07749582 n03017168 n03494278 n02134418 n03792782 n01687978 n13040303 n03220513 n03347037 n03476684 n01828970 n02114367 n07715103 n02119789 n01749939 n03791053 n02457408 n01440764 n01824575 n04372370 n07802026 n04270147 n04033901 n04515003 n03950228 n04005630 n02091032 n02090379 n02486410 n07684084 n04592741 n02106382 n02165456 n02483708 n01737021 n02814533 n04081281 n03884397 n07749582 n01641577 n03929855 n04550184 n04467665 n03930313 n02951585 n02747177 n04487394 n01773549 n04228054 n02410509 n04596742 n02795169 n03496892 n04613696 n02398521 n03814906 n02823750 n02106550 n02128385 n02364673 n03770679 n02099429 n01669191 n12057211 n04476259 n02229544 n03781244 n02509815 n02807133 n02132136 n03447721 n02840245 n03743016 n04118776 n04356056 n02190166 n03424325 n04606251 n04146614 n04040759 n07754684 n02119022 n02454379 n02443484 n04310018 n03527444 n04399382 n03843555 n01740131 n02127052 n02749479 n03045698 n02086240 n01795545 n04592741 n02701002 n04149813 n02823750 n01728920 n04493381 n02894605 n03970156 n03838899 n03877845 n03534580 n02094258 n03047690 n02033041 n03208938 n03124043 n03000134 n03250847 n01817953 n02727426 n01669191 n02268443 n03770439 n02389026 n04550184 n02804610 n03461385 n02091244 n02363005 n02391049 n07717410 n03404251 n07695742 n04462240 n01817953 n06359193 n01685808 n02509815 n09835506 n04523525 n04398044 n01955084 n02423022 n02129604 n02066245 n01773797 n02859443 n04090263 n03617480 n04548280 n03929855 n03777754 n02791270 n02317335 n03791053 n03180011 n01677366 n03976467 n02497673 n01729322 n03297495 n02268853 n01742172 n07716906 n03630383 n02825657 n02094258 
n07873807 n03776460 n01843383 n02840245 n02607072 n01491361 n03109150 n03908618 n02132136 n01950731 n02133161 n04070727 n03384352 n03594945 n03933933 n03891332 n01968897 n09229709 n02095314 n02088364 n01641577 n03124170 n03272562 n02817516 n01943899 n07590611 n04235860 n03991062 n02006656 n04026417 n02113799 n04311004 n02815834 n04008634 n07718472 n02437616 n04325704 n03676483 n03207941 n02066245 n03873416 n02489166 n03782006 n04523525 n03710637 n02791270 n09835506 n01768244 n03888257 n04325704 n02007558 n01641577 n03983396 n04179913 n03786901 n03425413 n02012849 n03876231 n02802426 n04067472 n02112350 n02797295 n03895866 n07753113 n03297495 n02091635 n04487394 n03729826 n02104029 n02102973 n03000247 n01871265 n03920288 n03627232 n02229544 n02092339 n02802426 n03018349 n13044778 n03014705 n02776631 n03109150 n13052670 n03218198 n04125021 n04550184 n04479046 n04443257 n03908618 n02094433 n02113186 n02105162 n02980441 n02971356 n07697313 n02102177 n04613696 n02095889 n02979186 n09472597 n03476684 n02692877 n01756291 n03976657 n03494278 n03026506 n04228054 n04146614 n03100240 n02018795 n01873310 n04026417 n02086910 n04192698 n02093991 n04116512 n02107908 n02066245 n04026417 n02444819 n02536864 n02361337 n03770439 n02086646 n03444034 n04008634 n02727426 n07615774 n02107908 n03637318 n04317175 n03662601 n09256479 n03933933 n03666591 n02102318 n07802026 n04467665 n03109150 n03710721 n02817516 n01855672 n03259280 n02108089 n01943899 n02655020 n02817516 n07871810 n03935335 n03250847 n04417672 n04252077 n01910747 n03950228 n02009912 n02690373 n02787622 n01685808 n02486410 n04326547 n03467068 n01742172 n02965783 n04209133 n06874185 n01797886 n01755581 n03942813 n02087394 n02137549 n03047690 n04447861 n04275548 n02229544 n03530642 n01930112 n04548362 n04552348 n02486261 n02328150 n03355925 n02096177 n02403003 n01817953 n01629819 n03983396 n03207941 n01806567 n02089973 n07714990 n03590841 n02086646 n03781244 n02090622 n03445924 n02051845 n04560804 n09288635 n03840681 n01622779 n03445924 n02058221 n03837869 n02125311 n02783161 n01698640 n02787622 n03706229 n02840245 n02808440 n03680355 n01560419 n01978287 n02422699 n01687978 n01537544 n03793489 n03016953 n04044716 n01560419 n02056570 n03179701 n09468604 n03623198 n02690373 n02454379 n04467665 n02112018 n04591157 n04243546 n04254777 n01558993 n07932039 n04258138 n02085936 n03240683 n04409515 n03661043 n01532829 n03930630 n02112350 n02837789 n02098286 n04485082 n03272562 n02105505 n03916031 n07742313 n03042490 n02105855 n04229816 n04447861 n02916936 n02120505 n02917067 n01984695 n02454379 n03529860 n03482405 n04049303 n03452741 n02113023 n03447721 n01728572 n03942813 n03929855 n03344393 n01692333 n01945685 n03929660 n07565083 n04579432 n03594734 n03793489 n02114712 n02111129 n02091244 n12057211 n02493793 n03404251 n03026506 n01817953 n02130308 n02930766 n03594734 n02777292 n02486410 n09468604 n02489166 n01981276 n04275548 n02865351 n04118538 n01641577 n02113624 n04008634 n01945685 n02692877 n02749479 n03891332 n02795169 n02105641 n04136333 n04417672 n04263257 n06596364 n02091032 n03770679 n07749582 n02977058 n03594734 n02317335 n04550184 n02437312 n01728572 n02395406 n04522168 n04209133 n02108000 n01843383 n04004767 n03804744 n04398044 n02643566 n13052670 n03443371 n02101388 n02133161 n02641379 n03814906 n02115913 n02108915 n01978287 n04277352 n04493381 n01608432 n04548280 n03379051 n03796401 n02051845 n04350905 n04612504 n03207743 n02097298 n03447447 n02804610 n01770393 n10148035 n02094258 n03720891 n02089078 n02130308 n02536864 n03942813 n02110341 
n04579432 n07716358 n03095699 n02128925 n04141975 n02119789 n03481172 n03532672 n02655020 n07749582 n02109961 n02101556 n03662601 n03803284 n02641379 n04367480 n02101388 n04562935 n01694178 n02088466 n02536864 n03781244 n04192698 n02167151 n02089078 n03544143 n03026506 n02128925 n04251144 n03929855 n03085013 n03125729 n01677366 n03661043 n04584207 n04200800 n02487347 n02321529 n03814906 n01924916 n02802426 n01693334 n02169497 n02128925 n07717556 n03895866 n02099429 n03085013 n11939491 n09468604 n02109047 n07565083 n04310018 n02988304 n07754684 n02058221 n02114367 n03485794 n03424325 n04443257 n01697457 n02219486 n02877765 n01644900 n03775071 n02097047 n02085620 n07693725 n03160309 n02815834 n03110669 n03868863 n04008634 n03743016 n02094114 n03208938 n07590611 n04273569 n03706229 n02013706 n07753592 n02916936 n02112137 n02108089 n03841143 n03595614 n03125729 n07742313 n02487347 n04235860 n02782093 n01742172 n04604644 n04554684 n04086273 n02906734 n02091635 n03201208 n07693725 n09332890 n02088364 n03017168 n03729826 n03983396 n03676483 n04204347 n04251144 n02917067 n04081281 n03930313 n03494278 n03160309 n02389026 n03250847 n03133878 n02091635 n02389026 n02087394 n02113799 n02281787 n04548280 n04509417 n03384352 n02009229 n04370456 n07753275 n02102177 n01494475 n03459775 n02804610 n04456115 n02099712 n01494475 n04344873 n03788195 n01944390 n01910747 n03868242 n03452741 n13044778 n01883070 n02701002 n02793495 n02692877 n03220513 n01978287 n02483362 n01776313 n02808304 n03721384 n02012849 n03733281 n07920052 n02326432 n04192698 n02113799 n02106550 n02097298 n02509815 n02835271 n04548280 n04522168 n03950228 n01689811 n09428293 n01877812 n02100583 n01704323 n03680355 n03000247 n03742115 n04486054 n02097298 n02091635 n03680355 n02002556 n02101388 n01818515 n02454379 n03216828 n03933933 n02107683 n04252077 n02980441 n04039381 n03201208 n02102177 n03388549 n04523525 n03770439 n03710193 n01675722 n04501370 n04501370 n02092002 n03598930 n07932039 n02101006 n02268853 n04259630 n03871628 n02786058 n03485794 n02009912 n02091244 n02808304 n01860187 n07613480 n01843065 n02095889 n01943899 n02859443 n02112350 n02165456 n01773797 n02328150 n03485407 n01955084 n01601694 n03290653 n01796340 n06359193 n01558993 n03950228 n02096437 n02093859 n01773549 n04154565 n02437616 n02017213 n04146614 n02488702 n02137549 n02013706 n02100735 n04465501 n02727426 n04467665 n02095889 n02415577 n03075370 n02097298 n02027492 n02441942 n02104029 n03617480 n03623198 n02536864 n07875152 n04208210 n02423022 n03016953 n01669191 n04344873 n02526121 n09472597 n03873416 n01829413 n12057211 n02950826 n02786058 n02486410 n02486261 n02423022 n02107574 n03773504 n01558993 n02096177 n03961711 n01873310 n04118538 n02091032 n03483316 n13040303 n03180011 n02125311 n02172182 n03976657 n02094258 n02980441 n02107312 n01755581 n02776631 n02492660 n01664065 n01514668 n02966193 n02492035 n03482405 n04019541 n03954731 n02106550 n04404412 n02797295 n01955084 n04612504 n04069434 n02492035 n10565667 n02091134 n01631663 n02727426 n02071294 n02124075 n02092002 n02321529 n04208210 n01819313 n02087046 n04409515 n03485794 n04356056 n02087046 n02492035 n02085782 n03788365 n02483708 n04532106 n02106030 n03742115 n03868242 n03000684 n02100236 n02398521 n03976657 n03595614 n03884397 n03109150 n02978881 n02279972 n02391049 n03417042 n01734418 n07565083 n03970156 n02256656 n01689811 n02107683 n04591713 n02105855 n04099969 n02980441 n07720875 n04259630 n07920052 n03777754 n02099429 n03777568 n03759954 n02109525 n04264628 n03584829 n04525305 n02099712 n01689811 
n02169497 n02011460 n02109961 n03814906 n02095314 n03866082 n02966687 n03710721 n02690373 n02514041 n03062245 n02797295 n02167151 n01518878 n13040303 n13044778 n02088364 n03045698 n03857828 n09288635 n03873416 n10148035 n02837789 n03388183 n03272010 n13054560 n02699494 n02051845 n02966193 n02437312 n04557648 n02177972 n03792782 n01751748 n02892767 n04344873 n03902125 n01558993 n02087394 n02006656 n01784675 n02099601 n03930313 n02980441 n02097209 n02091032 n03742115 n02606052 n02104365 n02097130 n07860988 n02120079 n04235860 n02883205 n02727426 n02099267 n03884397 n02992211 n03095699 n04254777 n02093859 n03146219 n04548362 n04335435 n02489166 n01531178 n02259212 n02894605 n02114855 n03188531 n02088466 n03956157 n04589890 n04525038 n02233338 n04612504 n07711569 n02437312 n03976657 n12144580 n01843065 n02120505 n07745940 n04552348 n03710721 n03425413 n01697457 n02396427 n02092339 n02493509 n02087046 n02123159 n04251144 n04259630 n02096051 n04507155 n02106662 n03445777 n03494278 n01756291 n03063689 n02105162 n04346328 n04591713 n03662601 n02093428 n02917067 n03710721 n02493509 n02794156 n07720875 n01669191 n02088364 n01873310 n04037443 n03598930 n07714571 n04069434 n03888257 n07718472 n03676483 n03929660 n02514041 n02105056 n04275548 n03534580 n04296562 n03770439 n02165456 n02704792 n03995372 n04344873 n02123159 n11879895 n02094114 n02514041 n03388549 n01629819 n02776631 n02963159 n03857828 n07768694 n01847000 n02229544 n02834397 n04380533 n07717410 n02112706 n03014705 n11939491 n02769748 n03075370 n03534580 n02116738 n02111277 n03482405 n02096294 n01819313 n02105056 n04540053 n03028079 n03467068 n02107683 n12768682 n02481823 n02447366 n03255030 n02977058 n12620546 n03131574 n02981792 n02110063 n03494278 n02415577 n02398521 n04554684 n03063599 n04579145 n04335435 n04264628 n04311004 n02457408 n02106550 n04483307 n02977058 n02091244 n02169497 n03041632 n03630383 n02669723 n02104029 n02364673 n02749479 n02107312 n02128925 n02091831 n04554684 n01978287 n02655020 n02125311 n04136333 n07753113 n01943899 n04204347 n03372029 n04418357 n02980441 n02859443 n04235860 n09472597 n02328150 n02017213 n01734418 n03930313 n03868242 n04355338 n04118538 n02804610 n02028035 n02835271 n02114548 n03710193 n04033901 n01984695 n03443371 n03956157 n07753113 n03532672 n01664065 n02786058 n02125311 n02085620 n02655020 n04235860 n03018349 n13040303 n03658185 n04254680 n01484850 n03594945 n04209133 n03877845 n12985857 n02102040 n02112018 n03467068 n02115641 n04562935 n03042490 n04429376 n02895154 n13052670 n01514668 n01491361 n01924916 n04039381 n02437616 n04065272 n01855672 n03733281 n03935335 n02492035 n02130308 n04131690 n01484850 n03197337 n03761084 n03899768 n02128385 n04604644 n03623198 n04152593 n02783161 n04252225 n04118538 n02412080 n03717622 n02480495 n02102480 n02676566 n02492035 n04265275 n07742313 n03483316 n03706229 n02129165 n07718747 n03967562 n01443537 n02190166 n01943899 n02089078 n03627232 n02110958 n03902125 n04081281 n02172182 n02099849 n02492035 n02999410 n04435653 n03127925 n07880968 n04243546 n03544143 n01877812 n02823750 n02814533 n02916936 n02120505 n02088632 n02977058 n07734744 n02676566 n01770081 n04116512 n02871525 n02091032 n02536864 n03223299 n02963159 n03180011 n03207743 n03496892 n03444034 n03100240 n04592741 n02091831 n04613696 n02097130 n03196217 n04523525 n04505470 n04153751 n03786901 n03220513 n02808440 n04399382 n03594945 n01978455 n01824575 n01986214 n03792782 n02730930 n03208938 n02641379 n02106030 n02106550 n02110063 n03786901 n04532670 n03595614 n13054560 n02233338 n03803284 
n03355925 n02236044 n02951585 n03063599 n03047690 n01496331 n02708093 n02356798 n04442312 n02107574 n03459775 n04026417 n02860847 n02655020 n03983396 n03658185 n04589890 n03956157 n02093991 n02091032 n02977058 n01667114 n02500267 n03347037 n07716906 n03598930 n02841315 n04254777 n04049303 n13040303 n03495258 n04596742 n15075141 n02105251 n01667114 n01775062 n02002724 n04536866 n01768244 n02808440 n02087046 n02917067 n04111531 n02190166 n03690938 n13040303 n04133789 n03877845 n01985128 n03220513 n03970156 n04483307 n01641577 n03384352 n02823750 n02088238 n04346328 n04423845 n04356056 n04509417 n02606052 n01704323 n07831146 n02120505 n02099601 n02799071 n02233338 n03394916 n02865351 n03272562 n03843555 n09246464 n02825657 n02951585 n03692522 n04517823 n03803284 n02086910 n07613480 n09399592 n03775071 n02099429 n07695742 n03527444 n04330267 n03832673 n02894605 n02951585 n09332890 n13054560 n03623198 n02363005 n04275548 n09288635 n03902125 n04435653 n04398044 n02666196 n04147183 n02454379 n02107574 n04592741 n04200800 n02066245 n01629819 n03272562 n03877472 n02009229 n03532672 n02437312 n02089078 n04127249 n03443371 n02091635 n02667093 n03935335 n02364673 n02165105 n03770439 n03063599 n02363005 n03100240 n02815834 n04275548 n02791270 n02325366 n01695060 n02787622 n07753113 n02128385 n04125021 n02395406 n04371430 n03388043 n12620546 n04597913 n03967562 n02708093 n02280649 n02113978 n09288635 n03425413 n03207941 n01740131 n04120489 n02106382 n02536864 n04458633 n03633091 n03967562 n04371430 n02690373 n02113186 n02870880 n02114855 n02396427 n02132136 n02107908 n01950731 n02992529 n03814639 n03594734 n07613480 n07932039 n03721384 n02641379 n03721384 n03661043 n04509417 n02814533 n02437616 n04192698 n02002724 n15075141 n03670208 n02974003 n02094433 n03617480 n04486054 n03290653 n03255030 n04435653 n02916936 n01728572 n01632777 n03028079 n02106382 n12267677 n02279972 n02111129 n01820546 n03680355 n03991062 n02090721 n02879718 n01514668 n01728572 n04442312 n03379051 n02930766 n03982430 n02497673 n02115641 n02389026 n02793495 n03594945 n03661043 n04398044 n01773797 n03630383 n07892512 n02259212 n02128757 n03595614 n03126707 n04200800 n12620546 n02091032 n01531178 n03775071 n02346627 n02096294 n04204347 n02892201 n01807496 n03825788 n02342885 n02128385 n07745940 n04404412 n03720891 n02109961 n03976657 n02093256 n03787032 n03794056 n04136333 n03787032 n02105855 n01774384 n02974003 n02106030 n04023962 n03485794 n02086910 n02091134 n02727426 n04591157 n03804744 n04111531 n03733805 n02787622 n02980441 n03347037 n01630670 n04579432 n01944390 n12620546 n02114712 n03527444 n04239074 n01807496 n01592084 n02879718 n04429376 n02643566 n07871810 n07753113 n03042490 n02281787 n03179701 n01685808 n03814906 n02927161 n02346627 n03160309 n04037443 n02708093 n03590841 n04370456 n02948072 n02494079 n06785654 n04507155 n02011460 n02256656 n04037443 n03485794 n03271574 n04254777 n02128757 n04154565 n03461385 n02966193 n02226429 n02101006 n02112018 n07695742 n02110341 n02443114 n02110185 n02948072 n02840245 n03854065 n02096294 n02980441 n03062245 n03584829 n01644900 n03891251 n03599486 n02701002 n02172182 n03888605 n03642806 n04562935 n01930112 n02389026 n02783161 n02807133 n04099969 n03457902 n03633091 n03594945 n07695742 n07714990 n03208938 n04479046 n09835506 n03595614 n01983481 n03670208 n01734418 n01978455 n03721384 n02091635 n02133161 n04026417 n01734418 n03530642 n04209133 n04099969 n01616318 n02279972 n03676483 n03868863 n02666196 n02396427 n01768244 n03240683 n02112018 n13133613 n03032252 n04235860 n02110627 
n03404251 n04350905 n02087046 n01843383 n01797886 n02992211 n02950826 n02268853 n03888605 n07248320 n03160309 n07248320 n03868242 n01704323 n01944390 n04462240 n06794110 n03032252 n04376876 n02281406 n02134418 n03584829 n03598930 n04254777 n04435653 n02017213 n04049303 n03180011 n03782006 n02749479 n04525305 n02791270 n04429376 n02102318 n07584110 n02966687 n02423022 n02107142 n02101556 n04179913 n02999410 n02091134 n02797295 n04560804 n01955084 n07583066 n03743016 n03623198 n03843555 n02134084 n02093256 n02105505 n03788195 n07716906 n04542943 n04296562 n02120079 n03920288 n02892767 n04311174 n04141327 n02117135 n03888605 n04557648 n04523525 n02281787 n02951358 n03680355 n07693725 n02870880 n02007558 n06596364 n01984695 n03345487 n02091244 n09256479 n02105162 n07693725 n03838899 n03534580 n02493509 n02096177 n07892512 n02018795 n04592741 n01728920 n07875152 n01773797 n02051845 n04273569 n03125729 n01773549 n04376876 n04336792 n02137549 n03633091 n01877812 n02128757 n04423845 n02981792 n03452741 n01735189 n04532106 n02268853 n07615774 n03538406 n01917289 n01496331 n01773549 n03788195 n02916936 n03045698 n03743016 n03868863 n04479046 n01882714 n03197337 n02013706 n07873807 n02480855 n04409515 n02930766 n03888257 n03127925 n11939491 n02328150 n02895154 n02408429 n02361337 n02092339 n01484850 n03065424 n02167151 n01798484 n02110341 n02085620 n04417672 n02097047 n04235860 n02692877 n04599235 n04201297 n02110341 n03776460 n02037110 n02174001 n02797295 n02939185 n03637318 n03710721 n02086646 n03657121 n02509815 n07836838 n04592741 n04264628 n04399382 n02814533 n04311174 n02137549 n07753113 n02704792 n02093859 n01694178 n03444034 n01784675 n02088466 n03692522 n02091244 n02133161 n09835506 n01614925 n02168699 n02113624 n03109150 n02190166 n03710721 n02092002 n01644373 n04357314 n01704323 n01882714 n03908618 n04592741 n02095570 n02870880 n04277352 n03666591 n09332890 n02090721 n04326547 n04251144 n04033901 n02977058 n03095699 n02114548 n02966193 n07717410 n04562935 n02814860 n02963159 n02090721 n03891251 n02325366 n03630383 n03742115 n03400231 n07753275 n02174001 n01877812 n02870880 n02892201 n02727426 n02115913 n02395406 n03956157 n02074367 n07760859 n04476259 n03018349 n04208210 n04560804 n03794056 n03803284 n03476684 n01514668 n04347754 n01773157 n01820546 n04443257 n03976657 n04146614 n02100583 n04476259 n01776313 n02095570 n03180011 n02110806 n02129165 n02504013 n02808304 n03854065 n02066245 n01685808 n03290653 n01924916 n03776460 n02102973 n03871628 n04266014 n04350905 n02104029 n03598930 n04344873 n10565667 n02123045 n02437312 n03759954 n02437616 n02123159 n01664065 n02916936 n03124170 n02504013 n03272562 n03617480 n02091244 n02051845 n02090622 n04376876 n04613696 n02108551 n04328186 n01682714 n03777754 n02095570 n07802026 n02437616 n02169497 n02100735 n01748264 n03942813 n04296562 n02264363 n04517823 n03207743 n02927161 n04332243 n02110185 n04409515 n02480495 n09468604 n02100735 n07716358 n15075141 n03814639 n02105251 n01537544 n01855672 n01644900 n04037443 n02870880 n02264363 n04336792 n09229709 n03146219 n02837789 n03733281 n04599235 n04008634 n02111500 n04560804 n02116738 n02009229 n03272562 n02106030 n03666591 n02356798 n09835506 n02727426 n02113712 n02397096 n04153751 n02808304 n02033041 n02992529 n02837789 n03355925 n03492542 n03991062 n02457408 n03085013 n04501370 n02843684 n02490219 n02106382 n02489166 n03670208 n02447366 n02655020 n13054560 n03445924 n03903868 n02099601 n02119022 n02422106 n04019541 n04355933 n04200800 n02123597 n13052670 n03250847 n02992529 n02951585 n03085013 
n01768244 n04525305 n03187595 n01798484 n03467068 n04370456 n03832673 n02097130 n03240683 n04371430 n04579432 n04458633 n04483307 n02980441 n02102318 n04154565 n03452741 n03961711 n02808440 n03063689 n02114855 n02096051 n04461696 n04487394 n02113186 n07892512 n03223299 n04081281 n04371774 n04417672 n03249569 n03197337 n02101006 n01768244 n02113186 n03899768 n02783161 n01734418 n01728920 n02497673 n03063599 n04479046 n02895154 n02100877 n01983481 n03908618 n04507155 n03344393 n01829413 n02342885 n02190166 n07802026 n03991062 n02974003 n01698640 n04447861 n03623198 n04347754 n07614500 n12144580 n04254680 n04482393 n01943899 n03887697 n03598930 n02483362 n02120079 n03680355 n03485407 n02130308 n02894605 n03841143 n02172182 n02727426 n04418357 n02097209 n03495258 n02701002 n03481172 n02860847 n04435653 n03384352 n04131690 n02701002 n03868863 n01644373 n03000247 n02397096 n04118776 n02117135 n02051845 n03649909 n02869837 n03661043 n02090622 n02190166 n02134084 n02701002 n03496892 n02871525 n04277352 n02966193 n07697313 n03447447 n03388183 n02483708 n03623198 n09421951 n02128925 n02823428 n02410509 n02099429 n04162706 n01601694 n06794110 n03929660 n07920052 n04273569 n02259212 n03180011 n01685808 n02095889 n04204347 n02804414 n02236044 n04111531 n02132136 n07717556 n03388183 n04200800 n04154565 n02099601 n03065424 n03942813 n01930112 n04049303 n02965783 n03444034 n03131574 n02090721 n02281787 n04389033 n07615774 n02086240 n02105412 n03794056 n03977966 n01728572 n03218198 n07584110 n02134084 n03991062 n03124170 n04070727 n03908618 n07932039 n02110806 n01630670 n03598930 n04355338 n03014705 n02172182 n03721384 n02095314 n02979186 n01742172 n04409515 n02089973 n02422699 n03763968 n02492660 n02910353 n03743016 n03196217 n02840245 n03804744 n04532106 n03773504 n02100236 n02325366 n07753275 n03483316 n01494475 n04344873 n04259630 n03627232 n02280649 n02883205 n04404412 n04357314 n04286575 n03803284 n02098413 n04209239 n01632777 n03908618 n02110185 n02457408 n02788148 n03467068 n01443537 n04310018 n03325584 n02395406 n03133878 n02134084 n02089867 n01833805 n03443371 n03838899 n03216828 n03485794 n03761084 n02500267 n04435653 n01514668 n10565667 n01675722 n02233338 n02497673 n01784675 n03761084 n02279972 n03721384 n02088238 n03017168 n01770081 n03347037 n02231487 n12768682 n03877472 n02730930 n02088238 n01592084 n03998194 n03478589 n03776460 n02086910 n02113624 n02669723 n01930112 n04356056 n12768682 n09421951 n03908618 n02120079 n02133161 n03345487 n02087046 n04118538 n03344393 n02704792 n02112018 n02100583 n03196217 n04133789 n02640242 n02817516 n01740131 n01532829 n04548362 n04509417 n02364673 n02415577 n04204347 n12267677 n03445777 n07584110 n03544143 n03764736 n07892512 n01770393 n01688243 n04033995 n04590129 n01978287 n02113712 n02093428 n01819313 n02437312 n03706229 n03535780 n02112137 n04266014 n02137549 n03630383 n03089624 n04208210 n03100240 n02480495 n02860847 n03062245 n04409515 n04404412 n02687172 n04065272 n03770439 n04049303 n03249569 n02088238 n01978287 n04532106 n01687978 n01751748 n02981792 n03792972 n04326547 n01728920 n04612504 n07714990 n03764736 n07717410 n04141327 n03032252 n02107574 n02226429 n01820546 n02088364 n03961711 n07753113 n02094114 n03733805 n02607072 n02028035 n03857828 n02807133 n04456115 n02640242 n02206856 n12144580 n02115913 n03627232 n02699494 n01756291 n03630383 n02280649 n02799071 n07749582 n01773157 n09256479 n04235860 n06874185 n02002556 n02454379 n03775546 n02177972 n02009229 n03297495 n03895866 n01694178 n01698640 n01796340 n03124043 n02107683 n02981792 
n04540053 n07695742 n02102318 n02123597 n04152593 n01695060 n04252077 n01689811 n01882714 n04141327 n07753592 n02793495 n04136333 n03876231 n02860847 n04591157 n04380533 n03259280 n03530642 n01558993 n04355338 n02017213 n02091032 n07615774 n07693725 n02319095 n04335435 n06794110 n11879895 n09332890 n02708093 n02643566 n03895866 n03838899 n03393912 n02112137 n01955084 n02094433 n02791124 n03877472 n03792782 n01756291 n02097474 n03259280 n02190166 n07715103 n02095889 n04532106 n04597913 n03743016 n04548362 n02481823 n03388549 n02319095 n03792972 n02823750 n03623198 n03933933 n02231487 n03476684 n02098286 n02169497 n03379051 n02457408 n07742313 n07615774 n02206856 n04239074 n03393912 n01592084 n03680355 n02837789 n03590841 n01986214 n03657121 n03697007 n01697457 n02447366 n04418357 n04367480 n03220513 n04479046 n03100240 n03000684 n01978287 n02105855 n03127925 n02105855 n02092002 n02028035 n02094258 n04204347 n01795545 n02125311 n02823750 n02112137 n03126707 n02123597 n03223299 n01798484 n02280649 n01776313 n02641379 n01608432 n03249569 n01630670 n03895866 n03888257 n02422106 n02093859 n04125021 n04065272 n03814906 n03992509 n04423845 n03393912 n02066245 n02114548 n10148035 n01608432 n04355338 n04277352 n03976467 n02859443 n04141076 n02127052 n02088466 n07880968 n09835506 n03874293 n03481172 n04355338 n02894605 n03544143 n02977058 n01773157 n02486261 n02112137 n03075370 n01601694 n04004767 n04273569 n04275548 n02966193 n03443371 n01755581 n02100877 n04325704 n02090379 n02088466 n03347037 n03691459 n01616318 n01820546 n04009552 n03637318 n01795545 n02108000 n01843383 n03908618 n07753275 n02950826 n04069434 n02701002 n02799071 n02786058 n02526121 n03459775 n04552348 n04462240 n02108915 n02088364 n02791270 n01682714 n02123394 n02101388 n02840245 n04493381 n01990800 n04162706 n13054560 n01632777 n02093859 n02025239 n02797295 n03179701 n02980441 n04596742 n01980166 n09835506 n03445777 n03110669 n02094114 n02086079 n01443537 n02110063 n04355338 n01560419 n03355925 n02119022 n03447447 n02219486 n02113624 n04523525 n01983481 n10565667 n03803284 n04367480 n03400231 n01980166 n04596742 n02417914 n02514041 n02033041 n02094114 n02134084 n13040303 n03763968 n04111531 n02090622 n02486261 n03452741 n04458633 n02094114 n02097658 n01978455 n02988304 n04229816 n02892767 n02804414 n03240683 n01443537 n02088632 n02172182 n02786058 n02701002 n04515003 n07693725 n03594945 n02100735 n04204347 n02093754 n09428293 n03958227 n03042490 n06359193 n02102177 n03445924 n04141975 n03690938 n02108089 n03075370 n04517823 n03208938 n03958227 n10148035 n02444819 n02092002 n10565667 n02437312 n02280649 n02909870 n03977966 n03110669 n03777568 n07930864 n04560804 n03888605 n02120505 n03014705 n01744401 n03770439 n03393912 n02727426 n02093754 n03379051 n03788195 n02099601 n02481823 n03291819 n04127249 n03803284 n03794056 n03478589 n02009912 n07579787 n02951358 n03297495 n04517823 n03794056 n03854065 n04325704 n03902125 n03207941 n03160309 n02727426 n03498962 n02056570 n01530575 n03290653 n03133878 n02099267 n03742115 n04273569 n02977058 n03724870 n04597913 n03763968 n03201208 n02672831 n02096437 n02916936 n04398044 n03110669 n01580077 n03775546 n01665541 n03109150 n01843383 n01751748 n04487394 n02804414 n04200800 n03661043 n01806143 n01641577 n02325366 n03976467 n02917067 n01819313 n04465501 n01955084 n03063599 n04099969 n02793495 n02086079 n02859443 n03690938 n13052670 n02088238 n02699494 n03721384 n02006656 n02415577 n02981792 n02492035 n03379051 n02280649 n03095699 n03720891 n03459775 n02422106 n01644373 n03347037 n02834397 
n03218198 n03627232 n04557648 n02423022 n01784675 n03425413 n04579432 n07875152 n03461385 n03404251 n03658185 n07720875 n01943899 n12620546 n03967562 n02102480 n02500267 n02087046 n03595614 n02100236 n07892512 n04505470 n01986214 n02447366 n01978455 n03942813 n02917067 n02125311 n04275548 n02077923 n01829413 n04557648 n02483362 n03250847 n02454379 n02793495 n03891251 n03938244 n03467068 n02226429 n02106166 n04465501 n04423845 n02108422 n02776631 n01773797 n03250847 n04606251 n01664065 n04127249 n04254777 n02483362 n03041632 n01729322 n02093859 n02977058 n04252225 n02116738 n02950826 n03494278 n02130308 n03786901 n04462240 n03617480 n04418357 n02879718 n03018349 n03272010 n03379051 n01614925 n02102040 n01630670 n03627232 n13037406 n09288635 n07584110 n02102177 n03347037 n01632458 n01768244 n03584254 n04346328 n03599486 n03109150 n03692522 n15075141 n01742172 n02841315 n13040303 n02117135 n02107142 n04266014 n03724870 n07248320 n02704792 n03871628 n01990800 n02129604 n02119789 n02125311 n04606251 n07768694 n03187595 n04376876 n04483307 n02110063 n02107142 n02782093 n04487081 n01675722 n01608432 n03297495 n02098105 n01950731 n04238763 n02105855 n04552348 n02051845 n02128925 n02877765 n02128385 n02877765 n01872401 n01682714 n03481172 n02509815 n02236044 n02280649 n02488702 n03492542 n01749939 n03207743 n03179701 n02100877 n01981276 n03710637 n03223299 n01630670 n03877472 n01560419 n02259212 n04127249 n03796401 n04486054 n01807496 n03492542 n01694178 n01740131 n01985128 n03637318 n03584254 n07717556 n07753592 n02791124 n03786901 n02965783 n03733131 n04458633 n01614925 n04435653 n03534580 n04532106 n02276258 n01697457 n03187595 n04590129 n04004767 n03877472 n07248320 n03207743 n02892767 n03976467 n03133878 n03594734 n01877812 n03785016 n04613696 n03534580 n02013706 n01985128 n02110806 n02441942 n04554684 n03916031 n01748264 n04204347 n03450230 n01622779 n02799071 n02017213 n03201208 n02487347 n02497673 n01795545 n02487347 n04487081 n03710637 n04026417 n07747607 n02092002 n02701002 n02492660 n03995372 n02415577 n02091831 n02423022 n02165456 n03666591 n04604644 n02107142 n02951358 n02219486 n04542943 n03777568 n03787032 n04332243 n02927161 n09288635 n01704323 n02091244 n02894605 n04554684 n02085936 n03014705 n01871265 n02113799 n02107683 n03347037 n04296562 n09256479 n02110341 n06874185 n03967562 n02708093 n04344873 n02437616 n04523525 n02099712 n04404412 n04277352 n02948072 n04111531 n03452741 n02966193 n03452741 n02100735 n04597913 n07747607 n03764736 n02123159 n02107574 n01729977 n03976467 n03788195 n07717556 n15075141 n04596742 n01729977 n03042490 n02102040 n02093991 n12144580 n02107908 n04612504 n02981792 n01644900 n02128385 n02128925 n02110806 n01748264 n02777292 n04209239 n02112350 n02361337 n04141327 n02229544 n02281406 n03895866 n02108915 n12768682 n02106030 n03218198 n04133789 n02093428 n03461385 n02119789 n03444034 n02877765 n03724870 n03773504 n01698640 n02504013 n02231487 n01558993 n06785654 n01981276 n02389026 n04277352 n02687172 n03291819 n04447861 n04310018 n02486410 n02105855 n02948072 n03785016 n02002724 n03417042 n03188531 n02259212 n02776631 n02951585 n03337140 n01751748 n02879718 n04277352 n12057211 n02951585 n03967562 n07714571 n02085620 n02510455 n02869837 n01980166 n01756291 n03792972 n02112137 n03680355 n03841143 n07565083 n07693725 n07715103 n01820546 n01873310 n03777568 n01833805 n02676566 n03447721 n02500267 n03602883 n04239074 n04118538 n04536866 n04548362 n02776631 n01667778 n03825788 n03891332 n04258138 n04542943 n02099849 n03041632 n04179913 n01632458 n01537544 
n02930766 n03814639 n02643566 n03498962 n01798484 n02692877 n03134739 n03314780 n02870880 n07768694 n04141076 n03786901 n03314780 n02172182 n02092339 n03259280 n07880968 n02115641 n01990800 n12768682 n07930864 n03527444 n02091244 n03769881 n01494475 n03249569 n02395406 n03776460 n12985857 n02056570 n02486410 n01737021 n02488702 n01978455 n01622779 n02510455 n01776313 n07831146 n02018207 n02808304 n01855032 n03803284 n02514041 n02099849 n01806143 n03837869 n03902125 n02895154 n04208210 n02107142 n01855672 n02480495 n04065272 n03761084 n02100236 n02111277 n02089867 n04552348 n02791124 n02101556 n02480855 n02097658 n03180011 n03899768 n02087394 n02236044 n02794156 n04550184 n02099849 n02111129 n03976657 n01847000 n04465501 n03063599 n03733131 n09332890 n02892767 n01978455 n02111129 n03832673 n04141327 n02276258 n03786901 n02672831 n01978455 n02807133 n03290653 n03297495 n02112350 n02894605 n03763968 n02776631 n04606251 n03498962 n04443257 n04355933 n02727426 n12057211 n04376876 n02403003 n03495258 n04584207 n04462240 n01729322 n03207941 n02483708 n10565667 n03866082 n04019541 n04154565 n13052670 n02992211 n03642806 n03372029 n03832673 n03617480 n01797886 n04591157 n04443257 n03045698 n03207941 n04081281 n02165105 n02105412 n02980441 n02097658 n02823750 n02397096 n03662601 n01514859 n03759954 n02859443 n02011460 n03467068 n04458633 n02111277 n01751748 n03127747 n03838899 n07715103 n02894605 n02793495 n07248320 n03995372 n02094258 n03937543 n03642806 n02607072 n03483316 n02090622 n04525305 n02085936 n03920288 n03063599 n01843065 n02099267 n01739381 n03793489 n02018207 n03775071 n01496331 n06785654 n03935335 n03887697 n07747607 n03773504 n07860988 n04456115 n02492035 n03874293 n04275548 n03063689 n02101006 n01807496 n02113978 n02655020 n02488702 n02174001 n04004767 n04579432 n04141975 n03584254 n02112706 n03127747 n02097047 n04458633 n02814533 n02510455 n02106166 n02492035 n13054560 n04090263 n02110341 n02965783 n04235860 n01735189 n01698640 n07697313 n02276258 n03868242 n02321529 n03042490 n04418357 n03814906 n02607072 n04517823 n03496892 n07717556 n02051845 n03291819 n09399592 n02791124 n02259212 n02233338 n07802026 n03047690 n03995372 n03530642 n02966687 n02492035 n02229544 n01689811 n01532829 n03733805 n01776313 n02112137 n04200800 n07747607 n03016953 n03729826 n07734744 n02088094 n04542943 n02667093 n03400231 n04355933 n03544143 n02128385 n04356056 n02112018 n02859443 n02128925 n02091032 n04004767 n02096051 n02113712 n02927161 n03476991 n02423022 n12144580 n04548280 n03724870 n04335435 n07583066 n02871525 n03272010 n02484975 n02786058 n09472597 n04209133 n03717622 n03598930 n02417914 n01824575 n04204238 n02999410 n04467665 n04239074 n03444034 n04263257 n03903868 n02492035 n02110627 n02007558 n02090379 n03995372 n04325704 n04277352 n02494079 n02321529 n12144580 n01687978 n03095699 n02074367 n02128925 n02363005 n02346627 n04579145 n03133878 n02776631 n03787032 n03127747 n01749939 n01860187 n04317175 n12768682 n02219486 n03630383 n02097130 n02859443 n03529860 n02229544 n03272562 n04116512 n01685808 n03902125 n02174001 n02112706 n02840245 n04141975 n01641577 n02326432 n07749582 n02797295 n04596742 n02974003 n01729977 n02504013 n02843684 n03825788 n04517823 n03216828 n04346328 n02408429 n01797886 n02493509 n02799071 n04204347 n07716906 n06874185 n02093647 n02111889 n04254777 n02966687 n03938244 n02321529 n03089624 n02096585 n02877765 n03259280 n02895154 n02107574 n07615774 n03131574 n02497673 n01688243 n04273569 n03873416 n03763968 n01534433 n03187595 n02786058 n02165105 n02099601 n02782093 
n01601694 n03459775 n01770081 n04019541 n01742172 n03452741 n03891251 n01818515 n03825788 n04141975 n02087394 n02325366 n02092339 n07584110 n03649909 n02113712 n04579145 n03908714 n04392985 n02124075 n13040303 n02051845 n02231487 n02493509 n01748264 n03457902 n03146219 n01675722 n03787032 n02361337 n07579787 n04479046 n02168699 n02992211 n02113624 n02974003 n04357314 n07920052 n07615774 n03452741 n03534580 n02094258 n04505470 n02641379 n03868863 n02422699 n03249569 n02123394 n02106662 n01784675 n04371430 n04557648 n02514041 n02051845 n03916031 n01751748 n02504458 n07734744 n02494079 n03902125 n02930766 n03977966 n03724870 n04116512 n03272010 n04049303 n03590841 n02361337 n04044716 n03680355 n03637318 n11939491 n03866082 n03272010 n02119789 n07615774 n03602883 n03492542 n04310018 n02231487 n02110185 n03544143 n03995372 n02268443 n01440764 n02480855 n02317335 n01692333 n02109961 n03379051 n03075370 n02687172 n04442312 n03584254 n01729977 n02727426 n03134739 n01828970 n02093428 n02233338 n02091831 n02939185 n04579432 n04266014 n03291819 n03954731 n03838899 n07871810 n02077923 n12057211 n02415577 n02115641 n03781244 n07880968 n07711569 n03838899 n03180011 n02114712 n03887697 n02930766 n01644900 n02111277 n02999410 n03534580 n02497673 n02410509 n02777292 n03461385 n04086273 n03627232 n01689811 n09193705 n01955084 n03916031 n04355338 n04259630 n03617480 n01498041 n02169497 n02423022 n02422106 n02699494 n02494079 n04515003 n03724870 n02113799 n03930630 n04458633 n04065272 n02939185 n02281787 n02504458 n02190166 n03691459 n02408429 n07579787 n02114712 n04125021 n04461696 n03384352 n03388183 n03837869 n03485407 n01986214 n03255030 n02804610 n03255030 n01924916 n04398044 n04540053 n02667093 n03146219 n02483708 n03125729 n09256479 n02089078 n02607072 n03742115 n04067472 n02114712 n03196217 n04254120 n02105412 n03250847 n02111500 n07565083 n04162706 n01917289 n03018349 n03530642 n02107908 n02169497 n02018795 n03658185 n03424325 n02018207 n03630383 n03903868 n07745940 n02138441 n03372029 n02319095 n01855672 n03062245 n07753592 n04147183 n04254777 n03838899 n02219486 n04270147 n07871810 n01910747 n02999410 n12768682 n03649909 n04120489 n02002724 n01756291 n02445715 n02009912 n01798484 n04532670 n04604644 n04044716 n02169497 n02669723 n04461696 n02134084 n03743016 n01798484 n03404251 n02783161 n03201208 n02134084 n02607072 n03180011 n02094433 n03388549 n07590611 n02640242 n02085782 n02871525 n03967562 n02119789 n04507155 n04149813 n03492542 n02437312 n02098105 n01443537 n01632458 n02860847 n02113023 n03337140 n12620546 n03459775 n11879895 n03085013 n02096585 n02088466 n01751748 n02497673 n02236044 n03109150 n02130308 n04325704 n03676483 n02105412 n03180011 n02787622 n02025239 n01693334 n02325366 n02281787 n04597913 n04346328 n04404412 n02006656 n02107312 n02165456 n03042490 n04418357 n02093428 n04133789 n07754684 n03075370 n03916031 n04536866 n07711569 n02895154 n02105251 n02692877 n03344393 n04493381 n04579145 n03201208 n04243546 n02167151 n01797886 n09256479 n01582220 n04548362 n03476684 n04606251 n04579432 n02086910 n02134084 n02109525 n04238763 n03764736 n04044716 n04548362 n02692877 n03207941 n04229816 n03598930 n04591157 n02317335 n01734418 n15075141 n03825788 n04536866 n04254777 n02277742 n03877845 n02747177 n01667778 n01664065 n03180011 n02701002 n13040303 n03388549 n04591713 n04389033 n02699494 n02105162 n02280649 n04254777 n02607072 n01985128 n03045698 n03717622 n02086240 n03903868 n02326432 n02229544 n03530642 n01685808 n02091467 n03544143 n03902125 n02125311 n09399592 n04070727 n07730033 
n07684084 n04398044 n03372029 n03483316 n03495258 n01728572 n04037443 n02395406 n03457902 n03761084 n01734418 n02090721 n03976657 n03785016 n01514668 n04357314 n02835271 n02504013 n02489166 n03530642 n02950826 n02111889 n04371774 n04560804 n03445924 n02091831 n07753592 n03447721 n01770081 n02487347 n02794156 n02097209 n03891251 n02790996 n03109150 n04380533 n03595614 n04153751 n04591713 n02108915 n04429376 n01641577 n04264628 n03271574 n02114367 n07930864 n02105641 n02104365 n03717622 n04423845 n02094258 n02116738 n01692333 n02909870 n02606052 n02099849 n02363005 n07734744 n02841315 n01860187 n02090721 n03841143 n02892201 n04125021 n04612504 n01537544 n04505470 n02281406 n03983396 n02123045 n01784675 n02493509 n03476991 n03534580 n02123159 n02808440 n04074963 n01616318 n03786901 n03721384 n02086240 n02488702 n03642806 n03160309 n01796340 n13044778 n09256479 n03089624 n02086910 n04604644 n04040759 n07584110 n04552348 n04149813 n02066245 n01580077 n04443257 n04336792 n02107683 n01797886 n02134418 n02134418 n01632777 n06359193 n01797886 n03485407 n04259630 n03992509 n07248320 n04486054 n03026506 n02088632 n03124043 n02442845 n02091467 n03376595 n04310018 n02966687 n03777568 n03100240 n04350905 n02843684 n02109961 n01631663 n03240683 n03141823 n02091635 n01443537 n11939491 n02002724 n03733281 n02106662 n03942813 n03337140 n03777568 n04251144 n07716906 n01820546 n03929660 n03478589 n02441942 n02364673 n09835506 n04515003 n02264363 n01773157 n01770393 n03777568 n04049303 n02219486 n02130308 n02437312 n02815834 n02093647 n01616318 n04332243 n12620546 n10148035 n02927161 n02128757 n03496892 n03417042 n04200800 n02484975 n01689811 n02107574 n03976657 n03998194 n02088632 n04243546 n03788365 n02087046 n10565667 n03832673 n02412080 n01558993 n03492542 n04540053 n01796340 n04376876 n02395406 n03075370 n07753592 n02481823 n02457408 n02110806 n03877472 n01667778 n03131574 n03956157 n02108422 n02114548 n03272010 n03394916 n01774384 n03623198 n02027492 n04099969 n02106662 n02951358 n01798484 n13133613 n03207743 n04560804 n02268443 n03775071 n04346328 n01930112 n03584254 n02790996 n09256479 n01985128 n02480495 n02268853 n03627232 n03180011 n02233338 n03982430 n02841315 n03649909 n04336792 n09468604 n02056570 n02787622 n03764736 n02442845 n02437616 n03445924 n01917289 n02107312 n02137549 n03599486 n03721384 n04041544 n01824575 n04285008 n01687978 n01514668 n04554684 n04209239 n03272562 n03425413 n02797295 n02106382 n06359193 n03642806 n01677366 n03134739 n02105641 n01985128 n03594945 n07583066 n02667093 n02086646 n07590611 n02111889 n03857828 n04259630 n02730930 n04285008 n03095699 n03761084 n02167151 n04404412 n04254120 n04461696 n04192698 n01873310 n03763968 n02804414 n04325704 n01682714 n02120505 n03584829 n04356056 n04476259 n09332890 n04399382 n03676483 n03961711 n09332890 n02096294 n04532106 n04149813 n03891251 n06874185 n02769748 n04485082 n04277352 n03793489 n03788365 n02389026 n03709823 n03032252 n02606052 n03271574 n03492542 n01665541 n01675722 n03691459 n07892512 n02799071 n02007558 n02510455 n03742115 n04136333 n03630383 n02910353 n02111129 n02488702 n01950731 n04204238 n04461696 n02102318 n03538406 n03916031 n02130308 n04311174 n01667114 n02115641 n04487394 n02233338 n02099267 n01797886 n02051845 n04428191 n02124075 n04532670 n03775546 n07892512 n02100877 n04398044 n04590129 n02101388 n04254680 n04485082 n03026506 n04111531 n03924679 n01667778 n02169497 n04311004 n03947888 n02093754 n01818515 n03763968 n04380533 n02077923 n02488702 n01770393 n02226429 n07932039 n02095314 n01847000 n03250847 
n04296562 n02100236 n03045698 n07590611 n03787032 n02101006 n01873310 n02009912 n02096051 n07749582 n02112018 n03000134 n03447721 n04118776 n03970156 n01944390 n07613480 n02879718 n01873310 n03187595 n03325584 n01496331 n02097298 n03793489 n02111500 n04311174 n01739381 n02114548 n02165105 n01930112 n02823428 n04111531 n02137549 n04355338 n03916031 n03791053 n02113186 n04081281 n02104029 n03483316 n04579145 n01558993 n01748264 n02791270 n03929660 n02129604 n02102040 n03796401 n02007558 n11879895 n06794110 n07614500 n02006656 n04065272 n02486261 n02640242 n01806143 n03991062 n02788148 n09472597 n03935335 n02510455 n03958227 n02105641 n04428191 n03018349 n02116738 n03773504 n02087046 n03709823 n01749939 n02190166 n02085782 n01843065 n03743016 n01828970 n01828970 n03908714 n03937543 n02817516 n04592741 n02869837 n03874293 n04540053 n03250847 n02971356 n02114548 n02113023 n04081281 n03857828 n03450230 n04127249 n02108089 n02093428 n04392985 n04254120 n02782093 n02012849 n03179701 n04357314 n13133613 n02992211 n04243546 n01664065 n01695060 n04005630 n03400231 n03733131 n02107142 n02104365 n04597913 n04238763 n04371430 n03877472 n04589890 n04154565 n01734418 n03781244 n07745940 n02109961 n01755581 n07742313 n04118776 n01734418 n02085782 n03100240 n02013706 n03658185 n03290653 n02105505 n03888257 n02865351 n02277742 n02099849 n03131574 n02102177 n02093428 n02814860 n01734418 n01580077 n04136333 n04483307 n01774384 n02364673 n06874185 n07754684 n07734744 n04487081 n07802026 n09399592 n03602883 n04435653 n02096437 n02672831 n02107683 n02086646 n01698640 n03485794 n03967562 n01664065 n03837869 n01950731 n02909870 n01756291 n02091467 n03658185 n02690373 n02012849 n03709823 n02123597 n13044778 n02167151 n03425413 n07730033 n03721384 n03126707 n02883205 n02111889 n03866082 n01698640 n04584207 n03485407 n02105251 n03743016 n03314780 n03769881 n01494475 n04005630 n03291819 n03721384 n04118776 n03868242 n04265275 n09835506 n03443371 n03459775 n04501370 n01688243 n03494278 n02486410 n02105251 n03956157 n02410509 n02116738 n04532106 n02100236 n04591157 n02398521 n04131690 n03935335 n02098105 n04428191 n02110627 n03970156 n03950228 n02110341 n04201297 n07932039 n07920052 n03063689 n02137549 n03100240 n01665541 n04099969 n02106382 n02009912 n03223299 n02091635 n03982430 n04548362 n01978455 n01614925 n02841315 n07711569 n04335435 n02892767 n03345487 n02948072 n04127249 n02909870 n02099712 n04162706 n01981276 n02085620 n02917067 n07716358 n04332243 n03724870 n04074963 n01984695 n03794056 n03929855 n01773157 n01806567 n04350905 n03804744 n10565667 n07747607 n03218198 n03942813 n01877812 n03924679 n07753592 n02113799 n02086079 n03814639 n02834397 n02109525 n07720875 n04273569 n03018349 n03404251 n03888257 n03485407 n07730033 n13052670 n02095889 n01739381 n01514859 n02106030 n07860988 n03775546 n04263257 n03485794 n03924679 n04228054 n02319095 n02747177 n03770679 n03980874 n02097658 n02988304 n07579787 n02137549 n01644373 n02870880 n04069434 n13040303 n02106550 n02804414 n07565083 n03877845 n03187595 n02074367 n02099712 n01950731 n03884397 n03776460 n04209133 n03697007 n01978287 n03792972 n07716906 n04146614 n03887697 n02095889 n02096177 n04435653 n02091032 n02840245 n02097658 n02002724 n02058221 n03127747 n04501370 n01817953 n02113186 n01877812 n04004767 n02441942 n02408429 n04116512 n02134418 n03529860 n03041632 n03447447 n03188531 n03770439 n03633091 n02086646 n02011460 n04209133 n04229816 n01622779 n01667114 n01685808 n02113186 n02097047 n03876231 n02699494 n03961711 n03530642 n03452741 n02708093 n01985128 
n02894605 n03124170 n03633091 n13054560 n02112137 n02120505 n01532829 n03929660 n04589890 n04507155 n01685808 n02077923 n04523525 n04592741 n02056570 n03841143 n02226429 n04243546 n04285008 n02483708 n03944341 n04553703 n03977966 n02441942 n01818515 n03871628 n03692522 n07768694 n02607072 n04456115 n04590129 n03476991 n02091134 n03394916 n01990800 n02066245 n02279972 n01944390 n02105251 n04273569 n03857828 n02110185 n02096051 n01770081 n02259212 n02799071 n01806143 n03476684 n01796340 n03100240 n01632777 n02190166 n02066245 n03976657 n03788365 n02108422 n03400231 n04589890 n04435653 n02326432 n03954731 n04591157 n02823428 n07716358 n02088632 n01824575 n01631663 n02086079 n03995372 n04517823 n02480855 n03445777 n04357314 n03884397 n03445924 n03777754 n03133878 n03873416 n02086240 n04553703 n04133789 n07693725 n02895154 n02317335 n04613696 n01819313 n03977966 n02109047 n03000247 n02443114 n03272010 n01697457 n04200800 n02109047 n02840245 n01739381 n06794110 n01756291 n01748264 n03950228 n02971356 n02123159 n04346328 n02092339 n01729977 n03187595 n02454379 n03794056 n03967562 n04039381 n02879718 n02441942 n04515003 n04311174 n03100240 n03868242 n03126707 n04461696 n13054560 n04398044 n01667114 n01664065 n02106382 n04613696 n02948072 n12144580 n03877472 n02096585 n03935335 n04429376 n02110185 n03207941 n02123045 n03788195 n04259630 n02097209 n02092002 n01877812 n03529860 n02966687 n03980874 n02013706 n02776631 n02445715 n01496331 n01807496 n02112137 n02086646 n04118776 n03658185 n01985128 n02504013 n12998815 n02233338 n12057211 n07875152 n03840681 n03721384 n03908714 n02412080 n02113799 n02096437 n02669723 n03775546 n03393912 n07718472 n01883070 n02120079 n01532829 n04443257 n02917067 n02877765 n02115913 n07920052 n01773797 n02123159 n03447447 n04613696 n03933933 n04380533 n01728572 n03535780 n04599235 n02877765 n13037406 n02971356 n02504458 n02101388 n04370456 n09229709 n02113624 n02492035 n02089867 n09421951 n02219486 n02494079 n02963159 n03930630 n02206856 n02091831 n02504013 n02097298 n09428293 n04596742 n01632777 n02018207 n03344393 n03388549 n03791053 n01729322 n02018207 n03599486 n03297495 n02093859 n01629819 n04037443 n01693334 n02058221 n03141823 n04252225 n04418357 n01774384 n03871628 n03598930 n03032252 n02321529 n02117135 n02206856 n03944341 n02111129 n02346627 n03404251 n02113023 n02009229 n02879718 n01748264 n01773549 n04252077 n02825657 n03476991 n03584254 n04350905 n13052670 n04141076 n03388549 n02415577 n02607072 n04346328 n01914609 n02641379 n03782006 n01601694 n03388183 n03803284 n02690373 n02106662 n02097047 n07892512 n02277742 n10148035 n02412080 n02091635 n01917289 n03742115 n04074963 n03124043 n02669723 n04507155 n02808304 n02111500 n03761084 n01797886 n03874599 n03476991 n04404412 n02108915 n01694178 n02802426 n02974003 n03028079 n03944341 n03742115 n02111500 n02117135 n02092339 n04133789 n03868242 n07714990 n07579787 n04252077 n02096051 n02102480 n02174001 n03085013 n01740131 n02107312 n04162706 n02869837 n02412080 n04612504 n01807496 n04041544 n03459775 n02017213 n02101006 n07749582 n02109047 n07718472 n02877765 n01622779 n01882714 n03781244 n02137549 n02342885 n03498962 n04127249 n06785654 n02105412 n03447447 n09193705 n02326432 n04590129 n02892201 n03425413 n04235860 n03000247 n03272562 n03598930 n02174001 n03347037 n07920052 n01784675 n07718747 n02279972 n02097298 n03394916 n03977966 n03692522 n03825788 n07717556 n02727426 n02396427 n07747607 n04330267 n03062245 n02389026 n02871525 n02107142 n02012849 n02077923 n03532672 n03216828 n02486261 n01494475 n04251144 
n02109047 n03649909 n01873310 n03710637 n01632458 n02077923 n04263257 n04423845 n02279972 n01728572 n02128757 n04552348 n07747607 n07932039 n02071294 n02951585 n02123159 n04201297 n03680355 n02892767 n03930630 n01798484 n01729977 n01798484 n04371430 n02090379 n03347037 n03998194 n03947888 n02108422 n02837789 n03888257 n01739381 n04179913 n07590611 n02279972 n03063599 n02113712 n02444819 n03532672 n02687172 n07720875 n01819313 n02445715 n03793489 n02092002 n03899768 n03424325 n02978881 n01534433 n02999410 n04557648 n01608432 n02391049 n03929660 n02835271 n03876231 n02102318 n02777292 n04004767 n03933933 n07836838 n01751748 n07718472 n04254777 n03424325 n03063599 n02095570 n01824575 n04311004 n01677366 n03062245 n03627232 n03134739 n04372370 n03075370 n02802426 n03447721 n01829413 n02090379 n04192698 n03743016 n01692333 n02099601 n03720891 n02951585 n01532829 n02281406 n02096177 n03920288 n02927161 n04179913 n02100236 n04515003 n07802026 n02088632 n03950228 n09193705 n03841143 n02093647 n04336792 n04357314 n03929660 n02093647 n02093428 n04049303 n01873310 n02268853 n03838899 n01484850 n03337140 n01537544 n02174001 n03063599 n02640242 n03721384 n04596742 n02795169 n02492660 n02892201 n02361337 n04417672 n02113624 n02028035 n02999410 n01629819 n02115913 n02089078 n01768244 n04263257 n01944390 n01945685 n02071294 n03937543 n02391049 n02018207 n02129165 n02074367 n01518878 n03445777 n04149813 n02669723 n02097047 n02865351 n07753592 n02814533 n03874599 n07720875 n04116512 n02417914 n02027492 n03877845 n02123159 n04264628 n02236044 n02108089 n04133789 n04147183 n02085620 n02091134 n03944341 n13037406 n02422106 n01498041 n03775071 n04357314 n02102040 n01682714 n01775062 n03014705 n01693334 n01616318 n04604644 n03109150 n02088238 n01981276 n02422106 n01985128 n04026417 n01644900 n02095570 n04266014 n02236044 n02115913 n01883070 n03840681 n02481823 n03447721 n01981276 n03673027 n02835271 n02123159 n02113186 n03947888 n02100877 n03814639 n02510455 n04037443 n03929660 n03837869 n02791270 n03461385 n02951585 n04525305 n02788148 n02165105 n04592741 n02091467 n03188531 n02091134 n03617480 n03954731 n04328186 n02105162 n02870880 n03028079 n04596742 n04204347 n02108422 n01740131 n02363005 n03840681 n04116512 n02138441 n04367480 n01773797 n04350905 n02095314 n09229709 n02494079 n03788365 n02117135 n01641577 n04192698 n02087046 n12620546 n02410509 n03777568 n02948072 n03662601 n02690373 n02441942 n03127925 n02066245 n02097130 n03187595 n02977058 n03977966 n03291819 n02788148 n03482405 n02090721 n02105641 n04525038 n04328186 n03424325 n03498962 n03223299 n04552348 n09193705 n07697537 n04596742 n01797886 n01980166 n02093991 n01688243 n01817953 n03485407 n01795545 n02794156 n02102480 n01819313 n03188531 n02965783 n03534580 n02395406 n02033041 n03337140 n04200800 n02797295 n02804414 n02088364 n03000247 n03937543 n02389026 n01682714 n02101388 n01685808 n07880968 n02509815 n03938244 n04532670 n03967562 n03196217 n02892767 n01843383 n02978881 n01748264 n04423845 n02396427 n03388043 n03000134 n04429376 n03483316 n03485407 n02256656 n04086273 n02356798 n02747177 n01773157 n03297495 n02403003 n07718472 n03445924 n01843383 n02328150 n03447447 n02124075 n02098105 n06596364 n03388183 n06596364 n02504013 n04041544 n02009912 n02093859 n04350905 n02317335 n07871810 n02105855 n02607072 n02095570 n02389026 n06785654 n09421951 n02114855 n03216828 n01855032 n03095699 n02115641 n01955084 n03095699 n03133878 n03902125 n02395406 n04371774 n04525305 n03345487 n02108551 n01774750 n02480495 n03594945 n02091635 n04557648 n03388549 
n01784675 n13040303 n13037406 n01776313 n02099601 n03134739 n02110185 n01537544 n13133613 n02102040 n01530575 n01735189 n01491361 n07583066 n02137549 n03908714 n03045698 n01914609 n02326432 n01631663 n03868242 n03920288 n03729826 n02002724 n03776460 n03535780 n03146219 n02094258 n03841143 n02797295 n02500267 n04392985 n02504458 n01773797 n04325704 n03920288 n02999410 n02655020 n02097474 n09472597 n02099712 n02980441 n04461696 n02814533 n03495258 n01784675 n03000684 n07760859 n04141327 n02641379 n04200800 n04141327 n01943899 n04037443 n04357314 n02097474 n03857828 n01630670 n02417914 n02747177 n04590129 n02037110 n03841143 n04204238 n04252225 n02791270 n09193705 n04376876 n02815834 n01817953 n04356056 n02007558 n02917067 n03544143 n03954731 n03372029 n02930766 n04310018 n03630383 n04009552 n02132136 n07745940 n02094114 n02480855 n02093991 n02113624 n03662601 n12144580 n02443114 n01914609 n04040759 n02834397 n02276258 n04557648 n07718472 n02108915 n07753113 n02093428 n03976467 n01984695 n02492035 n04275548 n02100877 n04254777 n02799071 n03908618 n03773504 n03347037 n02107574 n03529860 n02093256 n03291819 n02110958 n04275548 n04273569 n02113023 n03958227 n04417672 n03272562 n01980166 n01514668 n02002556 n02086079 n02104365 n01677366 n03770679 n02096177 n02094258 n01440764 n01943899 n02099849 n03899768 n01729322 n01776313 n06359193 n02447366 n03857828 n03384352 n02111277 n02226429 n04366367 n01737021 n01537544 n02951358 n04371430 n03196217 n02100236 n04443257 n04479046 n03983396 n03218198 n02105505 n01978287 n04286575 n03866082 n04208210 n03891332 n03857828 n02504013 n03982430 n04554684 n04317175 n04552348 n12057211 n02483362 n02097474 n02361337 n02120505 n03594945 n03498962 n01978455 n01829413 n02105505 n01978455 n04356056 n07718472 n01518878 n02795169 n03617480 n03372029 n02099267 n04229816 n07717410 n02895154 n02110185 n04149813 n02056570 n04404412 n03028079 n02110341 n04120489 n02804414 n02988304 n02167151 n04392985 n07747607 n02966687 n09399592 n03761084 n03400231 n04136333 n04423845 n02978881 n02099429 n07892512 n02137549 n01807496 n04033995 n03876231 n03063599 n04005630 n02489166 n03197337 n04456115 n03388043 n03062245 n03899768 n04371430 n03729826 n02165456 n02769748 n02412080 n02086240 n01665541 n02412080 n02445715 n01735189 n02086079 n02110185 n07697537 n02112350 n02137549 n02398521 n02971356 n03980874 n02106030 n02980441 n09193705 n03393912 n04562935 n03691459 n02870880 n02443484 n02979186 n02100735 n01682714 n02607072 n01688243 n02454379 n02443484 n07248320 n03814639 n04509417 n04019541 n03938244 n01667114 n03791053 n04442312 n02226429 n01693334 n02794156 n01773549 n01685808 n03598930 n02017213 n02124075 n02091134 n01530575 n03657121 n01768244 n04552348 n02106030 n01667114 n02790996 n02699494 n03291819 n01694178 n02423022 n01855672 n03459775 n04070727 n03770439 n03709823 n01924916 n06785654 n03272562 n02099429 n03100240 n02174001 n06794110 n03759954 n04357314 n03584829 n03345487 n03443371 n02100236 n03709823 n04350905 n02086910 n02977058 n02112018 n04409515 n04118776 n03376595 n02101556 n02776631 n02108551 n03291819 n07745940 n02109047 n04336792 n03494278 n03388183 n02398521 n03485794 n03018349 n03967562 n02116738 n02085620 n02108551 n02894605 n07695742 n01693334 n04356056 n02120079 n04540053 n03134739 n01644900 n01697457 n02108000 n03720891 n03733281 n04404412 n02098105 n02089867 n01530575 n03884397 n03602883 n02090721 n04228054 n03208938 n02483708 n02017213 n02097047 n02509815 n02447366 n03532672 n01518878 n02123045 n01847000 n02690373 n02092002 n02096177 n04487081 n02526121 
n02124075 n03717622 n02106030 n02002724 n03240683 n03902125 n03709823 n02974003 n02100583 n03201208 n01833805 n13052670 n02219486 n02107574 n07742313 n02112018 n02489166 n02441942 n07753275 n01819313 n02643566 n03110669 n04482393 n04613696 n02129604 n02088466 n02134418 n02114855 n04591157 n02277742 n02112350 n03590841 n04476259 n02326432 n01755581 n11939491 n04264628 n12998815 n02101388 n02137549 n02236044 n02123394 n02909870 n03733805 n04120489 n03958227 n02100877 n02169497 n02168699 n03794056 n04146614 n03787032 n03937543 n03388549 n01978455 n06874185 n03717622 n07875152 n01820546 n03445777 n02109961 n04127249 n07716358 n03661043 n01534433 n03982430 n02490219 n04152593 n03062245 n01644373 n02951358 n04041544 n02974003 n02102318 n04127249 n02500267 n04548280 n02690373 n02125311 n01950731 n02007558 n12267677 n03045698 n01443537 n02447366 n02124075 n03916031 n03146219 n02843684 n02980441 n03187595 n02091134 n03124170 n07749582 n03594734 n02666196 n03782006 n07697537 n02111889 n03724870 n02085620 n03492542 n02102177 n04515003 n02167151 n03877472 n07720875 n02097209 n03208938 n01601694 n04067472 n02174001 n02123394 n07583066 n03599486 n04005630 n01698640 n03047690 n03793489 n02916936 n02124075 n01592084 n03127747 n02130308 n02094114 n04131690 n03063599 n02110341 n04008634 n03218198 n01496331 n03146219 n03496892 n02097047 n02397096 n03942813 n03787032 n02125311 n02119789 n01945685 n02105162 n03127747 n02107142 n02992529 n12620546 n04067472 n01630670 n02423022 n02948072 n01491361 n04067472 n04263257 n03223299 n02088238 n02231487 n01739381 n01532829 n02099849 n09256479 n01580077 n03895866 n02037110 n07742313 n02091032 n03841143 n01986214 n04356056 n02971356 n01774384 n02097474 n04019541 n07753275 n01944390 n04371774 n02120079 n07932039 n04033901 n04074963 n02843684 n03457902 n02089078 n03544143 n02088238 n02342885 n01753488 n02895154 n04009552 n01806143 n03794056 n01740131 n02423022 n02033041 n03942813 n04023962 n03630383 n04251144 n04376876 n02107142 n01740131 n03075370 n01494475 n04590129 n02786058 n01773549 n02028035 n01978287 n02966193 n03982430 n02442845 n07734744 n07615774 n03970156 n03000134 n01883070 n02124075 n07892512 n03970156 n03958227 n04532670 n03743016 n04479046 n02011460 n02391049 n03877845 n01981276 n02488291 n01592084 n03544143 n02168699 n01494475 n03887697 n03249569 n03777754 n02100236 n02017213 n02999410 n03590841 n03476991 n04192698 n01582220 n04604644 n03658185 n03773504 n02640242 n01819313 n02906734 n07697537 n02403003 n04270147 n03544143 n02859443 n03733131 n03733131 n04251144 n01806143 n04254120 n04350905 n02090379 n01582220 n03868242 n02088466 n02793495 n04136333 n03476684 n02129604 n02112137 n01622779 n02087046 n02114548 n07875152 n01773549 n03721384 n01843065 n01601694 n04254680 n07860988 n04523525 n01843383 n03314780 n04069434 n02791270 n04125021 n07880968 n03314780 n04346328 n04335435 n02093647 n04532106 n04465501 n02102177 n04344873 n03788195 n03803284 n09835506 n01872401 n01688243 n02233338 n03633091 n03888605 n02095570 n04579145 n03598930 n02980441 n03095699 n02088466 n04296562 n01739381 n02033041 n04346328 n01695060 n03733281 n04265275 n01796340 n07880968 n02894605 n04465501 n01644900 n03100240 n03447721 n03792782 n01828970 n02486261 n02690373 n01774750 n09229709 n03045698 n03874293 n12267677 n03637318 n02398521 n02782093 n01728572 n02457408 n04005630 n04525305 n01820546 n02138441 n03532672 n02808440 n12985857 n02085620 n04584207 n02125311 n07742313 n03355925 n03868242 n03871628 n03840681 n04310018 n02793495 n02489166 n02727426 n04592741 n02841315 n02490219 
n04273569 n04228054 n03991062 n02093647 n02113023 n01698640 n04591713 n02111277 n04596742 n02110627 n03720891 n04251144 n03179701 n02091244 n07745940 n03000247 n04243546 n07697313 n03127925 n01985128 n03942813 n02013706 n02483708 n01632458 n02279972 n02009912 n02256656 n01768244 n02091635 n03770679 n12144580 n01806567 n04536866 n03991062 n02391049 n02326432 n04443257 n02097047 n02101006 n02051845 n03933933 n03595614 n07695742 n07579787 n02120079 n02110627 n02095314 n03201208 n03803284 n02444819 n03899768 n02233338 n02747177 n03483316 n04136333 n03220513 n03623198 n03134739 n03630383 n02808440 n03769881 n02799071 n04019541 n01498041 n04428191 n02094433 n03450230 n02092002 n03929660 n03000134 n01914609 n03721384 n04389033 n02128385 n03000247 n02091244 n02108000 n02110063 n02128385 n02641379 n01664065 n02109525 n07802026 n07714571 n03691459 n02109961 n01688243 n04515003 n04252225 n02877765 n03476991 n07717410 n04389033 n02129165 n01440764 n12985857 n04371430 n03447721 n02441942 n02110958 n02094433 n04146614 n03857828 n03788195 n03804744 n02102040 n02317335 n09246464 n02110958 n02256656 n03781244 n01689811 n02487347 n02092002 n03733805 n01531178 n02454379 n02088238 n01729322 n01945685 n01774384 n01632458 n03776460 n01877812 n07615774 n02423022 n03384352 n01518878 n03000684 n02018207 n03876231 n02113799 n01855032 n02910353 n02109047 n03967562 n02112018 n02708093 n02417914 n13040303 n04005630 n02794156 n01689811 n02113186 n03476991 n03773504 n03868863 n03788365 n02133161 n02708093 n07718747 n02106030 n03916031 n02493793 n02277742 n02701002 n04238763 n07742313 n01755581 n02321529 n01728572 n12057211 n03016953 n04009552 n02107312 n04486054 n03837869 n04127249 n03837869 n03895866 n03032252 n04380533 n02777292 n01729322 n02607072 n03792972 n03930630 n02814533 n04005630 n04099969 n02110806 n03594734 n03697007 n02071294 n02346627 n02096294 n01440764 n12267677 n02097658 n02111889 n03825788 n04153751 n04259630 n04254680 n02092002 n01833805 n04200800 n04435653 n07753113 n03888257 n01744401 n04192698 n02415577 n04550184 n02097474 n02793495 n04252225 n03388549 n02422106 n02807133 n02090622 n03598930 n01592084 n01924916 n07584110 n02114712 n03874599 n03590841 n09246464 n04589890 n03794056 n03180011 n02104029 n03272562 n04263257 n03874599 n07714990 n02791124 n03690938 n02837789 n02138441 n02859443 n03026506 n02442845 n04004767 n02397096 n04120489 n01882714 n03124170 n03992509 n01818515 n03124170 n02002724 n03680355 n02096051 n02492660 n04033995 n04019541 n02108915 n01872401 n04366367 n04501370 n04355338 n03661043 n02536864 n01796340 n02326432 n02493509 n02099849 n02096051 n02974003 n03481172 n03089624 n01773157 n03445777 n02138441 n07565083 n03916031 n02363005 n01944390 n02093754 n04560804 n12267677 n03967562 n07932039 n03666591 n02256656 n03770439 n04509417 n03720891 n07565083 n07875152 n01843383 n03481172 n02708093 n02165105 n02123394 n01644900 n02109961 n04335435 n02096177 n02110185 n02687172 n04116512 n01693334 n03133878 n02493793 n01806143 n07892512 n03670208 n04264628 n03014705 n07615774 n02992211 n03063599 n04209239 n02489166 n07920052 n04081281 n04486054 n02783161 n03594734 n03016953 n02834397 n04409515 n03544143 n01924916 n02174001 n04599235 n07754684 n07753275 n02112706 n03197337 n02095570 n02120079 n03804744 n01820546 n02099849 n04004767 n02092339 n03983396 n01749939 n04162706 n04264628 n03598930 n02098286 n07892512 n03929660 n04209133 n03000684 n04589890 n02963159 n02206856 n03970156 n04418357 n02090379 n03785016 n02488291 n04501370 n04118538 n04311174 n03838899 n02906734 n01665541 n03188531 
n03642806 n03220513 n02105855 n03642806 n02123394 n02457408 n03208938 n04536866 n02056570 n02088466 n04019541 n02165456 n02097209 n02108000 n04536866 n02777292 n02939185 n04366367 n01616318 n03337140 n04229816 n03792782 n07831146 n03903868 n03041632 n02089867 n07695742 n03534580 n03271574 n01843383 n07836838 n02279972 n07584110 n02119789 n01843065 n02206856 n03042490 n02104029 n04447861 n03814906 n02280649 n03494278 n02256656 n02909870 n03602883 n01748264 n02093428 n03841143 n03710193 n01675722 n02395406 n03250847 n02397096 n12267677 n03770679 n02007558 n03642806 n07871810 n03742115 n02190166 n07716358 n01978455 n02169497 n04204347 n03417042 n02793495 n03530642 n03188531 n02105505 n02804414 n02093754 n02092339 n02860847 n02085936 n02786058 n02056570 n02165456 n03710637 n04200800 n04592741 n03935335 n02102973 n04296562 n04328186 n12267677 n01824575 n02494079 n02730930 n02356798 n03937543 n03290653 n02109047 n02112137 n02104365 n02085620 n09246464 n01817953 n03345487 n02410509 n02281787 n04487081 n01770393 n03814906 n01728920 n02481823 n01768244 n03891251 n04111531 n03347037 n03929660 n02951585 n02840245 n02489166 n01756291 n02669723 n07583066 n02268443 n04552348 n04263257 n04371774 n03379051 n04355338 n04355933 n04118538 n04099969 n04507155 n02480495 n03814639 n02105855 n02487347 n04553703 n04310018 n03895866 n03000247 n01796340 n03903868 n03903868 n07583066 n04192698 n02018795 n02096177 n02098286 n03970156 n03733281 n07614500 n03388043 n02110958 n01601694 n07715103 n02127052 n02325366 n03673027 n02950826 n02091467 n03110669 n03840681 n03680355 n02441942 n03485407 n02097474 n02398521 n02776631 n02701002 n02325366 n03388043 n07873807 n03763968 n04515003 n02094258 n02422699 n01667114 n04263257 n07590611 n02110185 n03899768 n03877845 n03197337 n12144580 n04152593 n02108089 n02493793 n02105855 n03481172 n04228054 n03899768 n02093754 n01737021 n02415577 n01685808 n01773157 n02101388 n03710721 n01873310 n03627232 n02708093 n02102318 n07747607 n02791124 n02870880 n03388549 n04372370 n03775071 n04347754 n03026506 n07720875 n01883070 n03690938 n03776460 n01558993 n04552348 n03457902 n07768694 n04356056 n04485082 n09288635 n07760859 n03991062 n04136333 n03938244 n02102177 n03991062 n04550184 n04127249 n01498041 n03691459 n03255030 n02417914 n02099429 n04254777 n04277352 n01855032 n01983481 n04604644 n02102973 n02790996 n02094258 n02489166 n03887697 n02443114 n04228054 n01667778 n02172182 n04133789 n03196217 n02018207 n03124170 n02841315 n02174001 n02138441 n02364673 n03874599 n02690373 n12267677 n02071294 n02396427 n02100236 n04125021 n01704323 n02281406 n02226429 n02097298 n02787622 n02086910 n02415577 n02123597 n03977966 n03743016 n02951585 n04548280 n03216828 n02096437 n02233338 n02536864 n01773157 n03657121 n02883205 n03777754 n01843065 n15075141 n04462240 n02086240 n03832673 n04026417 n04346328 n02808440 n04152593 n03017168 n03710193 n02110341 n02111500 n02117135 n02018207 n03769881 n02087394 n04286575 n02105855 n03218198 n04509417 n02749479 n01756291 n03584254 n07613480 n02437312 n04458633 n01518878 n01677366 n02797295 n07717410 n03775071 n04209133 n03425413 n04347754 n02028035 n02085936 n04317175 n04310018 n13044778 n01693334 n03047690 n03983396 n02268443 n04442312 n02109961 n04019541 n04335435 n07932039 n03743016 n02268443 n04523525 n02134418 n02860847 n02096051 n02817516 n04238763 n12620546 n02092002 n13037406 n03000134 n04228054 n02002724 n02086079 n03394916 n04265275 n04136333 n02481823 n04041544 n03272562 n02999410 n02488702 n01824575 n03967562 n02730930 n01843383 n04604644 n02177972 
n01744401 n07860988 n04153751 n01491361 n03297495 n04346328 n03956157 n02325366 n02974003 n03733281 n03899768 n07717556 n02114367 n04366367 n03400231 n02808440 n01968897 n02259212 n03642806 n01955084 n03776460 n09835506 n01775062 n02979186 n02093991 n04263257 n04485082 n04482393 n03179701 n01739381 n02088238 n03991062 n13040303 n01534433 n01978455 n02480495 n02086910 n02097209 n02096294 n04209133 n09428293 n03018349 n07871810 n01986214 n01491361 n02106662 n03028079 n04179913 n04264628 n03450230 n04376876 n02129165 n02127052 n02111500 n04254680 n02951358 n03854065 n02488702 n02834397 n02128757 n03075370 n07583066 n03047690 n01829413 n03124043 n01843065 n07697537 n07734744 n02834397 n02814860 n02481823 n04356056 n03124043 n01990800 n03291819 n02487347 n03658185 n04404412 n03791053 n03866082 n02930766 n02074367 n02777292 n04458633 n02098286 n02843684 n04592741 n01641577 n03529860 n01484850 n04141076 n03485407 n03590841 n04037443 n07613480 n01688243 n04074963 n02701002 n03535780 n02090379 n02111889 n06874185 n07693725 n07802026 n07754684 n01774384 n01514668 n02028035 n04423845 n02096051 n02115641 n01774384 n02894605 n03026506 n02666196 n03690938 n02112706 n03787032 n01748264 n03733131 n03920288 n04141076 n02101006 n03944341 n12267677 n03782006 n03924679 n02437616 n02992529 n02871525 n02104029 n03376595 n04243546 n03854065 n03983396 n02104029 n01883070 n07716906 n02092002 n02114855 n03255030 n01873310 n01704323 n04192698 n03485407 n02916936 n07590611 n02869837 n03527444 n03595614 n02105412 n09835506 n04033901 n04285008 n02326432 n02104029 n07716906 n07760859 n03832673 n03492542 n02408429 n03781244 n02099849 n03840681 n02092339 n03590841 n01685808 n01694178 n07753592 n03535780 n02730930 n04270147 n02011460 n04483307 n01688243 n01737021 n02033041 n03100240 n03447447 n03584829 n02483362 n03998194 n02483362 n03481172 n01558993 n04606251 n01537544 n02808440 n03825788 n01773157 n04507155 n04141076 n02504013 n04562935 n07590611 n04357314 n01608432 n02097658 n03950228 n02814860 n01498041 n04553703 n12768682 n03032252 n02097474 n01955084 n07695742 n02483708 n02106550 n04515003 n02226429 n04370456 n03000684 n03837869 n02113799 n02102480 n03459775 n02120079 n02071294 n13054560 n04192698 n02504458 n04372370 n04251144 n02006656 n03908618 n04311174 n03018349 n13133613 n03796401 n04409515 n02102480 n02843684 n04040759 n02086646 n02948072 n07836838 n03476684 n02236044 n04296562 n02017213 n04612504 n02769748 n07717410 n07717410 n01751748 n03773504 n02085782 n04562935 n04239074 n07760859 n07768694 n03160309 n01692333 n03045698 n03272562 n04417672 n03954731 n04505470 n04154565 n03691459 n04209239 n04409515 n02363005 n07734744 n02422699 n03529860 n04235860 n04536866 n01981276 n03888257 n02276258 n03388043 n07718472 n02869837 n02006656 n03595614 n02917067 n01440764 n01855032 n03930630 n02105505 n01491361 n03345487 n04372370 n03187595 n01491361 n04264628 n04557648 n02119022 n02607072 n02396427 n07615774 n04553703 n07718472 n03530642 n02100583 n04557648 n03485407 n07745940 n01531178 n03954731 n04465501 n12768682 n04486054 n03595614 n04548362 n07753113 n02701002 n04525038 n02317335 n02443484 n02939185 n03314780 n02089078 n02859443 n02091467 n02124075 n03690938 n02091831 n02454379 n04065272 n03196217 n02655020 n04487394 n04286575 n03125729 n03854065 n03670208 n02108422 n02102480 n02988304 n02009229 n02099267 n02097209 n02948072 n02110806 n02177972 n03494278 n01737021 n13133613 n04447861 n04591713 n03495258 n02859443 n02860847 n04554684 n03637318 n04258138 n01797886 n03095699 n04041544 n03602883 n04525038 n03706229 
n02093859 n02119022 n02454379 n07614500 n02276258 n07714571 n02177972 n02129604 n01601694 n04355338 n02999410 n07760859 n02165456 n02111129 n03220513 n02437616 n04465501 n03272010 n02167151 n02174001 n02607072 n04254120 n07584110 n03388549 n03063599 n02795169 n02727426 n02799071 n10565667 n02454379 n07717410 n02504013 n04266014 n04493381 n03832673 n02033041 n02447366 n03314780 n02930766 n02110806 n04033901 n02870880 n01872401 n03063689 n03814906 n01798484 n02219486 n02111129 n03124170 n03443371 n01855672 n03089624 n04239074 n03814906 n04285008 n02097474 n01819313 n02364673 n03773504 n04310018 n04398044 n13054560 n01665541 n02025239 n03976657 n04553703 n07715103 n02018795 n03794056 n03595614 n03026506 n02128925 n03717622 n03041632 n04417672 n07753275 n07718747 n01728920 n03447447 n02114548 n02769748 n01784675 n02100877 n02097658 n04523525 n02002556 n03404251 n03786901 n04162706 n02776631 n13133613 n04254777 n04355338 n02104029 n04201297 n03775071 n02093754 n03992509 n03134739 n12057211 n04116512 n02281787 n07920052 n02105641 n01943899 n03841143 n02487347 n04486054 n02281787 n02342885 n03775546 n02011460 n02089078 n03776460 n04423845 n02865351 n03089624 n04371774 n01514859 n01734418 n02328150 n09468604 n03063689 n02951585 n02095314 n03792972 n03776460 n02346627 n02894605 n01775062 n02130308 n04192698 n13044778 n01751748 n07697537 n03868242 n04525038 n02259212 n02391049 n04399382 n02667093 n01530575 n01632777 n03259280 n02840245 n04019541 n02422699 n02113712 n03930630 n02643566 n02231487 n04487394 n03937543 n03355925 n01828970 n01580077 n07932039 n02877765 n02167151 n03476991 n02825657 n01751748 n03207941 n03840681 n09288635 n01843383 n04536866 n03814906 n04429376 n04428191 n03814906 n04344873 n01693334 n03417042 n02747177 n01986214 n02277742 n03127747 n02422699 n12985857 n02672831 n02823428 n02112018 n04037443 n07695742 n02536864 n02788148 n02088364 n02105251 n02105641 n02123159 n03729826 n03125729 n04179913 n02097474 n03297495 n03042490 n04252225 n03141823 n09193705 n04149813 n02655020 n03788365 n03085013 n02037110 n01944390 n02120505 n04536866 n07695742 n02951358 n03417042 n03733131 n04325704 n03843555 n03179701 n02009229 n04523525 n02098413 n02096585 n03424325 n02105162 n04590129 n01537544 n02093991 n03394916 n01514668 n13133613 n03445924 n03873416 n01632458 n03706229 n02085782 n01632777 n04371430 n12144580 n01665541 n02102040 n02701002 n04131690 n04347754 n13040303 n01775062 n02114712 n01833805 n03759954 n02860847 n04330267 n02859443 n02138441 n01774384 n07717556 n04311004 n03908714 n02361337 n04065272 n04146614 n04179913 n01697457 n03857828 n04285008 n02089078 n01755581 n02056570 n02701002 n02483708 n02101556 n01737021 n03874599 n02107683 n03657121 n01592084 n03995372 n03788195 n02100877 n03447447 n09399592 n04350905 n04266014 n02979186 n02988304 n02879718 n03032252 n01530575 n03291819 n04131690 n02037110 n01632458 n02102177 n04367480 n01807496 n02107908 n01740131 n02096585 n04235860 n02363005 n02110958 n07711569 n03384352 n03530642 n03761084 n03602883 n01531178 n01774384 n04456115 n01985128 n01694178 n03065424 n04589890 n04049303 n07248320 n06874185 n04604644 n01775062 n02123597 n02095570 n01985128 n02115913 n01622779 n01601694 n04589890 n01560419 n01440764 n02051845 n03218198 n03047690 n03854065 n02442845 n02361337 n02835271 n01531178 n02108422 n02115913 n03141823 n02088238 n03690938 n03207941 n02510455 n01806143 n01740131 n03854065 n02488291 n04428191 n03063599 n02101556 n02087046 n02101556 n03792972 n04296562 n02101006 n02776631 n01773797 n03709823 n04458633 n02281406 n03691459 
n03692522 n02089867 n03868863 n02012849 n03763968 n01944390 n01667114 n03950228 n02128385 n02319095 n04553703 n03452741 n03345487 n02672831 n03935335 n02104365 n01592084 n04149813 n03594734 n02233338 n01688243 n07718472 n03394916 n13040303 n01986214 n02510455 n04285008 n03956157 n02264363 n03127747 n03445777 n04467665 n03240683 n03065424 n04517823 n02165105 n03602883 n01753488 n04399382 n09256479 n02086910 n03956157 n03485794 n02484975 n02666196 n02097209 n03535780 n02112018 n03109150 n04590129 n01667778 n02787622 n02088364 n03388549 n02494079 n01843065 n02108551 n03929855 n03498962 n02109525 n04328186 n09256479 n04540053 n03459775 n03982430 n02444819 n01494475 n02086079 n02125311 n03529860 n01843383 n03992509 n01641577 n04099969 n04254777 n01608432 n02346627 n02397096 n02676566 n01491361 n02074367 n04252225 n04485082 n02092002 n02098286 n02727426 n03100240 n13054560 n02097298 n02123045 n02002724 n02109047 n03131574 n02692877 n02088632 n04465501 n02930766 n01843065 n03697007 n02102973 n04147183 n02117135 n07754684 n02787622 n02114548 n04515003 n01855672 n01682714 n02110063 n04127249 n03127925 n04429376 n03710193 n03796401 n02786058 n02794156 n02112018 n02423022 n02094114 n02092339 n03344393 n03888605 n02437312 n02107574 n03710637 n01491361 n04074963 n02128385 n04044716 n02093991 n02113186 n01592084 n07714990 n02174001 n02777292 n02090379 n04509417 n02486261 n02841315 n02096051 n01768244 n03895866 n03891332 n02102177 n04525038 n03777754 n07716906 n02091244 n02966687 n01981276 n02092339 n04612504 n09229709 n02099429 n04540053 n03935335 n01644373 n02088466 n04380533 n02105162 n02916936 n01944390 n02123159 n03459775 n01944390 n02100735 n01740131 n03599486 n02169497 n03888605 n04296562 n03794056 n03110669 n02356798 n03032252 n04482393 n03888605 n01748264 n02098413 n03967562 n03706229 n13052670 n04252225 n02009229 n04252225 n09421951 n01930112 n04461696 n04208210 n02443484 n03045698 n03967562 n07880968 n02177972 n01698640 n02704792 n04328186 n01828970 n04482393 n03400231 n03394916 n04467665 n04259630 n01860187 n03868863 n03000134 n02783161 n02509815 n04465501 n02417914 n04482393 n02787622 n02089867 n03240683 n02403003 n04296562 n02782093 n02892201 n03777754 n04612504 n03372029 n01756291 n03902125 n03355925 n01843383 n04579432 n02091134 n04579432 n03481172 n02841315 n07831146 n03075370 n02009912 n04201297 n02396427 n01753488 n03249569 n04090263 n01704323 n02526121 n04204347 n02777292 n03126707 n04254120 n02111277 n01582220 n02206856 n02939185 n01693334 n02641379 n04263257 n04347754 n07734744 n01990800 n04399382 n04270147 n03944341 n01773549 n03259280 n02089078 n02094433 n04525305 n04493381 n01669191 n02066245 n02841315 n03796401 n04371430 n04548362 n03944341 n01773157 n03223299 n03692522 n03594945 n02100877 n03000134 n02783161 n03345487 n02802426 n01944390 n02817516 n02102973 n03956157 n03627232 n02114712 n03837869 n02797295 n04458633 n03196217 n02963159 n02110341 n02108551 n09468604 n03452741 n02174001 n04380533 n07716358 n04037443 n03803284 n03958227 n09288635 n04442312 n03272562 n03891251 n04118776 n04532670 n01742172 n03733281 n02102177 n03026506 n02606052 n01818515 n04589890 n04428191 n02279972 n02123045 n04254120 n03000684 n01983481 n02704792 n07590611 n04162706 n02088632 n02112706 n03938244 n02112018 n02123597 n01531178 n02325366 n03000684 n02066245 n02859443 n03063599 n07753113 n02999410 n03777568 n02108089 n01872401 n02025239 n01484850 n03899768 n04162706 n02110341 n02091467 n04417672 n03000134 n04356056 n04417672 n01689811 n02412080 n02086646 n02096294 n01622779 n02089973 n02835271 
n09193705 n04111531 n04456115 n09193705 n03633091 n07749582 n07697537 n02860847 n01855672 n03743016 n02077923 n07754684 n01833805 n02013706 n03976657 n03134739 n03720891 n02837789 n04355933 n03584829 n09472597 n01843065 n01749939 n03717622 n03982430 n02504458 n02127052 n03127747 n04026417 n03866082 n01872401 n02094258 n03291819 n02110627 n03982430 n02093256 n02277742 n02965783 n04428191 n01740131 n02795169 n02119789 n03535780 n03461385 n01980166 n02486410 n03720891 n04597913 n03666591 n02843684 n04252225 n10565667 n02268443 n01491361 n02098105 n03775071 n03187595 n07760859 n02259212 n03042490 n03942813 n04069434 n04120489 n01820546 n04548280 n07718472 n02417914 n02095314 n06874185 n03447447 n03983396 n04592741 n02102177 n03649909 n03594945 n02099712 n04370456 n04517823 n07875152 n03207941 n02398521 n03954731 n01796340 n01798484 n02113712 n01491361 n04423845 n03483316 n04461696 n02106550 n01773157 n13052670 n02091244 n03706229 n01560419 n03832673 n02492660 n04099969 n03982430 n04532670 n01631663 n02085782 n01728920 n03240683 n04584207 n01806567 n01729977 n01601694 n04350905 n04179913 n04592741 n02108422 n02110806 n02814533 n01773797 n02704792 n02782093 n03916031 n03467068 n03710721 n04554684 n01955084 n07717556 n02009229 n02256656 n03095699 n02094258 n02486410 n02027492 n04200800 n04371430 n03662601 n02444819 n01665541 n01614925 n02112018 n03773504 n04505470 n02951358 n02948072 n02101556 n03868242 n02093256 n01641577 n02128385 n03000684 n03874293 n03134739 n01440764 n02268853 n07584110 n04399382 n01843065 n03188531 n02086240 n04540053 n01829413 n04462240 n03018349 n03782006 n07730033 n03676483 n04275548 n03930630 n03764736 n02226429 n02007558 n04149813 n01820546 n01829413 n02110185 n02107683 n03840681 n02018207 n01833805 n03902125 n03868863 n03443371 n02113978 n03793489 n02859443 n02097047 n04192698 n07590611 n07880968 n07697537 n02342885 n02398521 n02002724 n02910353 n02442845 n02906734 n02494079 n02091831 n02823750 n04447861 n01796340 n03089624 n03924679 n01980166 n04435653 n03649909 n02107142 n02110063 n02403003 n04081281 n01735189 n01532829 n03891251 n02077923 n03977966 n03452741 n04465501 n02777292 n02113799 n04367480 n03787032 n01744401 n02667093 n03933933 n01580077 n02794156 n01796340 n02002556 n02837789 n01818515 n09835506 n04604644 n01917289 n03180011 n02102480 n03873416 n03995372 n03884397 n03657121 n02093754 n02102318 n02097658 n02108422 n01855672 n02489166 n03208938 n02116738 n07802026 n03584254 n02108000 n09256479 n02892767 n02105162 n03388549 n02870880 n02116738 n01807496 n03045698 n03717622 n03109150 n03388549 n02437616 n07930864 n03991062 n03709823 n03680355 n02033041 n02843684 n02795169 n02236044 n02509815 n04442312 n12998815 n03255030 n02111889 n03595614 n03788195 n02690373 n01756291 n01698640 n07565083 n01983481 n03445777 n03998194 n02879718 n07930864 n03255030 n02086646 n04120489 n03733281 n01667114 n03532672 n03179701 n04229816 n03733281 n09256479 n02105251 n03146219 n04330267 n06874185 n12620546 n01641577 n02106550 n02445715 n03146219 n02493793 n02509815 n02804610 n03590841 n01871265 n02483362 n02437616 n03895866 n02071294 n03291819 n13044778 n02114855 n01984695 n02500267 n06359193 n01843065 n03763968 n02643566 n04258138 n02667093 n07734744 n04153751 n02138441 n03188531 n07802026 n02100583 n07860988 n01817953 n02106166 n02483708 n03782006 n02007558 n04476259 n02835271 n03124170 n04550184 n03661043 n04204238 n03776460 n03837869 n04443257 n02486261 n01537544 n02317335 n02134418 n04557648 n01872401 n04209239 n01677366 n02100735 n02096437 n04479046 n01693334 n02965783 
n01514859 n07613480 n02108422 n01914609 n03482405 n03710637 n04009552 n02106166 n01531178 n02704792 n04487394 n02834397 n02108915 n02484975 n04310018 n02095570 n03447721 n02119022 n03017168 n03697007 n03249569 n02835271 n04591713 n03347037 n02791124 n01692333 n01882714 n03196217 n02422699 n04041544 n03796401 n02028035 n02966193 n04235860 n03642806 n03838899 n02510455 n01930112 n03781244 n02091032 n02025239 n03196217 n02094114 n01978455 n04254120 n13040303 n03459775 n07716358 n03016953 n03876231 n02892767 n04069434 n02256656 n02168699 n02128757 n01986214 n02009229 n02790996 n03630383 n07718747 n02361337 n02951585 n07873807 n03223299 n07836838 n04266014 n03956157 n02002724 n02077923 n02002556 n02951358 n03259280 n02113186 n02843684 n04332243 n01775062 n02777292 n04118538 n02226429 n03908618 n02782093 n03777568 n02101556 n02701002 n02018795 n02102318 n03045698 n04254680 n02692877 n12620546 n02325366 n01560419 n02977058 n03127925 n04325704 n03483316 n02101556 n03450230 n04264628 n02101556 n03482405 n07715103 n03544143 n02395406 n01797886 n03207941 n04389033 n01978455 n01755581 n02708093 n03461385 n02342885 n01930112 n04009552 n02804610 n13037406 n02092339 n02106550 n04033995 n02395406 n03733131 n02859443 n04008634 n02841315 n02412080 n03785016 n01440764 n03100240 n01665541 n03710721 n04599235 n04370456 n02124075 n02138441 n03085013 n01744401 n04296562 n09835506 n03785016 n07754684 n04311004 n02124075 n02802426 n04239074 n02971356 n02009229 n02096177 n01695060 n03954731 n01828970 n02086240 n02447366 n03095699 n03590841 n03482405 n02107574 n02096294 n03085013 n04456115 n04486054 n04599235 n03141823 n04263257 n03877845 n04428191 n03976657 n02797295 n03637318 n03041632 n07579787 n02687172 n03201208 n04579145 n01608432 n02099849 n01667114 n04372370 n02106166 n03075370 n02138441 n03028079 n01930112 n03388183 n03825788 n13044778 n02687172 n03692522 n02391049 n04254120 n03146219 n03126707 n02025239 n07714571 n02869837 n01580077 n03594945 n02109525 n04099969 n03792972 n03623198 n01872401 n02441942 n03032252 n02687172 n02096294 n02037110 n04310018 n02280649 n03992509 n04037443 n01806567 n02325366 n03372029 n02259212 n04371430 n02391049 n01755581 n01820546 n02264363 n01494475 n03201208 n01774750 n03259280 n02687172 n04090263 n02483708 n04487081 n03218198 n02480495 n01692333 n03017168 n01843065 n03930630 n02056570 n03041632 n02799071 n03344393 n01514859 n02113978 n02027492 n01981276 n02397096 n04192698 n03134739 n02666196 n02117135 n04461696 n02231487 n09246464 n04149813 n02102040 n02086910 n04355338 n02457408 n02093428 n01689811 n03481172 n07836838 n03803284 n01910747 n04553703 n03478589 n03584829 n04254777 n04254120 n02105505 n02361337 n03992509 n02804610 n02102318 n01560419 n01773549 n03902125 n06359193 n02129165 n02120079 n02113712 n01728920 n03160309 n07871810 n04258138 n03045698 n04552348 n13044778 n03717622 n02025239 n02268443 n02108915 n04542943 n03240683 n02966687 n07754684 n03991062 n02769748 n03187595 n03271574 n02256656 n03637318 n04357314 n03207941 n01728920 n04074963 n03000684 n04118538 n03888257 n03000134 n02930766 n02437616 n01622779 n03954731 n04266014 n02108915 n01729977 n04553703 n02328150 n07715103 n03617480 n02441942 n01734418 n02229544 n02259212 n03017168 n02077923 n03871628 n02025239 n02992211 n01978287 n01755581 n04008634 n01773797 n04209239 n04584207 n02493793 n01616318 n04127249 n01877812 n02814860 n03535780 n04040759 n02879718 n02514041 n04592741 n03854065 n01614925 n04026417 n03837869 n02865351 n04239074 n06794110 n02190166 n04208210 n02088238 n02497673 n03179701 n04613696 
n01693334 n02672831 n02817516 n02106662 n04392985 n03777754 n03649909 n04311004 n01664065 n04389033 n02807133 n03476991 n03141823 n03793489 n02988304 n03325584 n01871265 n09288635 n04326547 n02110063 n03220513 n02093859 n01693334 n02815834 n02107574 n04487081 n04347754 n07695742 n04086273 n04493381 n01580077 n02910353 n07754684 n04067472 n12768682 n01675722 n02437312 n04417672 n03868863 n13054560 n02100735 n03888605 n04009552 n04238763 n03876231 n03706229 n02859443 n01530575 n01824575 n02096437 n04486054 n02704792 n02110185 n01824575 n12620546 n03814906 n04154565 n02058221 n02111129 n03690938 n03857828 n01534433 n09229709 n02086910 n04507155 n02098105 n02089078 n04355933 n02930766 n03384352 n02892201 n03992509 n02109961 n04479046 n03000247 n03047690 n04258138 n04005630 n02281787 n01693334 n03379051 n01614925 n04479046 n04591713 n03920288 n02051845 n01756291 n02107312 n04435653 n03325584 n02058221 n02107683 n02111277 n03786901 n07768694 n03891332 n04204347 n03400231 n03961711 n02490219 n03347037 n04597913 n02090721 n03450230 n02112137 n03250847 n03868242 n02058221 n04141327 n03761084 n02090379 n02486261 n02095570 n01749939 n02804610 n04273569 n02777292 n03930630 n03775546 n07716906 n02916936 n02930766 n03709823 n02056570 n02412080 n02666196 n03196217 n04479046 n04509417 n01532829 n07697313 n02493793 n02058221 n04252077 n02002556 n02085936 n03063599 n04273569 n04550184 n03710193 n01742172 n02443484 n03720891 n03706229 n02643566 n03218198 n03877845 n01630670 n07714990 n02264363 n01532829 n04540053 n02113712 n04259630 n03661043 n03220513 n03445924 n07831146 n01530575 n03691459 n01773157 n06785654 n03290653 n03995372 n03866082 n02276258 n03777568 n01675722 n12985857 n02835271 n03444034 n02101006 n03637318 n03787032 n04258138 n03535780 n04065272 n02099267 n03347037 n01755581 n03908714 n02056570 n02093647 n01729977 n04344873 n01847000 n02112350 n01632458 n04562935 n03325584 n04127249 n04141076 n04554684 n07714571 n02027492 n03532672 n02992529 n02321529 n03538406 n03721384 n02013706 n04599235 n02093991 n02777292 n02123394 n07747607 n03424325 n03976657 n04209239 n02951585 n07753592 n04443257 n03388183 n10148035 n03344393 n04336792 n02120505 n01981276 n03933933 n01829413 n03916031 n02776631 n01775062 n04286575 n04209239 n07730033 n02099712 n07613480 n02100583 n03733805 n03873416 n04476259 n02113799 n02690373 n09468604 n02009912 n01980166 n02096294 n03764736 n03417042 n03000134 n10565667 n04120489 n02114855 n04039381 n04376876 n02843684 n02643566 n03924679 n03958227 n03773504 n02276258 n03776460 n03000684 n02129165 n03445924 n02108089 n04310018 n03873416 n02236044 n03483316 n02099601 n02115913 n02441942 n03967562 n04479046 n04344873 n02123597 n02229544 n03179701 n02791124 n04525305 n03976657 n04147183 n02835271 n01685808 n02280649 n01768244 n02489166 n04355338 n02279972 n03770679 n01498041 n04041544 n02085620 n02086240 n03532672 n02268853 n02978881 n02363005 n04442312 n02280649 n02108915 n04380533 n04462240 n03271574 n03930630 n02892767 n01797886 n01978287 n02437616 n03920288 n03160309 n01560419 n02666196 n03424325 n02514041 n02790996 n02397096 n01775062 n02071294 n02100583 n04380533 n01990800 n03903868 n07583066 n02013706 n02130308 n02113023 n03884397 n03000684 n04037443 n01687978 n02058221 n02704792 n07693725 n04039381 n03461385 n01950731 n03773504 n02104365 n04536866 n02328150 n07871810 n03372029 n04462240 n02133161 n02808304 n03443371 n01843065 n01914609 n01855032 n04380533 n02086646 n02363005 n04296562 n04033995 n02871525 n03742115 n02704792 n02108915 n03670208 n02093428 n04428191 n09421951 
n01984695 n02128757 n01917289 n04033901 n02092002 n03840681 n03476684 n04286575 n04423845 n02951358 n03877845 n01728572 n03481172 n03208938 n02487347 n02107908 n07565083 n04479046 n03832673 n02948072 n02950826 n03929660 n04370456 n02978881 n01498041 n02783161 n03697007 n01820546 n03026506 n04584207 n02091467 n02422699 n02123045 n03793489 n03958227 n02443484 n02098286 n02788148 n04392985 n12768682 n03843555 n02894605 n04372370 n02077923 n02111889 n01770393 n02840245 n01631663 n02786058 n04462240 n02264363 n03942813 n02457408 n03476991 n02107312 n02917067 n04612504 n02100583 n04239074 n04476259 n02105855 n03929855 n02389026 n04389033 n03876231 n04041544 n01806143 n07584110 n02814533 n03868863 n02104365 n02128925 n02105251 n04447861 n04517823 n02395406 n04208210 n02091831 n04330267 n02444819 n02815834 n02264363 n01484850 n02105641 n02808440 n02116738 n01873310 n03792972 n02125311 n01855032 n02704792 n07717556 n03814906 n01667114 n03857828 n01784675 n02091032 n04409515 n01614925 n03769881 n02814533 n02093754 n07747607 n03857828 n04277352 n02104029 n04131690 n02951358 n02134084 n07749582 n03126707 n04325704 n02497673 n02105412 n01685808 n07871810 n02927161 n04380533 n04152593 n02106382 n04350905 n01795545 n03871628 n02965783 n07614500 n03884397 n03980874 n02492035 n02113712 n03417042 n04259630 n03483316 n01494475 n02088238 n07565083 n07753113 n04366367 n04120489 n04429376 n02091467 n02112350 n02699494 n03995372 n02113186 n01685808 n03347037 n02843684 n02108089 n03825788 n03773504 n02787622 n04325704 n03796401 n01698640 n03045698 n02422699 n04417672 n04141327 n04118538 n02113624 n04550184 n01728572 n04380533 n04209133 n01537544 n07920052 n04317175 n01742172 n02786058 n03417042 n03770679 n02804414 n02236044 n03085013 n04019541 n03661043 n03769881 n01773797 n02835271 n01494475 n01773797 n02097298 n01667114 n02106030 n02106030 n03146219 n01930112 n02102177 n13040303 n04357314 n04264628 n07875152 n04371774 n02099849 n03127925 n02869837 n03710193 n02097130 n07730033 n04311004 n03085013 n02102040 n04486054 n02111889 n04204238 n03792972 n03450230 n03617480 n02124075 n03495258 n03769881 n02916936 n01704323 n03063599 n01883070 n01614925 n04311004 n01692333 n03125729 n04192698 n03874293 n03496892 n04118776 n02454379 n04116512 n01677366 n01514668 n03476991 n03733805 n03942813 n03095699 n02883205 n02091467 n02817516 n06794110 n03131574 n02101388 n01978455 n02106382 n02108915 n03216828 n07615774 n07730033 n01770393 n04371430 n02123159 n01984695 n01737021 n02825657 n02099267 n03658185 n02815834 n02120079 n03908714 n04554684 n04604644 n03109150 n03866082 n03908714 n03617480 n02093647 n02510455 n04074963 n03089624 n02095314 n03218198 n02817516 n01943899 n03854065 n03891251 n04423845 n04131690 n04442312 n01537544 n03325584 n02095889 n03291819 n03042490 n02504013 n03146219 n04252077 n02328150 n01697457 n02655020 n04606251 n07720875 n02091831 n02097209 n01630670 n01950731 n01910747 n07695742 n03063689 n01871265 n03478589 n07583066 n02109525 n03982430 n04270147 n01871265 n02033041 n03476991 n01494475 n09229709 n03967562 n03902125 n02837789 n04311004 n04228054 n02087394 n04147183 n02133161 n03100240 n04204238 n02445715 n03481172 n04487394 n03796401 n02978881 n01877812 n01496331 n07717410 n02871525 n02442845 n02112706 n02879718 n03085013 n02799071 n03902125 n02965783 n02281406 n04404412 n02123159 n02747177 n04548280 n04591713 n04044716 n03742115 n02992211 n07717410 n10148035 n02099429 n02486261 n04447861 n03843555 n04263257 n04330267 n02787622 n02823750 n01740131 n04235860 n03498962 n02492660 n02437312 n07718747 
n03803284 n02364673 n02906734 n07684084 n03970156 n03825788 n03814906 n07715103 n02749479 n02815834 n02877765 n02088364 n02088632 n04270147 n07248320 n01514668 n01883070 n02276258 n04554684 n02009229 n07248320 n01924916 n03376595 n03983396 n02112018 n01770393 n02403003 n02051845 n02870880 n02484975 n02113799 n03717622 n07930864 n07717410 n02730930 n03874599 n02105162 n02099712 n01530575 n03891332 n01773157 n02808440 n02177972 n03759954 n07579787 n02877765 n03958227 n03977966 n03825788 n03028079 n04501370 n02259212 n03961711 n03496892 n03706229 n04409515 n12144580 n03769881 n09193705 n02782093 n01734418 n04285008 n02120505 n02111277 n02640242 n02790996 n02099267 n07871810 n01986214 n01984695 n12985857 n04542943 n03888605 n04074963 n10565667 n04483307 n09835506 n02129165 n03538406 n01498041 n04461696 n03944341 n03259280 n01484850 n04486054 n03788195 n09193705 n03530642 n04557648 n02892201 n04509417 n03041632 n02093256 n02391049 n04479046 n03961711 n15075141 n02108915 n01847000 n02325366 n03770439 n03676483 n06794110 n01770393 n02788148 n03127925 n03710721 n02484975 n02536864 n02105855 n03733131 n04435653 n02124075 n03792782 n04465501 n01644373 n02085620 n03720891 n03814639 n03133878 n02892201 n02077923 n02992211 n02114712 n02410509 n03733131 n03843555 n02917067 n02128385 n04009552 n03888605 n03388043 n04596742 n03935335 n06785654 n02356798 n02398521 n03445924 n03041632 n03535780 n07753113 n02834397 n01824575 n07697313 n04487081 n02509815 n02106550 n01704323 n01742172 n02094433 n01817953 n03032252 n01742172 n02483362 n02096437 n02487347 n02096294 n04465501 n02948072 n03424325 n02111500 n02114367 n01537544 n01945685 n02607072 n04005630 n04127249 n07714990 n03662601 n03179701 n09468604 n01530575 n03100240 n06359193 n02510455 n02120079 n02096437 n03141823 n01484850 n04579432 n04118538 n02094433 n02086910 n01622779 n07747607 n07718747 n02106030 n02363005 n03599486 n03637318 n02101388 n03662601 n03188531 n02104029 n11939491 n04238763 n01945685 n02834397 n02099712 n01558993 n03450230 n03838899 n04243546 n02123159 n04536866 n02808304 n04120489 n03127925 n04505470 n03782006 n02281406 n04252225 n02776631 n02444819 n04005630 n03717622 n03961711 n03444034 n03970156 n01824575 n02396427 n02165456 n02226429 n02056570 n07693725 n04599235 n03944341 n02134418 n03788365 n07717410 n04264628 n03967562 n04265275 n03584254 n01614925 n07720875 n03814639 n04370456 n04037443 n03297495 n02129604 n03131574 n04243546 n02105855 n03895866 n03216828 n02317335 n02106030 n03661043 n01924916 n02165456 n04536866 n01616318 n02799071 n03788195 n02363005 n01924916 n04461696 n04270147 n02843684 n04258138 n03944341 n01737021 n01882714 n02817516 n02097298 n01843383 n04019541 n04118776 n02799071 n03967562 n03494278 n02229544 n04325704 n03967562 n13044778 n03344393 n04557648 n03447721 n09472597 n04118538 n03424325 n04599235 n01530575 n02835271 n09472597 n02092002 n02730930 n04599235 n02422699 n03657121 n01622779 n03903868 n02090721 n04443257 n01734418 n07714571 n01496331 n02264363 n03483316 n03742115 n07714990 n03590841 n03871628 n04311174 n02114548 n03255030 n02105505 n07579787 n07697313 n03400231 n06874185 n04591713 n04509417 n03255030 n03404251 n02268853 n07613480 n07768694 n02321529 n01818515 n01877812 n02895154 n03485794 n04553703 n02364673 n09229709 n02916936 n04235860 n07932039 n15075141 n02006656 n02487347 n02087394 n02480855 n04372370 n03733805 n02979186 n02033041 n10565667 n02006656 n02099267 n02108915 n03930630 n01728572 n04552348 n02090721 n02870880 n02951585 n04259630 n02328150 n04435653 n02843684 n03788195 n03887697 
n04335435 n04228054 n01608432 n04355933 n02123045 n04589890 n04086273 n03832673 n02111277 n01704323 n03599486 n04254680 n02086240 n02817516 n02487347 n04592741 n03272010 n02018795 n01930112 n03223299 n03388043 n03888605 n04040759 n02169497 n02793495 n04376876 n02177972 n04485082 n07717410 n04081281 n03109150 n02090622 n03482405 n01664065 n03032252 n03355925 n01910747 n04536866 n03000247 n03527444 n02025239 n04254777 n04141975 n03793489 n02979186 n02127052 n01847000 n02328150 n02909870 n10565667 n03709823 n02992211 n02093859 n07747607 n07717410 n03249569 n01734418 n03944341 n04344873 n01677366 n02108000 n03876231 n04461696 n06596364 n09428293 n03482405 n02088094 n04136333 n04204238 n01697457 n04074963 n01514859 n02106662 n04252225 n02117135 n03476684 n01770393 n02795169 n03733131 n03676483 n04133789 n04435653 n01728920 n04033995 n04355933 n01675722 n03717622 n04428191 n03535780 n02105162 n07753275 n04483307 n02917067 n04118776 n03000684 n03000134 n02281787 n01770393 n02326432 n01753488 n02167151 n02808304 n04392985 n03197337 n03100240 n04286575 n03127925 n01945685 n02536864 n02799071 n02783161 n02346627 n02264363 n02088364 n02093754 n03617480 n02105162 n02966687 n01795545 n02091831 n01537544 n03041632 n02834397 n02699494 n03404251 n01860187 n04550184 n02992211 n02437312 n02098105 n07590611 n03527444 n07583066 n01748264 n02966687 n03803284 n04366367 n02119022 n01740131 n02099601 n01534433 n04606251 n02099601 n02488702 n04336792 n02391049 n02086646 n02086079 n02110806 n02110341 n04447861 n02119789 n04162706 n02259212 n03124043 n02101388 n03630383 n02980441 n02494079 n03602883 n01695060 n04141327 n04266014 n03047690 n02097209 n02113023 n02174001 n01669191 n01667778 n02096051 n04251144 n02112706 n02988304 n03461385 n03447447 n02077923 n03887697 n02342885 n01641577 n01616318 n02007558 n01698640 n04033995 n03804744 n02110063 n03355925 n01667114 n01914609 n03804744 n02669723 n07836838 n02412080 n03743016 n04336792 n13052670 n03791053 n03776460 n03017168 n04404412 n03777754 n04037443 n03796401 n04404412 n06596364 n02105412 n04023962 n01734418 n02328150 n02101006 n07684084 n02002556 n13133613 n07248320 n01753488 n02107908 n02123394 n04154565 n02504458 n13052670 n04008634 n02916936 n02107683 n02134084 n02443484 n07720875 n04493381 n03761084 n02102040 n03089624 n01985128 n01753488 n02137549 n09835506 n03443371 n02346627 n02002556 n04589890 n04562935 n01632777 n02317335 n01632458 n02493509 n02398521 n03970156 n02667093 n03825788 n02086646 n13044778 n02088238 n01776313 n02481823 n04423845 n03047690 n07749582 n02977058 n01796340 n02110627 n02910353 n03201208 n01728572 n02114367 n03980874 n02776631 n02165456 n02437312 n02364673 n03764736 n04041544 n12998815 n03388043 n03803284 n02113624 n02102318 n03424325 n03250847 n09288635 n03924679 n03956157 n01910747 n04560804 n07714990 n04542943 n07716906 n02128925 n04487394 n04399382 n04044716 n04465501 n03854065 n02398521 n02823750 n07583066 n02107312 n04584207 n01829413 n01833805 n02417914 n04081281 n02088364 n02113799 n04376876 n02093991 n02730930 n04133789 n02442845 n02018207 n03930630 n02910353 n02730930 n03776460 n02088364 n04264628 n07714990 n04461696 n03372029 n02090379 n01819313 n03657121 n02106662 n02109525 n02500267 n04376876 n04483307 n03843555 n13037406 n02097047 n02403003 n03290653 n02690373 n02536864 n02091467 n03843555 n04044716 n01537544 n02037110 n04146614 n04612504 n01484850 n07684084 n03220513 n04326547 n03127925 n02971356 n03476991 n01774384 n07565083 n02672831 n03967562 n03998194 n09229709 n01641577 n01682714 n04204347 n03160309 n03478589 
n03792972 n04458633 n04392985 n02480855 n02099429 n07714571 n02098105 n02963159 n02777292 n03529860 n03706229 n12057211 n04612504 n04554684 n03590841 n03661043 n04065272 n01531178 n07614500 n02017213 n02859443 n04235860 n02256656 n03481172 n02110063 n02281787 n04579432 n01985128 n02363005 n04317175 n01737021 n03216828 n02095570 n07714571 n04525305 n07565083 n03494278 n04525038 n01494475 n04404412 n07718747 n03903868 n04376876 n02088632 n07720875 n02111277 n01728920 n04311004 n02877765 n06785654 n01978455 n01729977 n02906734 n01601694 n04429376 n02676566 n03733281 n02106382 n02817516 n04039381 n04356056 n01514859 n03791053 n04376876 n03630383 n04252077 n04417672 n01641577 n04141076 n02025239 n02992529 n02672831 n02088466 n01797886 n04501370 n04149813 n02172182 n04336792 n04417672 n03944341 n03961711 n04493381 n04258138 n04523525 n02423022 n02102177 n02865351 n04507155 n07930864 n02097047 n03916031 n02892201 n04254680 n01608432 n04461696 n03483316 n02500267 n02916936 n03452741 n02892201 n02113186 n03775546 n03478589 n03633091 n04599235 n03065424 n02097209 n01873310 n04604644 n04418357 n03794056 n03179701 n01440764 n01806143 n02093859 n01496331 n01669191 n04367480 n02971356 n02114548 n03249569 n01796340 n07613480 n04505470 n03804744 n02950826 n03743016 n02777292 n03089624 n02110341 n03485407 n02480855 n02356798 n02910353 n03662601 n01601694 n04141076 n03384352 n02492660 n03376595 n02776631 n02025239 n04065272 n02033041 n03417042 n09332890 n02097658 n04552348 n03447447 n03781244 n03000684 n01749939 n01677366 n02094114 n04465501 n04372370 n02281787 n03196217 n02277742 n02701002 n03290653 n03452741 n01806143 n04037443 n03825788 n04266014 n07716906 n02123597 n02110063 n02981792 n03804744 n02134418 n03970156 n02483362 n02486261 n01514668 n02134084 n03970156 n01558993 n01644373 n03692522 n03804744 n02804414 n02108551 n01560419 n02490219 n03710637 n03673027 n04552348 n02094114 n03967562 n03776460 n02447366 n03733805 n03127925 n02279972 n09428293 n03089624 n03938244 n04041544 n02113712 n03594734 n02206856 n03485794 n02256656 n02981792 n03347037 n03026506 n04356056 n09332890 n07565083 n07760859 n04286575 n02790996 n01873310 n03337140 n04483307 n02281787 n02114548 n12057211 n02971356 n04591713 n04371774 n03841143 n02229544 n02794156 n04270147 n04090263 n04592741 n02120505 n02120505 n03532672 n03062245 n03089624 n03710193 n03792972 n02085936 n01924916 n01692333 n04428191 n13044778 n06359193 n07693725 n02916936 n02488702 n02489166 n02102318 n03980874 n04265275 n04429376 n02480855 n07873807 n03478589 n02071294 n02097298 n01734418 n02123159 n02951585 n07714990 n02859443 n04447861 n02096585 n03902125 n04525038 n03028079 n03866082 n03891332 n03220513 n03207743 n04589890 n03871628 n01774750 n02125311 n02747177 n04153751 n02101556 n02095570 n01629819 n03042490 n01872401 n04311004 n04228054 n03983396 n04456115 n04070727 n02490219 n02093256 n03710193 n03742115 n03841143 n04285008 n02074367 n02526121 n02116738 n03666591 n02363005 n02910353 n02219486 n03063599 n01955084 n02104029 n02114855 n04023962 n04376876 n04275548 n01682714 n01641577 n02676566 n07892512 n01775062 n03457902 n04486054 n03457902 n02843684 n07768694 n04026417 n03355925 n02025239 n03781244 n03947888 n02280649 n03450230 n02098286 n03776460 n03594945 n07734744 n02276258 n07720875 n02988304 n03595614 n02951358 n03764736 n02939185 n02091134 n01978287 n02268443 n03127747 n03814639 n03874293 n04081281 n07768694 n07715103 n02790996 n03160309 n04525038 n02013706 n04540053 n02105056 n07715103 n01860187 n07920052 n01687978 n07590611 n03394916 n03947888 
n01945685 n02110063 n04074963 n04606251 n03594945 n04254120 n03187595 n02110958 n02977058 n07930864 n02099601 n03590841 n02441942 n01806567 n02643566 n03874293 n03255030 n04487394 n07760859 n02112137 n04486054 n01496331 n03337140 n01882714 n02113978 n07615774 n02168699 n04465501 n02086910 n04136333 n04254120 n03530642 n03187595 n01770393 n02422106 n03709823 n02910353 n01855672 n02361337 n01580077 n01694178 n04120489 n04517823 n03775546 n01773157 n03775546 n03777568 n04355933 n01784675 n01498041 n02422699 n04447861 n02177972 n02319095 n03935335 n03980874 n03976657 n02442845 n02085782 n03976467 n07583066 n04461696 n04467665 n02105641 n04501370 n03777754 n04065272 n03447721 n02206856 n03459775 n03947888 n04111531 n02807133 n03481172 n01983481 n03733131 n02105641 n03841143 n03976467 n02391049 n03196217 n02422699 n04462240 n04328186 n04310018 n04417672 n03018349 n02965783 n01629819 n03207941 n04311174 n02226429 n02363005 n03041632 n04033901 n02410509 n02112137 n02747177 n02825657 n02097298 n02992529 n03032252 n01734418 n04090263 n04201297 n02094258 n04111531 n04265275 n04065272 n02676566 n03388043 n07930864 n02423022 n02108551 n03424325 n02815834 n04228054 n02097209 n02137549 n03314780 n01608432 n01820546 n02109961 n01580077 n07579787 n03788365 n02749479 n03930313 n01806567 n02927161 n04447861 n04548362 n02259212 n04252225 n02105162 n03345487 n02727426 n07584110 n04005630 n02096294 n04273569 n02422106 n03534580 n09288635 n01795545 n02397096 n02730930 n01806143 n03661043 n02807133 n02277742 n07613480 n03297495 n03761084 n03109150 n07716906 n12267677 n04204238 n04204347 n04596742 n03710637 n02481823 n02669723 n01491361 n01629819 n03982430 n02869837 n01843065 n04311174 n01820546 n01677366 n02108089 n01807496 n03710721 n03063599 n03498962 n01729322 n02769748 n02268853 n04081281 n03983396 n06359193 n02127052 n02107142 n02488702 n02006656 n07831146 n02676566 n04277352 n03527444 n03372029 n03314780 n02114712 n01978287 n03337140 n03538406 n02917067 n01756291 n01667778 n01795545 n01631663 n02088364 n02808304 n01797886 n02104029 n03201208 n01558993 n03967562 n04428191 n02494079 n04162706 n04515003 n04040759 n01774750 n01943899 n02098413 n02099601 n04270147 n02417914 n03065424 n07734744 n02007558 n02119789 n07695742 n02364673 n01689811 n02672831 n02124075 n01644900 n04335435 n02086646 n02095889 n02105251 n02391049 n01955084 n02480495 n03032252 n02808440 n03637318 n02877765 n04597913 n02112706 n04590129 n01910747 n02895154 n03062245 n03775546 n03372029 n04228054 n04258138 n04074963 n11879895 n01986214 n01943899 n02138441 n01806143 n01983481 n03478589 n04389033 n02951358 n02102318 n03763968 n03594734 n01689811 n07753113 n02074367 n01819313 n03467068 n03393912 n02056570 n04008634 n04254777 n01644900 n02106166 n03891251 n04435653 n01773549 n03729826 n01770081 n03529860 n03110669 n03841143 n02091244 n04067472 n04371430 n03796401 n03782006 n04238763 n01784675 n04019541 n02097209 n02259212 n03956157 n02112706 n02111889 n03527444 n02167151 n04442312 n07695742 n03710193 n04074963 n02099849 n02134418 n02825657 n13037406 n02085782 n02417914 n12620546 n04275548 n02804610 n04146614 n01514668 n01443537 n04509417 n02892201 n02088466 n03065424 n04254120 n03792972 n01924916 n02037110 n07697537 n03394916 n02101006 n02110806 n03146219 n02814860 n03649909 n03127747 n01980166 n02092002 n03787032 n02133161 n03874599 n04201297 n02106550 n07615774 n03710637 n03527444 n07714990 n03017168 n02111500 n01744401 n03950228 n02410509 n02483708 n07583066 n04589890 n02655020 n02259212 n01990800 n03457902 n07920052 n04505470 n02111129 
n03216828 n02892767 n02095314 n02092002 n01664065 n03944341 n03495258 n01737021 n01677366 n01806567 n02097298 n04532670 n04522168 n02708093 n02066245 n02971356 n02906734 n03492542 n03930313 n02396427 n02037110 n03297495 n03017168 n01773797 n03786901 n02910353 n02102177 n02730930 n02480495 n04562935 n02109525 n02988304 n02091467 n04204238 n04476259 n01532829 n03208938 n04532106 n02165105 n01677366 n07715103 n02795169 n02127052 n02098286 n01728572 n01833805 n02445715 n02259212 n04209133 n07711569 n07860988 n09421951 n03125729 n04141076 n01742172 n03063689 n01704323 n01748264 n01770393 n01955084 n02894605 n03792972 n04141975 n02672831 n03018349 n02971356 n02859443 n07749582 n03792782 n02398521 n04254777 n02326432 n03877472 n02123045 n03623198 n02342885 n03187595 n03884397 n04330267 n04266014 n02138441 n03538406 n03000247 n02363005 n02883205 n07753592 n04371430 n03871628 n03633091 n04023962 n01740131 n04251144 n02870880 n02009912 n03461385 n02328150 n01945685 n02280649 n02012849 n02112137 n04326547 n02117135 n07930864 n04136333 n04370456 n01737021 n01817953 n03888605 n03452741 n04330267 n07932039 n02398521 n07930864 n03787032 n02112350 n12267677 n03494278 n07693725 n03857828 n02815834 n04376876 n03874293 n04371774 n03929855 n02841315 n02090721 n09468604 n02488291 n02106662 n03461385 n04485082 n03995372 n02493793 n01914609 n02002556 n07711569 n02098286 n07693725 n02422106 n02110958 n04613696 n03692522 n07920052 n02799071 n04037443 n02113978 n01530575 n10565667 n10148035 n03773504 n03347037 n09193705 n02113978 n01882714 n03527444 n02979186 n01877812 n02111129 n03417042 n03461385 n02114855 n12768682 n01950731 n02667093 n02011460 n03290653 n02108000 n04229816 n01930112 n02486261 n04542943 n04235860 n07768694 n02403003 n03786901 n02396427 n02109047 n01968897 n03388043 n04258138 n02112137 n02607072 n02134084 n03837869 n04200800 n02071294 n04141076 n02085620 n03218198 n02098286 n02099601 n04099969 n03216828 n02892767 n03482405 n03838899 n03018349 n04487394 n04141076 n02106382 n11939491 n03100240 n03908714 n07831146 n09256479 n12267677 n04152593 n02093428 n02791270 n02099429 n02105056 n03223299 n02643566 n07720875 n02124075 n02699494 n03888605 n03249569 n03584254 n02981792 n04133789 n03534580 n01518878 n02704792 n07747607 n13037406 n02488291 n03538406 n03627232 n02099429 n02704792 n07684084 n03733805 n02397096 n02114367 n02319095 n02086646 n02094433 n04133789 n04483307 n02504013 n04525038 n04265275 n04209239 n03967562 n02129165 n03777754 n09835506 n02727426 n01693334 n02457408 n02128925 n03903868 n04409515 n01950731 n06359193 n03187595 n01950731 n04041544 n02892767 n02363005 n04355338 n02277742 n04090263 n03314780 n04285008 n01847000 n02094433 n02098105 n07892512 n09229709 n03527444 n03530642 n01774384 n01773157 n04366367 n03676483 n01930112 n03933933 n03877845 n02104365 n07697537 n02444819 n13037406 n04296562 n02457408 n11879895 n04120489 n03958227 n03187595 n03930630 n02277742 n01774750 n04550184 n02837789 n04479046 n02500267 n04317175 n07875152 n01687978 n02088094 n02814533 n02109961 n02117135 n04579145 n07880968 n02190166 n02396427 n04542943 n04357314 n02114855 n03920288 n02120079 n01776313 n01847000 n04447861 n04019541 n03201208 n03857828 n03404251 n07754684 n09256479 n02442845 n06794110 n02917067 n04592741 n02389026 n03444034 n03724870 n02895154 n02165456 n03804744 n01742172 n02037110 n02087046 n02865351 n02025239 n03887697 n02814533 n04133789 n03891332 n02483708 n07714571 n03982430 n04579145 n02127052 n07932039 n04238763 n03710637 n02825657 n03977966 n02321529 n02493509 n02219486 n09193705 
n01950731 n03457902 n03908714 n03980874 n02113624 n03393912 n03379051 n01688243 n02971356 n04243546 n02510455 n02092002 n02116738 n02391049 n04111531 n02128925 n02097047 n02071294 n04462240 n01748264 n02086910 n04326547 n02107908 n06874185 n03773504 n04039381 n03874293 n04482393 n04371774 n02088094 n03887697 n03452741 n07802026 n02509815 n03347037 n03983396 n01774750 n02879718 n03888257 n01796340 n07717556 n02112706 n01742172 n12998815 n03271574 n01775062 n02112706 n04153751 n04350905 n02481823 n02487347 n01950731 n02667093 n02089973 n04592741 n03393912 n02840245 n02006656 n01498041 n04548362 n02782093 n09193705 n02443114 n01773549 n02093428 n04116512 n01770393 n02128925 n02939185 n04133789 n02777292 n03976657 n03876231 n02443114 n04590129 n02114855 n04335435 n03372029 n04418357 n02109961 n02088094 n02279972 n03657121 n04482393 n04229816 n02264363 n04136333 n02027492 n03617480 n07753592 n03459775 n04154565 n03425413 n01955084 n03127925 n02017213 n02437616 n01774384 n07760859 n01818515 n03000684 n02128385 n04487081 n02105505 n03376595 n02130308 n02108000 n03042490 n02992211 n07718472 n02417914 n02701002 n02058221 n03888605 n01694178 n01855672 n02168699 n02676566 n04507155 n03777754 n01704323 n02088094 n03444034 n02883205 n02909870 n02787622 n02102973 n02514041 n03085013 n04328186 n02494079 n02093428 n01986214 n03594945 n01847000 n02110958 n04252077 n03041632 n09421951 n03776460 n03676483 n02804610 n02112350 n02096294 n02108089 n03690938 n04372370 n03877845 n02111500 n04476259 n02104029 n02085782 n03424325 n01943899 n02443114 n02865351 n02129604 n04487394 n02493509 n03026506 n04136333 n04507155 n04356056 n04039381 n03944341 n03947888 n02098105 n02133161 n02841315 n04251144 n02094114 n04505470 n01829413 n02493509 n11879895 n07875152 n01983481 n02500267 n02085620 n13040303 n03902125 n12620546 n03599486 n03891332 n02102480 n04118538 n01807496 n01860187 n03444034 n01491361 n07831146 n02666196 n02892767 n13040303 n03032252 n02125311 n02168699 n02117135 n02395406 n01537544 n07753275 n04428191 n02109961 n04235860 n02417914 n04584207 n04070727 n01873310 n02749479 n02769748 n07714571 n04367480 n02012849 n01665541 n02167151 n02088466 n03527444 n04409515 n02013706 n03325584 n02441942 n07613480 n02101006 n02088632 n02129604 n01685808 n02966687 n04367480 n03908618 n02977058 n04111531 n03042490 n03717622 n06785654 n02980441 n01968897 n01843065 n04554684 n04523525 n04417672 n01855672 n03873416 n02100877 n02105505 n03492542 n01833805 n04116512 n04487394 n02105505 n03297495 n02119022 n04392985 n02108422 n02098413 n02012849 n04487394 n01990800 n02817516 n03216828 n03187595 n07871810 n02669723 n02229544 n02966687 n02113712 n03930313 n03417042 n02389026 n03249569 n03633091 n02096294 n02110627 n03916031 n07920052 n04146614 n03207743 n02325366 n03954731 n04133789 n03788195 n03982430 n02112706 n02017213 n02492660 n03976467 n03792782 n02123159 n07754684 n03444034 n03063599 n02326432 n02009912 n04154565 n03492542 n03649909 n02101388 n02091134 n02892201 n02077923 n02168699 n04239074 n03899768 n04461696 n03124170 n09428293 n03000247 n01558993 n02104365 n02093991 n03837869 n02169497 n03492542 n03706229 n02129165 n03216828 n03662601 n02444819 n03930313 n04039381 n01601694 n04228054 n02788148 n03133878 n01983481 n02093859 n02106166 n02102973 n03982430 n02667093 n03891332 n01592084 n02172182 n03404251 n02259212 n03250847 n02817516 n07747607 n03063599 n03935335 n02085620 n02092002 n02999410 n02504458 n03100240 n04392985 n02105855 n07718747 n03721384 n02483362 n01629819 n02107683 n02951358 n07920052 n03733805 n02483362 
n01798484 n04418357 n04251144 n03197337 n03908618 n01978287 n01817953 n04486054 n04127249 n01945685 n07711569 n02088238 n02105641 n02910353 n07892512 n01484850 n03657121 n02859443 n07860988 n04141327 n03868863 n01768244 n03657121 n02102973 n02111500 n01632458 n02319095 n04328186 n04311004 n01558993 n01773549 n01622779 n02442845 n07768694 n01632777 n03733805 n03133878 n02012849 n03496892 n02066245 n02094433 n03271574 n02128757 n03792782 n02018795 n01630670 n02101006 n04067472 n02100583 n04317175 n03602883 n04141327 n02102040 n07875152 n02892201 n04127249 n07753275 n04355338 n02236044 n01749939 n07717556 n02317335 n02606052 n04483307 n04435653 n04264628 n04347754 n04179913 n07583066 n04146614 n03478589 n03599486 n02676566 n02264363 n04371430 n03782006 n04604644 n03180011 n03045698 n03887697 n02085936 n07614500 n04296562 n02074367 n01729977 n02018795 n01735189 n03777568 n03775546 n02091244 n03838899 n04357314 n01945685 n03788365 n02441942 n04429376 n02119022 n01945685 n03627232 n02056570 n02437616 n03590841 n01491361 n01871265 n04442312 n01833805 n04596742 n04553703 n04487394 n03763968 n02514041 n11879895 n04525038 n02510455 n04275548 n01531178 n04162706 n03240683 n04589890 n03871628 n04443257 n02655020 n04264628 n01843383 n02138441 n02091032 n02281406 n03272010 n03775546 n03345487 n03532672 n02814860 n07714571 n02423022 n03187595 n03992509 n03933933 n03956157 n07920052 n01981276 n03710721 n04201297 n09472597 n02097130 n02111889 n03929660 n02804610 n03961711 n07613480 n01755581 n02277742 n03452741 n02396427 n01514859 n04590129 n04116512 n01631663 n07711569 n02134084 n04332243 n04517823 n01558993 n02817516 n02088632 n03457902 n01775062 n02328150 n02804610 n02077923 n02129604 n02095314 n03388183 n02536864 n03134739 n03014705 n02423022 n04254120 n03776460 n03788195 n03637318 n02112706 n03777568 n02089078 n03838899 n03661043 n02687172 n02097658 n02395406 n01820546 n03788365 n02963159 n02097298 n07717556 n02114367 n02219486 n04442312 n04536866 n02979186 n04458633 n07584110 n03633091 n04501370 n03000684 n02417914 n02093859 n04228054 n03478589 n02112137 n03642806 n02113712 n02817516 n03980874 n01644900 n11879895 n04347754 n03788195 n02825657 n02119789 n02128925 n02129604 n04523525 n04162706 n03000247 n04347754 n02447366 n02096294 n02002724 n02098413 n03467068 n01582220 n02002556 n03063689 n01855672 n02971356 n02086240 n02817516 n01930112 n02490219 n09428293 n02091467 n03710637 n02917067 n06596364 n01532829 n02056570 n04560804 n01735189 n04557648 n07711569 n06785654 n04118776 n02860847 n02007558 n02356798 n04070727 n02489166 n07714990 n02104365 n02007558 n03649909 n01667114 n01641577 n03028079 n03494278 n07880968 n03775071 n01632458 n01990800 n02442845 n02119022 n02006656 n02701002 n02483362 n03124170 n01531178 n02704792 n02099849 n01873310 n01735189 n04462240 n03065424 n04398044 n04120489 n04330267 n03967562 n02099601 n03388043 n02100583 n02093991 n09399592 n01773797 n03761084 n02342885 n02206856 n02098286 n03207743 n13040303 n01629819 n02927161 n04125021 n04554684 n02328150 n03476684 n02114367 n03793489 n03633091 n03930630 n02871525 n02097474 n02113799 n02408429 n03899768 n07831146 n04525038 n02808304 n03724870 n02033041 n02110063 n03063689 n01855672 n02395406 n04254680 n03063689 n02487347 n02640242 n03457902 n12267677 n04482393 n04009552 n02174001 n01990800 n04209133 n01950731 n02113186 n03095699 n01770081 n04127249 n02971356 n02490219 n04044716 n01667778 n03710721 n03141823 n04099969 n02325366 n04599235 n01978455 n03599486 n02090622 n03630383 n02117135 n02037110 n02219486 n03297495 n02105505 
n04263257 n02442845 n04266014 n03393912 n02115641 n02883205 n01729977 n03047690 n02361337 n04560804 n02106662 n03876231 n03041632 n02098105 n01560419 n02089078 n03218198 n04153751 n02123597 n03584829 n02930766 n03781244 n02264363 n07711569 n04418357 n06596364 n03345487 n02835271 n04467665 n03450230 n03692522 n03929660 n03935335 n01630670 n02120505 n02172182 n03777754 n04209133 n01687978 n03481172 n02088094 n02112350 n03982430 n02124075 n03854065 n04141076 n06785654 n02981792 n03207941 n03028079 n13133613 n02423022 n03777568 n02328150 n02037110 n02092002 n02655020 n04443257 n02963159 n01687978 n09193705 n10148035 n03065424 n03792972 n02013706 n01494475 n07860988 n02099267 n04355933 n02457408 n01943899 n03733131 n04252077 n02978881 n03868863 n03544143 n03692522 n12768682 n02088094 n04023962 n02793495 n03840681 n01773549 n03843555 n04482393 n07753592 n03673027 n07930864 n01685808 n02037110 n02787622 n06596364 n02033041 n04204238 n12267677 n02321529 n03404251 n03000684 n07753592 n03804744 n01514668 n03594945 n02110627 n03793489 n04243546 n02490219 n02817516 n03291819 n02100877 n01440764 n04209239 n02088364 n04590129 n02110806 n09229709 n02447366 n04606251 n04562935 n02128385 n02837789 n02363005 n04133789 n02165456 n03649909 n03661043 n02107683 n01688243 n01843383 n03891251 n12620546 n03832673 n03452741 n04074963 n04228054 n03982430 n01795545 n02877765 n03196217 n04435653 n02105505 n04467665 n07695742 n02672831 n03690938 n04456115 n04125021 n15075141 n03761084 n04487394 n02108089 n07932039 n01806567 n02089078 n02028035 n03623198 n02108551 n01632458 n03445924 n01739381 n03887697 n07836838 n02364673 n03355925 n02113799 n04476259 n02437312 n03534580 n03841143 n03131574 n07697537 n01818515 n03929660 n02093647 n02892767 n03916031 n04081281 n04443257 n02441942 n01534433 n01843383 n02951358 n02089078 n03874293 n03127925 n02094258 n04366367 n03485407 n04597913 n01755581 n01795545 n01601694 n01944390 n03124170 n02395406 n03594734 n01685808 n01582220 n02110627 n03991062 n02699494 n09472597 n02500267 n03476991 n02963159 n02089867 n01697457 n03347037 n01806143 n02074367 n02699494 n04090263 n03763968 n02422699 n04070727 n01694178 n01797886 n03459775 n03977966 n01751748 n03803284 n01950731 n01532829 n02454379 n02051845 n03976657 n07248320 n07753275 n09332890 n02002556 n03602883 n12057211 n02123045 n02950826 n02219486 n02115641 n02085936 n02951585 n02111889 n02102480 n01443537 n02105162 n02794156 n04479046 n03047690 n02105412 n02692877 n01739381 n07930864 n04552348 n02835271 n01531178 n04120489 n01582220 n02840245 n02422106 n01697457 n03075370 n04136333 n03874599 n03492542 n02389026 n03207743 n02089867 n04136333 n06359193 n02106382 n02101006 n02091467 n03325584 n01616318 n02804610 n07717556 n02111500 n01608432 n02007558 n03887697 n02107142 n02641379 n07734744 n03710193 n02231487 n02028035 n04296562 n04009552 n02977058 n03710721 n03884397 n03775546 n07892512 n04254777 n07697537 n03792782 n02102480 n03000247 n02117135 n01796340 n02892201 n04254680 n04040759 n01773549 n04040759 n03124170 n02790996 n04037443 n02033041 n04509417 n01484850 n03697007 n04208210 n04209133 n02497673 n03840681 n03785016 n04086273 n02085936 n02134084 n03404251 n02098286 n07734744 n03998194 n02086910 n03250847 n03983396 n04336792 n03457902 n03026506 n03980874 n01818515 n04507155 n03933933 n13037406 n04235860 n02504013 n03297495 n02802426 n01491361 n02916936 n01755581 n02727426 n04228054 n03584254 n04317175 n01667114 n04486054 n02110341 n04465501 n02974003 n12768682 n12998815 n02111129 n11879895 n03775546 n03496892 n03791053 n01768244 
n09421951 n04192698 n04517823 n02514041 n12985857 n13054560 n04330267 n03388549 n04254120 n04423845 n11879895 n02776631 n02137549 n03495258 n03355925 n02486410 n02749479 n03187595 n03388043 n04005630 n02100877 n07714990 n06359193 n02096051 n02105641 n07579787 n09472597 n04355338 n03680355 n02730930 n03874599 n02730930 n04552348 n03535780 n01753488 n02012849 n01704323 n02097209 n03908714 n04589890 n04372370 n01443537 n03457902 n04238763 n09246464 n01739381 n02488702 n04026417 n01530575 n07749582 n02102480 n04557648 n02096585 n01740131 n04389033 n03314780 n07875152 n02492660 n12057211 n04371430 n02099267 n03495258 n02096051 n02105162 n02105641 n03016953 n02808440 n03598930 n04542943 n01855672 n03733281 n07717410 n02504013 n02091831 n04133789 n04356056 n02879718 n03891251 n03379051 n02113978 n09288635 n02444819 n01945685 n03980874 n02526121 n02101556 n04040759 n02009229 n03837869 n04311174 n07583066 n02777292 n03950228 n02129165 n02114548 n02100735 n04590129 n03400231 n03868242 n02074367 n06874185 n04141327 n01833805 n09288635 n04070727 n02795169 n03944341 n01560419 n03187595 n02092339 n03388043 n03255030 n04532670 n02120505 n02894605 n02101388 n01608432 n03995372 n02259212 n03908618 n03223299 n02107683 n07932039 n03063689 n01629819 n03982430 n03188531 n01748264 n03877472 n02115913 n01748264 n04350905 n04070727 n02643566 n02966193 n01770393 n02672831 n02494079 n02930766 n03259280 n02442845 n03903868 n03710721 n02690373 n01531178 n01496331 n03710721 n02088094 n07717556 n03920288 n02089078 n02109525 n02808304 n03447447 n04548280 n02906734 n07716358 n01774384 n03637318 n02909870 n03788195 n02699494 n04355338 n02095889 n02606052 n03623198 n01641577 n01669191 n02457408 n03627232 n02769748 n04311004 n03584254 n03220513 n03530642 n04285008 n01644373 n09421951 n03733281 n03047690 n02808304 n03720891 n02437616 n07684084 n01749939 n04409515 n02494079 n02948072 n02110806 n02077923 n01924916 n01496331 n04604644 n02667093 n02107142 n01692333 n04277352 n04254777 n02676566 n12144580 n03630383 n02095889 n03666591 n03937543 n01498041 n03272562 n09472597 n03223299 n04456115 n02099601 n03000134 n02951585 n03717622 n01910747 n06596364 n01820546 n02018795 n04264628 n02096177 n01944390 n01978287 n01818515 n03125729 n02093256 n01855032 n02009912 n02097047 n02113712 n01883070 n01774750 n01665541 n02093428 n01980166 n04392985 n03947888 n02690373 n02090721 n04023962 n03476684 n04389033 n03729826 n02910353 n01632458 n02167151 n02676566 n03045698 n01770081 n04238763 n10148035 n04344873 n02481823 n04467665 n02013706 n02088238 n02877765 n01833805 n07718747 n02091467 n03627232 n04141076 n04209239 n01950731 n04467665 n03976657 n03729826 n04398044 n07754684 n04465501 n01776313 n02111129 n03207743 n03201208 n01847000 n02085936 n03710721 n04599235 n02817516 n02807133 n04389033 n02840245 n04423845 n07718472 n02356798 n02167151 n02966687 n02790996 n02840245 n02342885 n02437312 n07716906 n02233338 n03379051 n01990800 n02443114 n01498041 n03337140 n02165105 n04525305 n02226429 n01558993 n02110341 n04069434 n01644900 n02096177 n04347754 n03127747 n02106382 n01608432 n02412080 n02134084 n04486054 n04026417 n02437616 n04081281 n04417672 n02018207 n03018349 n03595614 n02120079 n03388183 n03902125 n02403003 n03933933 n09193705 n01872401 n03534580 n02129165 n03710193 n01981276 n02259212 n07873807 n01843065 n02457408 n02837789 n02177972 n02951585 n02101006 n02965783 n04482393 n01616318 n04465501 n03485407 n02086646 n02085620 n02361337 n01753488 n04579145 n01682714 n02105641 n04065272 n01968897 n02102973 n12144580 n04372370 n02127052 
n02690373 n02895154 n04049303 n03676483 n02268443 n02869837 n02206856 n04201297 n02091244 n02101556 n02843684 n04380533 n07753275 n01534433 n02027492 n02971356 n04118538 n03384352 n03444034 n03676483 n03495258 n02666196 n01756291 n03482405 n02098413 n04355933 n03841143 n02120079 n02417914 n03857828 n02114712 n01729977 n01770081 n03733131 n03793489 n03590841 n02088364 n01847000 n11939491 n03724870 n02025239 n07717556 n02119789 n03016953 n02129165 n04033901 n02790996 n02012849 n02099429 n03691459 n04330267 n10148035 n03888257 n07584110 n02096437 n04515003 n02804610 n02096437 n04418357 n02033041 n02092339 n12620546 n01669191 n03160309 n02112137 n02172182 n03110669 n04380533 n03673027 n03347037 n04201297 n02492660 n02110958 n02783161 n02483708 n02110958 n04120489 n03908618 n02423022 n04350905 n04153751 n02444819 n02114548 n07747607 n07614500 n04070727 n04074963 n01616318 n02112706 n02096437 n04228054 n01644900 n01756291 n02442845 n03980874 n02441942 n04149813 n03950228 n01843383 n02910353 n03207743 n04263257 n02099429 n04486054 n02606052 n04238763 n02099601 n02177972 n03584829 n04356056 n03673027 n02086646 n04485082 n02692877 n03761084 n03249569 n04252077 n02092339 n01770081 n02877765 n02129604 n03032252 n13044778 n02607072 n03498962 n02120505 n01534433 n01491361 n07730033 n02098413 n02793495 n02017213 n02100877 n02948072 n02398521 n03498962 n02494079 n04026417 n03259280 n04209133 n02094258 n02028035 n03627232 n03529860 n02077923 n03843555 n03873416 n02116738 n03995372 n02104365 n04347754 n04590129 n03657121 n01774384 n03937543 n07836838 n04127249 n02391049 n04296562 n02492035 n04254120 n04201297 n02115641 n02094258 n03729826 n02090379 n02165456 n02107142 n01518878 n03649909 n01558993 n01843383 n01695060 n02134084 n02101556 n02123045 n03929855 n02110185 n03291819 n02099601 n04443257 n02487347 n01795545 n04458633 n02229544 n03325584 n04086273 n03017168 n01729977 n03388043 n01675722 n02009229 n03126707 n02117135 n03873416 n04332243 n02486410 n03394916 n02480855 n02837789 n03018349 n03998194 n04317175 n01819313 n03291819 n01664065 n02128385 n02417914 n04040759 n01440764 n09468604 n03240683 n07248320 n11939491 n02971356 n02096437 n02101556 n04467665 n03983396 n04146614 n04252077 n03476684 n02777292 n03617480 n04004767 n02102177 n02088632 n07749582 n04264628 n04487081 n02808440 n04399382 n03961711 n04229816 n03977966 n03133878 n03877845 n03995372 n04131690 n02093754 n02110806 n01872401 n02106662 n07836838 n04553703 n02095314 n12620546 n02231487 n02277742 n04456115 n02643566 n02317335 n04008634 n04476259 n04550184 n02107908 n02125311 n03355925 n03769881 n07615774 n02443114 n02167151 n04590129 n12620546 n02177972 n03866082 n07718472 n02102318 n07697313 n03384352 n04330267 n03874293 n03895866 n02444819 n03908714 n02395406 n04355933 n03220513 n04147183 n02099267 n01983481 n01770081 n02095570 n01695060 n02115641 n04355338 n07584110 n02843684 n04023962 n02102480 n04116512 n02094258 n04326547 n02951358 n01784675 n03494278 n03935335 n02106662 n02256656 n03944341 n02105641 n02666196 n03982430 n02814533 n04204238 n07730033 n01807496 n03042490 n02963159 n02504458 n03535780 n04355933 n02009229 n02423022 n01582220 n07614500 n02321529 n03272562 n03642806 n04251144 n02115913 n02107312 n03924679 n02699494 n03908714 n04522168 n09246464 n03617480 n02231487 n02127052 n04335435 n02804610 n02437616 n03249569 n01682714 n02790996 n03742115 n02112350 n02837789 n04371774 n03443371 n02992529 n01688243 n03733281 n07875152 n02105641 n02110958 n02018795 n04482393 n03063689 n02328150 n02109525 n02071294 n02808304 n03530642 
n03970156 n01860187 n02102973 n03220513 n03032252 n01797886 n03792782 n02085936 n04487394 n02790996 n01773157 n04367480 n03290653 n03478589 n04542943 n07579787 n02190166 n06785654 n02002724 n01740131 n04033995 n01978287 n02011460 n03937543 n02096437 n01534433 n02978881 n03445924 n07716358 n02093428 n01776313 n02704792 n01687978 n04550184 n02102973 n02165456 n03347037 n01755581 n02111889 n03967562 n01491361 n02437616 n02089078 n02123597 n04507155 n03110669 n03868242 n03874599 n02120505 n03930313 n02165105 n04604644 n03445777 n02099712 n02009229 n04389033 n04371774 n02437616 n04243546 n03794056 n03775071 n04479046 n03796401 n02892767 n03929660 n02133161 n03944341 n03884397 n04589890 n03590841 n02071294 n04263257 n01768244 n02410509 n04465501 n02098286 n02747177 n02105162 n01667114 n02999410 n01560419 n07749582 n01968897 n02130308 n02110806 n02106382 n07590611 n07697537 n04591157 n04462240 n02988304 n03126707 n02727426 n04127249 n02843684 n03179701 n02443484 n04344873 n02280649 n03216828 n12985857 n04548280 n03602883 n03447721 n01694178 n02415577 n02699494 n03085013 n02895154 n04371774 n03495258 n03791053 n02641379 n02980441 n02950826 n02110063 n03788195 n01693334 n02606052 n07742313 n02113624 n03874293 n04209239 n03388043 n02927161 n03944341 n04579432 n03759954 n02101388 n01978287 n03443371 n02129604 n01693334 n07742313 n01770393 n06785654 n03126707 n02058221 n03721384 n02093647 n07684084 n03775546 n03494278 n03131574 n02823428 n02111889 n04208210 n02190166 n04228054 n03888257 n02169497 n01770081 n02974003 n03637318 n02089078 n02117135 n02457408 n02606052 n03877845 n02776631 n01882714 n03325584 n02095314 n02102973 n02236044 n02090622 n02797295 n01775062 n02098286 n03498962 n02128385 n02783161 n07768694 n03337140 n01751748 n04447861 n02172182 n03743016 n03599486 n04380533 n07892512 n03598930 n02085782 n01685808 n02879718 n01491361 n04273569 n02441942 n04553703 n03649909 n03141823 n02115641 n04372370 n04265275 n04493381 n06596364 n02825657 n02480495 n02097298 n03532672 n01531178 n03843555 n03770679 n02346627 n02127052 n03297495 n02869837 n02106166 n01440764 n02510455 n02095570 n02177972 n03347037 n01978455 n02488702 n02791124 n04229816 n01675722 n03630383 n01930112 n04005630 n04039381 n03950228 n04592741 n01914609 n02129165 n01871265 n03902125 n01689811 n03534580 n01945685 n01773549 n02089867 n03788195 n02788148 n02113023 n03534580 n04592741 n02797295 n03017168 n04355933 n02097209 n02167151 n04026417 n03271574 n02105251 n04004767 n02108000 n04350905 n02106662 n03201208 n03126707 n01443537 n02837789 n02165456 n03796401 n02870880 n02641379 n01622779 n02113023 n07880968 n02165456 n03840681 n03372029 n04044716 n03840681 n03692522 n03992509 n02085620 n03530642 n02113186 n02086079 n07614500 n09468604 n03602883 n09468604 n04270147 n04146614 n02892201 n03958227 n03832673 n02268443 n02236044 n01494475 n02009912 n01532829 n02093754 n03404251 n03770439 n07734744 n04252077 n07714571 n02120079 n01665541 n02123394 n03240683 n04264628 n02457408 n07614500 n02124075 n03425413 n03133878 n07930864 n03160309 n02484975 n02086240 n02978881 n04404412 n02643566 n02494079 n02749479 n02114855 n02106166 n02114712 n03662601 n07583066 n02396427 n02108089 n04335435 n03017168 n02113186 n04493381 n02909870 n03075370 n03627232 n03794056 n01734418 n02951358 n02457408 n02883205 n02917067 n03250847 n02804610 n02110958 n02088364 n03891251 n02641379 n02098105 n02113624 n02027492 n02066245 n02168699 n06359193 n03627232 n09229709 n02749479 n04355338 n04252225 n02939185 n01632777 n02395406 n02219486 n02988304 n01518878 n03891332 
n02114548 n02892767 n01491361 n03933933 n02795169 n09472597 n07579787 n03032252 n02093754 n13054560 n03891251 n02105505 n02132136 n07873807 n02640242 n04461696 n04613696 n09468604 n02113186 n02493509 n04553703 n01968897 n04296562 n03467068 n03763968 n04209239 n02219486 n03888257 n01871265 n03325584 n03272562 n03854065 n01558993 n03670208 n01665541 n03325584 n01695060 n02457408 n02797295 n02950826 n02099429 n03291819 n02939185 n03976467 n02120079 n02879718 n04579145 n04120489 n01632458 n02009912 n04328186 n06874185 n02398521 n02488291 n02107312 n03026506 n02119022 n01843383 n03657121 n03062245 n07584110 n02091032 n03476991 n02013706 n02607072 n02113712 n03788365 n04355338 n04428191 n04442312 n01753488 n12620546 n03417042 n02108089 n07871810 n03930313 n04019541 n04074963 n02408429 n02817516 n01955084 n02747177 n09472597 n03866082 n02099267 n03782006 n03998194 n02823428 n04487081 n03956157 n03854065 n02002556 n01440764 n02093256 n02229544 n02109047 n03160309 n02825657 n02423022 n03016953 n04179913 n01860187 n02107574 n06359193 n02088094 n04065272 n02088632 n02130308 n03769881 n02966193 n06794110 n07590611 n03924679 n04153751 n02112706 n02509815 n04335435 n04579432 n02815834 n02361337 n02123159 n03133878 n02457408 n02092002 n04347754 n03775071 n03498962 n02101388 n03447447 n02443114 n04039381 n02791124 n02104365 n01776313 n04442312 n03584254 n02094258 n02086646 n04370456 n01797886 n03724870 n01775062 n02687172 n02091244 n03124043 n01632777 n02787622 n01930112 n01664065 n01734418 n02110063 n01818515 n04336792 n03793489 n02097298 n02017213 n04273569 n03485794 n02002724 n04507155 n11879895 n02087046 n02486410 n04033995 n03345487 n03692522 n04347754 n01986214 n03873416 n03483316 n02101556 n03425413 n03000684 n02114367 n02113712 n03535780 n02454379 n03788195 n02086240 n02095889 n02422699 n03400231 n03690938 n01494475 n02099601 n04612504 n07753275 n03814639 n02165105 n03314780 n03478589 n01796340 n02105641 n01847000 n01877812 n02447366 n03929660 n02992529 n02088094 n07745940 n04522168 n04069434 n12620546 n03673027 n03998194 n03028079 n04252225 n02033041 n01843065 n07720875 n02099712 n02939185 n02098413 n04296562 n03796401 n01729977 n02859443 n02105251 n02860847 n04209133 n02108000 n04235860 n02782093 n02814533 n01614925 n01484850 n01669191 n04525305 n07716906 n02119022 n03721384 n02259212 n03976657 n02415577 n04392985 n04023962 n02793495 n04592741 n02233338 n02777292 n01514859 n03127747 n04548362 n03947888 n03792782 n03445777 n04592741 n02165105 n02105056 n04525038 n02395406 n02129604 n09399592 n09229709 n06785654 n03045698 n04380533 n02835271 n07715103 n03692522 n02950826 n02259212 n03773504 n04560804 n04355933 n02167151 n01695060 n02091635 n07745940 n03958227 n03642806 n01537544 n03733131 n02028035 n02667093 n03617480 n02443484 n04532106 n06874185 n02730930 n01632458 n04067472 n09246464 n02264363 n09229709 n02708093 n03804744 n03042490 n03347037 n02120079 n02098105 n02092339 n03017168 n02099429 n03160309 n12267677 n03642806 n07579787 n02817516 n01770393 n01667114 n04417672 n04515003 n02091134 n02090721 n04428191 n02086646 n04536866 n03000684 n01692333 n04591157 n03967562 n03743016 n04579145 n02110063 n04040759 n02074367 n03100240 n04552348 n02916936 n03485407 n02489166 n03271574 n01677366 n02457408 n02966193 n04152593 n01491361 n01748264 n03530642 n03840681 n01768244 n02226429 n03642806 n02002556 n03598930 n01631663 n03787032 n03954731 n04462240 n03680355 n02013706 n03271574 n04357314 n02397096 n01697457 n02441942 n03661043 n01985128 n03658185 n02099267 n04522168 n13037406 n02108422 n04111531 
n01728920 n02085620 n01644373 n02101388 n02795169 n02100877 n04509417 n02088466 n02769748 n02965783 n03649909 n03179701 n01742172 n01877812 n03769881 n03000247 n02106662 n03888605 n03937543 n04346328 n03976467 n03187595 n15075141 n03062245 n03710721 n04009552 n02447366 n02107574 n03970156 n03991062 n02098413 n07892512 n03529860 n03935335 n01531178 n02835271 n03787032 n02101388 n02085620 n02701002 n11939491 n01698640 n02233338 n11879895 n02101556 n07753592 n02441942 n07871810 n01914609 n02132136 n02097658 n07720875 n02259212 n01560419 n02510455 n04200800 n04254777 n01616318 n04522168 n02100236 n04356056 n07615774 n03160309 n02666196 n02169497 n03207941 n07831146 n04131690 n04136333 n02895154 n02002556 n04311174 n04243546 n13052670 n02895154 n03527444 n02090622 n04429376 n01667778 n01871265 n01608432 n03424325 n02111129 n02094114 n03706229 n02883205 n07590611 n02948072 n01770393 n03290653 n02128925 n02110185 n02110341 n01796340 n02342885 n02487347 n04310018 n02091635 n02708093 n03016953 n02264363 n04372370 n03272562 n02089078 n03764736 n02963159 n03874599 n02641379 n01984695 n02802426 n02346627 n03773504 n04273569 n02111889 n03498962 n03141823 n04350905 n02095314 n04335435 n03388183 n01537544 n03947888 n02106662 n03854065 n01484850 n02086079 n07714571 n01768244 n04070727 n03494278 n03584829 n03837869 n01945685 n03733281 n04429376 n02099601 n04554684 n04509417 n01943899 n07565083 n04515003 n03777754 n03594734 n03777568 n03840681 n02536864 n04442312 n03127747 n03445777 n04579432 n03063599 n02113978 n03787032 n01742172 n02487347 n04486054 n02093859 n04162706 n02328150 n03482405 n04517823 n07615774 n04192698 n02808304 n02037110 n04254120 n02490219 n07684084 n02094258 n02814533 n02174001 n07753275 n04033901 n02481823 n03770679 n03134739 n01560419 n04275548 n01667778 n01737021 n01806567 n04456115 n07613480 n01737021 n03761084 n07753592 n04461696 n04336792 n02137549 n02100735 n04005630 n02112706 n12144580 n03785016 n03372029 n04486054 n02117135 n01667778 n02927161 n07760859 n03924679 n04040759 n07742313 n02106030 n03388549 n03950228 n01768244 n07734744 n04479046 n02791124 n01807496 n04357314 n01484850 n03888605 n04277352 n04326547 n03876231 n07584110 n02092002 n01667778 n01682714 n02091831 n02108089 n02951585 n02219486 n02090379 n01950731 n02089867 n01828970 n03837869 n01978287 n02092002 n02814533 n01664065 n12768682 n07930864 n04357314 n02802426 n02089867 n03063689 n03535780 n04591713 n03796401 n02877765 n02823428 n07717410 n04612504 n03642806 n04033995 n02095889 n04074963 n01855032 n04270147 n03110669 n03255030 n03530642 n10148035 n07745940 n02490219 n02074367 n02097130 n02106662 n03891332 n02089973 n04209239 n04548280 n04154565 n02037110 n02113978 n02115913 n02018795 n02823428 n02091032 n03874293 n04146614 n04560804 n04522168 n07717556 n04311004 n02105855 n02109961 n02134084 n02930766 n01855032 n02480495 n02509815 n02100877 n02795169 n02125311 n01734418 n03124043 n02165105 n02840245 n03759954 n01622779 n02442845 n04328186 n04152593 n04554684 n02965783 n02510455 n03445777 n07615774 n12998815 n07717410 n03742115 n04264628 n02165456 n04074963 n02098105 n02132136 n01872401 n02441942 n04560804 n02422699 n02802426 n07768694 n01518878 n02096051 n02786058 n02483708 n02099601 n04435653 n01630670 n02177972 n13052670 n02028035 n01978455 n13054560 n02165105 n04317175 n01739381 n02168699 n02483362 n02342885 n02007558 n01798484 n04579145 n02361337 n02643566 n04147183 n04208210 n01798484 n02488291 n03773504 n03662601 n02483708 n01986214 n04005630 n02165105 n02009229 n03814639 n04462240 n02090379 n03786901 
n01734418 n01770081 n02814533 n03445777 n03196217 n02747177 n02493793 n03970156 n02165105 n03930313 n02169497 n04204347 n02113712 n02979186 n02085782 n04265275 n01694178 n09229709 n04317175 n07760859 n02865351 n03841143 n01601694 n02128925 n03908714 n01775062 n01770393 n02877765 n03902125 n01744401 n02094114 n03271574 n04372370 n07697313 n04229816 n02692877 n01537544 n04153751 n02490219 n09193705 n02951585 n01986214 n02865351 n02105855 n04392985 n03825788 n04265275 n12267677 n03787032 n02088632 n04507155 n03481172 n03868242 n02797295 n02500267 n02480855 n03956157 n02948072 n03792782 n03478589 n04590129 n01729322 n02105056 n02837789 n03393912 n02319095 n02100735 n02093256 n03782006 n03388043 n03891251 n02391049 n02167151 n03045698 n01534433 n04067472 n02105641 n04423845 n01983481 n03160309 n02802426 n09428293 n02106382 n04325704 n02444819 n01755581 n02895154 n02129604 n02910353 n07873807 n07716358 n03325584 n02104029 n01883070 n02408429 n02992529 n02111277 n04141327 n02098105 n12998815 n04133789 n02837789 n02321529 n04041544 n03131574 n01968897 n03721384 n09428293 n03637318 n04536866 n01641577 n01828970 n02794156 n02105855 n02825657 n02100735 n02487347 n02281406 n04550184 n02804414 n03594734 n01806143 n09256479 n04204238 n03544143 n04350905 n04380533 n03459775 n04509417 n02480495 n04204347 n03967562 n03666591 n03481172 n03179701 n01728920 n09835506 n02509815 n11939491 n02125311 n01774750 n01924916 n04380533 n03496892 n02510455 n02808304 n04328186 n04009552 n02105505 n02454379 n04507155 n01592084 n04118538 n01644373 n02965783 n03742115 n07715103 n03733281 n02268853 n03967562 n02107574 n04597913 n01798484 n04562935 n04584207 n07717556 n02110958 n04597913 n07693725 n02086910 n04136333 n01843383 n02794156 n02101556 n04192698 n02389026 n03250847 n01817953 n01682714 n01491361 n06874185 n02093647 n02483362 n04435653 n01667778 n04548280 n03133878 n02840245 n01950731 n04229816 n01817953 n04346328 n07871810 n04493381 n03476684 n01882714 n03100240 n02105505 n03623198 n02128925 n07749582 n03124170 n03042490 n01531178 n03180011 n02276258 n03538406 n01843383 n01833805 n02109047 n01735189 n01514859 n02396427 n01537544 n07920052 n02077923 n03661043 n03445924 n01514859 n04418357 n01630670 n02256656 n02980441 n01985128 n03787032 n09399592 n02096177 n03095699 n02791270 n02002556 n02099429 n02687172 n04487081 n03775071 n04120489 n02100877 n04131690 n02111277 n04008634 n03796401 n03690938 n03496892 n02487347 n02098286 n04398044 n02281787 n02641379 n03179701 n03110669 n03314780 n03388549 n02441942 n02091831 n03933933 n07584110 n02510455 n02437312 n02417914 n02110806 n02667093 n03384352 n03529860 n04209239 n04254120 n04310018 n07615774 n01984695 n03188531 n02701002 n01749939 n03494278 n04317175 n02480855 n04553703 n04591713 n02093991 n03496892 n03498962 n02870880 n07734744 n02090622 n02095889 n03089624 n03814906 n01443537 n03775546 n03895866 n04254680 n02093991 n02094433 n03709823 n04133789 n04356056 n09421951 n03781244 n03970156 n03709823 n03873416 n03950228 n03425413 n09229709 n03141823 n03290653 n01675722 n04259630 n04613696 n03838899 n01443537 n03617480 n02112350 n01774384 n02108915 n03876231 n02099429 n02226429 n01770393 n01694178 n06794110 n03220513 n11879895 n03124043 n02105855 n02486410 n04004767 n09835506 n07745940 n02097047 n03721384 n03133878 n02093647 n06794110 n04317175 n02134418 n02692877 n02128757 n03794056 n02727426 n01484850 n02514041 n02106382 n02097298 n04613696 n02701002 n03770439 n01855672 n02328150 n03944341 n09468604 n02281787 n04554684 n02098105 n03179701 n02174001 n02109961 n03742115 
n04562935 n03729826 n04133789 n04086273 n01514859 n04597913 n04476259 n01914609 n02095889 n03125729 n04366367 n02443114 n02098413 n03599486 n01614925 n04483307 n02105412 n01631663 n02500267 n02095889 n04264628 n07753592 n02123597 n03884397 n04579432 n03938244 n07831146 n02101006 n02092002 n02006656 n02106166 n04596742 n03770679 n04149813 n04599235 n04332243 n03379051 n01776313 n01806567 n09468604 n04554684 n02747177 n04243546 n03838899 n01855032 n01917289 n02226429 n03706229 n03843555 n07615774 n02268853 n04141975 n01728920 n01531178 n03838899 n09472597 n01847000 n13133613 n04522168 n02088466 n09193705 n03445924 n02092002 n02640242 n07742313 n04612504 n01986214 n09229709 n02488291 n02643566 n03891251 n09468604 n01983481 n07920052 n03770679 n02097130 n03769881 n03498962 n07697537 n02422699 n04254777 n03452741 n04152593 n01616318 n02259212 n03690938 n04501370 n04355933 n01498041 n04023962 n02488702 n04443257 n02091134 n02978881 n02091244 n01756291 n04120489 n04141327 n02504458 n01667778 n02108089 n03843555 n02951358 n01807496 n02102318 n07745940 n06794110 n02363005 n07753113 n01644900 n02363005 n01484850 n02105056 n02107312 n03482405 n01945685 n02823750 n02090622 n03710193 n03379051 n07873807 n04263257 n03062245 n02088632 n04208210 n04141327 n07932039 n02951358 n02790996 n02777292 n02804414 n03970156 n04501370 n02641379 n01774750 n01498041 n04116512 n02233338 n03706229 n02097047 n07697537 n02444819 n04153751 n02398521 n03908714 n02088632 n02113712 n02132136 n04258138 n03425413 n02397096 n02443484 n06785654 n04367480 n03717622 n03721384 n02981792 n01955084 n02090721 n02879718 n02113712 n02417914 n02093859 n02009912 n02006656 n01770393 n02701002 n01818515 n12998815 n03532672 n03666591 n06794110 n03110669 n03220513 n03976467 n02396427 n03888257 n02514041 n02837789 n07711569 n07613480 n03075370 n07684084 n02708093 n02099267 n03131574 n01843383 n02091032 n03796401 n04243546 n04389033 n03014705 n03868863 n01883070 n01744401 n12267677 n03876231 n01847000 n02219486 n01955084 n03089624 n04350905 n02119022 n04004767 n02793495 n03404251 n03014705 n01677366 n03690938 n04162706 n04552348 n01985128 n07873807 n02526121 n07932039 n02102973 n02108000 n04493381 n02097130 n04086273 n03832673 n02088364 n02119789 n02113712 n07716906 n03792972 n02097658 n02226429 n09428293 n02116738 n07753113 n02777292 n02017213 n04209239 n02077923 n02509815 n07716906 n02843684 n02417914 n07920052 n09288635 n01980166 n09193705 n03124043 n03944341 n02219486 n02127052 n04147183 n02106550 n04550184 n01728572 n02102480 n04371430 n03983396 n02815834 n04264628 n04356056 n02096294 n02106382 n07579787 n02536864 n03630383 n02114367 n03781244 n03271574 n01739381 n04008634 n03594734 n03201208 n02058221 n02134418 n10148035 n01631663 n02526121 n02002556 n02095314 n02098105 n04509417 n04612504 n02497673 n01580077 n01697457 n03109150 n09468604 n03874293 n02109961 n02110627 n02892201 n02088364 n03100240 n03532672 n02892767 n07860988 n03337140 n02951358 n03691459 n03134739 n02422106 n02788148 n03814906 n02444819 n06785654 n04612504 n02123394 n03042490 n04116512 n03527444 n09288635 n01983481 n09332890 n07715103 n01828970 n04037443 n03089624 n02504458 n01917289 n03223299 n02119022 n02206856 n04252077 n02012849 n02037110 n01751748 n07930864 n04131690 n07697313 n02841315 n03950228 n04254680 n04141975 n03983396 n02124075 n12998815 n03709823 n01689811 n02966687 n03590841 n02002556 n01770393 n04532106 n02109961 n04286575 n02910353 n03785016 n04125021 n04370456 n02115641 n03874293 n13054560 n02480855 n02105855 n01773157 n02108915 n02108000 n03764736 
n02231487 n04507155 n01744401 n04325704 n02526121 n04371774 n01582220 n02088094 n12267677 n07880968 n04266014 n02417914 n04270147 n07684084 n01443537 n03866082 n04179913 n02422106 n07697537 n02687172 n03803284 n01692333 n04192698 n02481823 n02115913 n03404251 n02138441 n02999410 n03388183 n02317335 n03759954 n04335435 n03814906 n03692522 n13052670 n03729826 n02790996 n02012849 n03935335 n01667114 n07836838 n01580077 n07615774 n03535780 n02226429 n03903868 n02999410 n03532672 n03498962 n01531178 n03868242 n02128757 n03793489 n01755581 n09332890 n02087394 n03920288 n02128385 n03495258 n02114712 n03976467 n04259630 n02794156 n01774384 n02091467 n04467665 n02091635 n04579432 n03599486 n02328150 n04147183 n02486410 n04252077 n02395406 n07584110 n03075370 n02138441 n02105505 n04311004 n04086273 n04435653 n04467665 n04201297 n01689811 n03345487 n02090379 n02776631 n04023962 n02114367 n13044778 n02917067 n07711569 n03452741 n01734418 n03272010 n01744401 n09399592 n02114855 n03594734 n02860847 n04141076 n02133161 n03804744 n01924916 n04532106 n01770081 n02096177 n02797295 n03188531 n04204347 n03063689 n02841315 n02276258 n02086646 n03775071 n03947888 n02137549 n03063599 n02074367 n02051845 n03832673 n03982430 n01776313 n02102177 n02106550 n03929855 n04201297 n01592084 n02906734 n03124043 n03598930 n07590611 n02091635 n02128757 n04204347 n01698640 n01955084 n03891251 n02823428 n03417042 n03666591 n03958227 n03895866 n02690373 n01667778 n02692877 n03532672 n07920052 n03924679 n03085013 n07697313 n02444819 n02992211 n07248320 n02950826 n02077923 n03786901 n03016953 n02111889 n02892201 n02786058 n02106382 n02877765 n02687172 n02747177 n02105412 n07753113 n03207743 n04418357 n02009912 n01580077 n01616318 n04273569 n01945685 n03706229 n04326547 n02105056 n13037406 n03459775 n02526121 n02837789 n04346328 n01819313 n02321529 n03916031 n03026506 n02105251 n04599235 n01518878 n02110627 n01984695 n01943899 n04069434 n02113023 n01531178 n03947888 n03733805 n03873416 n02087394 n04273569 n03690938 n02281787 n04515003 n01630670 n03445924 n04317175 n02395406 n02018207 n02128385 n03255030 n02169497 n03717622 n03602883 n02488291 n01622779 n03992509 n02877765 n03873416 n01855672 n03478589 n03404251 n07584110 n03980874 n03476684 n02138441 n02977058 n02105162 n03485407 n01616318 n02051845 n03793489 n01768244 n04209239 n03930630 n04532106 n03259280 n02841315 n02966193 n03980874 n04532106 n02981792 n01776313 n04355338 n02110341 n03697007 n02454379 n02655020 n03841143 n07584110 n02123394 n03255030 n07711569 n03724870 n03110669 n03133878 n01641577 n01644373 n04049303 n07768694 n03075370 n02823428 n02640242 n02104365 n04009552 n02129604 n03733805 n02281787 n04208210 n04067472 n01514859 n03384352 n03544143 n03355925 n01694178 n03950228 n07717556 n02317335 n02113799 n07583066 n02999410 n07760859 n02410509 n02013706 n04285008 n04296562 n03196217 n03000134 n02110627 n04442312 n02787622 n02443484 n02137549 n03337140 n03594734 n02879718 n02415577 n02092339 n03450230 n02102040 n07747607 n03085013 n03026506 n06874185 n02493793 n03532672 n01644900 n03792782 n04004767 n02966193 n01784675 n13037406 n03481172 n03775546 n04033995 n02101556 n03666591 n04317175 n01882714 n02640242 n03063689 n04560804 n01860187 n04376876 n04523525 n01833805 n02169497 n03314780 n02988304 n02168699 n04044716 n02109961 n01770393 n01531178 n04152593 n02106662 n04389033 n01735189 n07871810 n04277352 n02077923 n03347037 n02111500 n02088238 n03534580 n03314780 n02791270 n04548280 n03109150 n03944341 n02137549 n04523525 n04592741 n04266014 n01978455 n02091032 
n04398044 n02113624 n02408429 n04417672 n04009552 n02231487 n04599235 n07248320 n04086273 n04606251 n03532672 n02112137 n09256479 n04523525 n01697457 n03662601 n04070727 n02098286 n02017213 n02177972 n01689811 n03697007 n03874599 n02110185 n04417672 n04310018 n02130308 n04252077 n03534580 n01860187 n03814906 n02442845 n04487394 n02090379 n01930112 n07860988 n02869837 n02231487 n03956157 n03482405 n02489166 n02107683 n01677366 n01806143 n03775071 n02825657 n02783161 n01622779 n02268853 n04044716 n04540053 n02107142 n04487394 n03376595 n01496331 n02815834 n02099267 n04229816 n07615774 n03272562 n01855672 n02804414 n01818515 n02704792 n02483708 n01629819 n03393912 n03794056 n01644373 n02951585 n02497673 n02415577 n01871265 n07718747 n02966193 n03017168 n01530575 n02319095 n02090379 n03297495 n03388183 n03825788 n01798484 n03814906 n02027492 n02111889 n04118538 n02356798 n01983481 n01986214 n02808440 n02486261 n01751748 n03777568 n04335435 n07720875 n03633091 n03534580 n04141975 n04162706 n03998194 n07579787 n02676566 n03483316 n01693334 n04238763 n02071294 n04493381 n07875152 n01753488 n02091635 n03314780 n03291819 n03924679 n12768682 n06794110 n03291819 n03544143 n01698640 n06785654 n03782006 n04154565 n02012849 n07930864 n03017168 n04133789 n02138441 n03769881 n03773504 n07930864 n04589890 n01806143 n03207743 n02097474 n01582220 n02939185 n02640242 n02981792 n03657121 n02106166 n02666196 n01751748 n03188531 n01768244 n04429376 n02690373 n01806567 n02319095 n02107683 n04550184 n04350905 n01797886 n04447861 n04485082 n03443371 n04229816 n03443371 n04579145 n03125729 n03942813 n03649909 n02119022 n02105251 n12144580 n02992529 n01518878 n02977058 n01968897 n02233338 n03642806 n01833805 n09421951 n01985128 n01824575 n04286575 n04330267 n02106166 n07875152 n02094258 n02123394 n01537544 n04493381 n02102480 n02086240 n02085782 n03786901 n04254680 n03721384 n04311174 n04487394 n02099267 n03207941 n02883205 n02672831 n04008634 n03868863 n04251144 n03529860 n01608432 n02093647 n02028035 n03982430 n01687978 n01632458 n03125729 n02389026 n02085782 n06359193 n03459775 n01773797 n02093754 n04275548 n02120505 n03450230 n03854065 n02096177 n02112706 n02089867 n02138441 n02504458 n02865351 n04479046 n03180011 n03223299 n02804414 n02134418 n01751748 n02483708 n01692333 n02992211 n03404251 n07716906 n01924916 n07695742 n02112137 n02692877 n02423022 n02860847 n01877812 n04326547 n02051845 n01855672 n02667093 n01829413 n07760859 n01630670 n02869837 n02086910 n01740131 n02398521 n03016953 n02091134 n02096585 n02093647 n03220513 n07716906 n03188531 n03627232 n03690938 n02788148 n04254680 n02493509 n02098413 n03532672 n02111889 n01843065 n02666196 n02457408 n03785016 n02097474 n02704792 n03868863 n04540053 n03529860 n04238763 n03658185 n03970156 n04285008 n02526121 n02096585 n03814639 n03180011 n02480855 n03594945 n02101006 n04517823 n12985857 n02104029 n04111531 n01729322 n03773504 n01580077 n02098413 n04065272 n02085936 n02093859 n02104365 n09472597 n02865351 n04254680 n02951358 n02281787 n01496331 n02093256 n01910747 n04509417 n02417914 n02389026 n03666591 n06794110 n03786901 n07695742 n02133161 n04540053 n02782093 n01871265 n03690938 n02028035 n02106550 n02494079 n07831146 n01498041 n02130308 n04483307 n01820546 n02105056 n04487081 n09332890 n02437312 n03692522 n02871525 n02326432 n07749582 n02992211 n02497673 n03544143 n13052670 n13133613 n07714571 n03868863 n02606052 n02111129 n03874293 n02190166 n02226429 n02363005 n02443484 n04579145 n03425413 n03018349 n03452741 n02791124 n02346627 n02128757 n03998194 
n03530642 n01592084 n01917289 n03764736 n07615774 n03977966 n02877765 n02089973 n01986214 n01872401 n03942813 n01689811 n02834397 n07714990 n02486261 n02397096 n04467665 n02909870 n04517823 n04131690 n01728572 n01729322 n01797886 n02108551 n03866082 n01677366 n02979186 n03710637 n03933933 n03930313 n03899768 n03763968 n02326432 n02107142 n02066245 n04099969 n07860988 n07695742 n01924916 n03895866 n03788365 n01632777 n02787622 n01768244 n01768244 n03146219 n06785654 n02110341 n03400231 n02123045 n02025239 n03670208 n01784675 n03982430 n04485082 n03208938 n01990800 n03930313 n02708093 n04597913 n01796340 n02100236 n01608432 n01828970 n01614925 n03400231 n01631663 n03759954 n01872401 n01917289 n02690373 n01664065 n03016953 n04376876 n01664065 n02950826 n04557648 n02793495 n02111129 n01968897 n03781244 n07871810 n02641379 n02097209 n02109047 n03065424 n03838899 n04501370 n01753488 n04049303 n02097047 n04311004 n03538406 n03666591 n02017213 n02093647 n04409515 n03207743 n01843065 n03697007 n03291819 n03197337 n03000247 n02443484 n03891251 n02085782 n04033901 n03658185 n01819313 n03388549 n02606052 n04612504 n01582220 n02883205 n04467665 n03535780 n04326547 n03895866 n02095889 n02123045 n03777568 n01631663 n02999410 n07717410 n02837789 n04461696 n07720875 n03141823 n03216828 n04589890 n02105641 n03196217 n01797886 n07742313 n02396427 n04532106 n02655020 n02437312 n03028079 n02037110 n03788365 n01978455 n02483362 n02444819 n01580077 n04347754 n01728572 n03063689 n02106662 n02672831 n03895866 n04560804 n04540053 n02233338 n03777754 n02788148 n09472597 n02484975 n04404412 n02087046 n02089078 n03255030 n03095699 n07714990 n02641379 n03218198 n02481823 n01514859 n03337140 n04399382 n02641379 n02129604 n03982430 n04127249 n04125021 n01774384 n01740131 n02325366 n04041544 n02667093 n07836838 n01739381 n02108000 n02277742 n01950731 n03777754 n04310018 n02917067 n02835271 n04515003 n02119789 n02966687 n03085013 n12144580 n02071294 n12998815 n04162706 n03028079 n03218198 n02895154 n04562935 n07613480 n02128925 n03649909 n01629819 n01883070 n02098413 n02002724 n02106382 n01530575 n02113978 n02124075 n04332243 n02655020 n04239074 n01910747 n09399592 n02096051 n03930630 n07693725 n03933933 n03187595 n02281787 n02892201 n02108000 n01687978 n03803284 n07892512 n02074367 n03891251 n03384352 n04409515 n02107574 n01860187 n03529860 n02280649 n02860847 n03325584 n04409515 n03692522 n02089973 n02782093 n03208938 n02980441 n01693334 n01773157 n01729977 n03063689 n02865351 n03459775 n03637318 n04263257 n04604644 n04311004 n02120079 n02112018 n03196217 n01871265 n02804610 n07892512 n03124043 n02219486 n02089973 n02109047 n04040759 n07711569 n04458633 n07720875 n02277742 n01675722 n02119022 n02106030 n03763968 n02105412 n03017168 n03857828 n04346328 n04005630 n03492542 n02480495 n02090622 n03814906 n04004767 n02992529 n02692877 n09332890 n02979186 n01770393 n02129165 n02391049 n07871810 n03355925 n04398044 n07860988 n03961711 n02089973 n03404251 n02395406 n03063689 n04070727 n04552348 n02112137 n02110958 n01753488 n07697537 n04389033 n02783161 n07693725 n04286575 n07753113 n07716358 n03394916 n02093256 n01737021 n07836838 n02268853 n02130308 n02906734 n02134418 n02108000 n01560419 n03131574 n02133161 n03000247 n02279972 n02951585 n03733805 n01677366 n03976467 n03535780 n03938244 n01644373 n02109525 n03649909 n02190166 n01692333 n02910353 n01807496 n03982430 n02974003 n03950228 n01978287 n03720891 n02892767 n02504013 n01855032 n02483362 n02025239 n03868242 n02094114 n02109047 n07749582 n01669191 n03785016 n04041544 
n02087046 n03272010 n03447447 n02783161 n03976657 n02087394 n04548280 n01860187 n01689811 n04584207 n04251144 n02113023 n03977966 n03792972 n13054560 n06785654 n07734744 n02115641 n04606251 n02277742 n02794156 n02137549 n04479046 n01753488 n04485082 n02100735 n02869837 n03534580 n02879718 n04525305 n01829413 n03792782 n02109961 n03443371 n02009229 n01744401 n01728572 n02098413 n04311004 n03272010 n02095570 n01632458 n02783161 n01644900 n01601694 n01608432 n04335435 n02086910 n04418357 n02097658 n03124170 n04228054 n02494079 n07754684 n02493793 n02165105 n02133161 n01847000 n03394916 n02105162 n01950731 n03970156 n02233338 n03045698 n02099601 n11939491 n04467665 n04346328 n04347754 n03063689 n03100240 n02127052 n03887697 n09428293 n02361337 n02606052 n04590129 n02692877 n03796401 n04532106 n03538406 n07747607 n01978455 n07717556 n02894605 n03134739 n04243546 n03903868 n02879718 n01824575 n01877812 n01770081 n04525305 n01773549 n02099712 n01774384 n02823428 n01860187 n03461385 n04366367 n02167151 n02454379 n03777568 n01833805 n03761084 n04542943 n02504458 n02033041 n02095314 n03527444 n02280649 n02123045 n01644373 n12998815 n03792972 n02480495 n03417042 n02091467 n02415577 n12985857 n03544143 n04370456 n02110806 n03676483 n03602883 n03538406 n04201297 n03929855 n02504013 n10565667 n02097130 n03950228 n01675722 n04523525 n02966687 n02504458 n02089973 n01641577 n04330267 n04146614 n01631663 n02978881 n07802026 n04039381 n03485794 n03825788 n04265275 n03141823 n04033995 n03179701 n01986214 n04604644 n02730930 n03920288 n02799071 n04399382 n04023962 n02951358 n02114367 n02074367 n03992509 n03000134 n01824575 n04525305 n02119789 n03899768 n03617480 n02012849 n03814639 n04347754 n04597913 n02113799 n04562935 n03777754 n02687172 n02066245 n02704792 n01751748 n02090622 n03857828 n03777754 n02130308 n02606052 n03483316 n02808440 n02114712 n01774384 n09468604 n03045698 n02107574 n02112706 n03777754 n04209239 n07745940 n02690373 n07584110 n03388549 n03977966 n04584207 n02279972 n02443114 n02493509 n02494079 n03063599 n01774750 n01968897 n01695060 n04380533 n02128757 n09256479 n02909870 n04501370 n03935335 n07693725 n04591713 n03787032 n01498041 n03042490 n02086910 n01855672 n04596742 n02445715 n02859443 n02804610 n03709823 n02488291 n02410509 n03393912 n03498962 n03131574 n03791053 n03763968 n02097130 n03042490 n01641577 n01677366 n01828970 n02096051 n03888605 n02094114 n02892201 n02486261 n03983396 n02133161 n03602883 n03065424 n02749479 n02791124 n01968897 n02797295 n02877765 n01843065 n02892201 n03786901 n02174001 n03133878 n02107908 n04136333 n02437616 n04592741 n04044716 n01773157 n02130308 n02325366 n04591713 n04090263 n03902125 n03670208 n07753113 n03866082 n04201297 n02093859 n02410509 n02823750 n01740131 n03417042 n03874293 n03710193 n02871525 n02091467 n04254120 n02109525 n04404412 n02094433 n11939491 n02107683 n04356056 n02002556 n02168699 n01945685 n04376876 n04033901 n01530575 n03838899 n01776313 n03028079 n03658185 n04310018 n02090379 n02109525 n04376876 n04418357 n04409515 n07583066 n03841143 n02837789 n03494278 n03457902 n02497673 n02504013 n02110063 n02835271 n01491361 n02807133 n02085782 n02088364 n02607072 n02120505 n07718472 n03781244 n02389026 n03026506 n02769748 n02096177 n02840245 n02606052 n03857828 n03837869 n01735189 n02093256 n02112706 n02749479 n04525038 n03982430 n02510455 n02410509 n03680355 n02105505 n03017168 n02120079 n03532672 n03992509 n02009229 n02106166 n02105056 n02422699 n03770439 n03794056 n03777568 n02110806 n01950731 n04371430 n03417042 n03743016 n01729977 
n02669723 n02094433 n04251144 n02119022 n01697457 n01682714 n07614500 n02127052 n03042490 n02113799 n04399382 n03794056 n02963159 n02730930 n01592084 n04067472 n02815834 n07753592 n13052670 n07875152 n06785654 n04509417 n03977966 n03345487 n03223299 n04277352 n06794110 n02389026 n07920052 n02100877 n04435653 n04239074 n04069434 n03617480 n01494475 n02672831 n07831146 n02097047 n03814639 n02514041 n02091635 n01687978 n02116738 n01630670 n01695060 n04204238 n04090263 n04081281 n01819313 n02132136 n03787032 n04044716 n15075141 n03954731 n04389033 n02002556 n04591157 n04133789 n04277352 n02641379 n03733805 n04417672 n02403003 n01580077 n03920288 n03673027 n07697537 n07836838 n04243546 n02977058 n07684084 n07697537 n02132136 n03131574 n02093647 n03443371 n03134739 n04550184 n03891251 n02087394 n07697537 n07583066 n04522168 n04493381 n04065272 n02097130 n04467665 n01614925 n03961711 n02802426 n02089078 n02018207 n03947888 n01748264 n02280649 n02002556 n03709823 n01494475 n03485794 n04479046 n02108551 n03325584 n03188531 n02091032 n02259212 n02033041 n03290653 n04033995 n07614500 n02169497 n04553703 n02268443 n09288635 n01843383 n04428191 n03717622 n02268853 n02012849 n02894605 n02134418 n01751748 n02823750 n02177972 n03424325 n02397096 n07753275 n02417914 n03379051 n02096585 n03814639 n03355925 n03127747 n02264363 n03733131 n02481823 n03447447 n04409515 n02066245 n02102318 n03028079 n02107574 n04026417 n02058221 n02106662 n02607072 n01641577 n03376595 n07892512 n11939491 n02488702 n09421951 n01910747 n02364673 n07248320 n03908714 n02939185 n02099601 n03680355 n02095889 n02917067 n04380533 n01592084 n02109525 n02123394 n02236044 n02346627 n12057211 n12620546 n04346328 n01531178 n01735189 n04152593 n04487394 n02123597 n01768244 n02129604 n09193705 n04131690 n02085936 n02088238 n03538406 n03131574 n02110185 n03124043 n03000247 n02107574 n02110958 n03018349 n02930766 n02229544 n02483362 n03887697 n01773797 n02264363 n02088364 n04127249 n02113023 n03146219 n02114855 n04536866 n03770679 n01796340 n03866082 n04380533 n03764736 n07749582 n03658185 n04579145 n01784675 n01644373 n02110063 n02971356 n02494079 n02361337 n02490219 n03803284 n02113624 n02106550 n03814906 n03180011 n01872401 n02730930 n04548280 n02814860 n02105162 n03676483 n01871265 n07716358 n04476259 n03887697 n07697537 n02514041 n04004767 n04371774 n01855032 n01518878 n09835506 n01943899 n03908714 n03400231 n02129604 n02492035 n04252225 n02107312 n03443371 n02950826 n03814639 n02951585 n04265275 n01806567 n03482405 n01882714 n01580077 n02091831 n04266014 n02895154 n04532106 n02999410 n03729826 n03345487 n02105162 n02690373 n04597913 n04325704 n03461385 n01695060 n01818515 n09472597 n01806567 n07754684 n04326547 n02093859 n04049303 n02641379 n03196217 n02088466 n04376876 n02009229 n03929855 n02025239 n03814906 n03291819 n04612504 n03000134 n02837789 n07718747 n03459775 n02281406 n01693334 n02219486 n04266014 n04399382 n01774750 n02980441 n03062245 n04418357 n02841315 n04239074 n02117135 n03908714 n04429376 n02089867 n01641577 n02444819 n04277352 n01443537 n04522168 n02137549 n03770439 n03697007 n07248320 n04523525 n04141975 n04442312 n02979186 n03929855 n03160309 n07613480 n04154565 n03452741 n03063689 n01983481 n03884397 n02687172 n01622779 n01774750 n02096051 n04074963 n03207941 n02107908 n03180011 n04557648 n01491361 n04209239 n02091467 n03930313 n03417042 n02395406 n02112350 n02108915 n02123597 n04125021 n03777754 n09288635 n02066245 n03196217 n04118538 n03733281 n02106550 n02111889 n03720891 n04604644 n03016953 n03249569 n04039381 
n02100735 n01582220 n02423022 n03764736 n03109150 n02028035 n02510455 n01735189 n02666196 n02992211 n04356056 n03240683 n01978455 n04579145 n02963159 n09288635 n02442845 n04606251 n02087046 n03344393 n01883070 n03697007 n03891251 n03662601 n02138441 n01753488 n04613696 n01950731 n03485794 n02110341 n02892767 n02492035 n04273569 n04008634 n02095314 n03794056 n09472597 n02802426 n07716906 n03792972 n01872401 n03673027 n02279972 n02910353 n03933933 n03938244 n01558993 n03908714 n01914609 n02101006 n02672831 n04067472 n02526121 n07836838 n02817516 n07742313 n01828970 n04286575 n03649909 n02107683 n02988304 n02165456 n04560804 n01629819 n03814906 n03782006 n02264363 n02909870 n09246464 n02328150 n02730930 n04596742 n03095699 n03146219 n01824575 n03977966 n01807496 n02500267 n02098105 n01796340 n02113978 n02948072 n03089624 n04550184 n07565083 n03529860 n03544143 n02791270 n03775071 n03710721 n13044778 n02504458 n02514041 n03743016 n03483316 n12985857 n03709823 n04465501 n03028079 n04209239 n01807496 n02859443 n04398044 n03337140 n02783161 n02500267 n01644373 n07711569 n03888257 n02655020 n09399592 n03197337 n02007558 n03961711 n04542943 n02116738 n01580077 n02088632 n02096294 n03388183 n02099267 n03445924 n04133789 n04332243 n03201208 n03032252 n02504458 n02979186 n04584207 n03535780 n02229544 n02111500 n04525305 n03197337 n02398521 n02088238 n02364673 n04146614 n02113186 n02391049 n02098286 n04548362 n02009229 n07802026 n07716906 n02111889 n02730930 n01632777 n02099601 n02981792 n03637318 n01735189 n04049303 n02129165 n02443484 n03770679 n04149813 n01622779 n03110669 n01945685 n03937543 n02977058 n02457408 n03041632 n01694178 n03095699 n02085936 n04252077 n03529860 n01978455 n01768244 n06359193 n02107908 n04162706 n03494278 n02009912 n01740131 n03717622 n13054560 n03014705 n02087394 n02093991 n03063689 n02113023 n03733131 n04493381 n03825788 n02643566 n03495258 n06794110 n02280649 n04065272 n02110958 n03452741 n03314780 n01828970 n02871525 n04447861 n02815834 n04417672 n04328186 n02134418 n03788365 n03877845 n04487081 n02500267 n03372029 n03837869 n01968897 n03443371 n12768682 n01685808 n03584829 n02814860 n03485407 n03670208 n01817953 n03026506 n01440764 n01685808 n03691459 n04141076 n04179913 n03670208 n01755581 n03958227 n03388043 n03223299 n02504013 n01773549 n01694178 n02112018 n01739381 n01695060 n01980166 n03788365 n03187595 n02277742 n01669191 n02892201 n02123045 n07747607 n04604644 n04149813 n04074963 n02111277 n02101006 n03961711 n01978287 n03127747 n02129604 n07717410 n02264363 n07802026 n02089973 n02096585 n04243546 n01688243 n02817516 n04596742 n03673027 n02797295 n07753113 n01685808 n02871525 n02093991 n01984695 n07760859 n03032252 n07711569 n02280649 n03761084 n03160309 n03891332 n02883205 n04372370 n04041544 n04552348 n04264628 n04041544 n01910747 n03950228 n02666196 n04204347 n01560419 n04204238 n02236044 n03131574 n04487081 n02018795 n02843684 n03000684 n01667778 n02115641 n04548362 n01943899 n02100877 n02093256 n02018207 n02112137 n03141823 n02093754 n02174001 n04476259 n02480495 n03887697 n02769748 n02002724 n02113978 n02110627 n03874293 n02107574 n02109047 n01855032 n02794156 n03134739 n07742313 n03124043 n02486261 n02992529 n01734418 n02321529 n03047690 n02879718 n02025239 n03131574 n04347754 n03216828 n02264363 n03041632 n02071294 n01914609 n02497673 n02172182 n01667778 n02106550 n02814860 n01773549 n01986214 n02236044 n02009912 n02487347 n01755581 n03623198 n02445715 n06794110 n02085620 n04482393 n01820546 n04579145 n02326432 n07754684 n04111531 n03724870 n02093256 
n07711569 n02017213 n01688243 n01669191 n01664065 n02092339 n02108551 n04525305 n03950228 n03929660 n03956157 n03891332 n04493381 n02102973 n03255030 n01990800 n02500267 n02281406 n01824575 n03032252 n02129165 n02356798 n03538406 n02009229 n02097658 n03095699 n03786901 n03743016 n02980441 n07742313 n02106166 n03314780 n02097209 n04037443 n04086273 n03394916 n02037110 n02112018 n03379051 n02951585 n04501370 n04355338 n03874293 n04153751 n07930864 n02930766 n01496331 n04265275 n02256656 n01667114 n03630383 n04591713 n02704792 n03207743 n03854065 n03720891 n07873807 n02120505 n02099849 n04152593 n02100877 n04560804 n03792972 n03733131 n13133613 n02114548 n03000247 n04146614 n04398044 n02325366 n03633091 n09256479 n03617480 n01530575 n03633091 n03018349 n01768244 n02871525 n04040759 n03658185 n03272562 n02447366 n04392985 n02797295 n03903868 n04548362 n07714571 n03884397 n03888605 n02105505 n03666591 n03063599 n03530642 n02097474 n04483307 n04554684 n02978881 n02492660 n03692522 n04589890 n04579432 n02127052 n02112706 n02804610 n02190166 n11939491 n03000134 n01697457 n12620546 n02256656 n01968897 n02950826 n03127925 n02939185 n06596364 n02091134 n03877472 n02113799 n02102973 n02027492 n03498962 n02834397 n07248320 n04286575 n01735189 n02417914 n03690938 n03404251 n01739381 n02099267 n02219486 n02108089 n02206856 n03208938 n03127747 n02279972 n02281406 n02113023 n01601694 n07715103 n02107908 n02120079 n02102318 n02096051 n01990800 n02917067 n03372029 n03538406 n12267677 n03314780 n03903868 n02009229 n02100236 n03759954 n02277742 n03804744 n02966687 n02102318 n09835506 n01484850 n02097047 n02795169 n03673027 n02169497 n03532672 n04067472 n01944390 n02786058 n04019541 n01665541 n04162706 n01695060 n04116512 n03680355 n04548280 n04517823 n02883205 n02869837 n01871265 n01737021 n01496331 n01773797 n04562935 n03617480 n03930630 n04033901 n04270147 n03388183 n02823428 n02090622 n02504013 n04356056 n02510455 n01860187 n02492660 n02879718 n02669723 n15075141 n04263257 n02422106 n04350905 n02105056 n02102973 n03776460 n03857828 n02120505 n02105412 n02643566 n03291819 n04447861 n03938244 n07717556 n02423022 n03450230 n01770393 n04254680 n03530642 n03476991 n03710721 n04116512 n04398044 n02930766 n04370456 n02231487 n04019541 n03476991 n04366367 n02930766 n01728920 n03908618 n07615774 n06794110 n01744401 n04153751 n03187595 n02009912 n02096437 n02018207 n02363005 n07717410 n02939185 n03495258 n03787032 n03920288 n04392985 n02109961 n04325704 n03240683 n01773157 n02317335 n03929660 n02493509 n03920288 n03447721 n02486261 n04562935 n01829413 n01930112 n02104365 n02992211 n04033901 n03710193 n02797295 n01847000 n02100583 n04483307 n03874599 n04275548 n04540053 n01558993 n04560804 n04542943 n01773549 n04317175 n03935335 n07717410 n02165456 n03832673 n01692333 n03788195 n07831146 n03590841 n03840681 n02277742 n09472597 n07614500 n04548280 n03443371 n04532670 n01774750 n04486054 n03127747 n03676483 n02669723 n02017213 n01945685 n02219486 n04599235 n03530642 n04254777 n02111500 n03125729 n01631663 n07880968 n02111277 n01817953 n03776460 n01622779 n03240683 n02906734 n02391049 n01695060 n04023962 n01514668 n04133789 n02871525 n02277742 n02090721 n01693334 n04074963 n07693725 n01873310 n02279972 n02971356 n02071294 n03991062 n02088238 n03538406 n04552348 n02112706 n04229816 n03126707 n01518878 n03903868 n13054560 n04149813 n01828970 n03197337 n02443114 n03255030 n01558993 n03529860 n04069434 n02396427 n03197337 n02356798 n02504013 n02641379 n02017213 n01882714 n01514859 n04429376 n04366367 n04443257 n03075370 
n03782006 n02927161 n03899768 n07715103 n03980874 n01514668 n03761084 n01773797 n02120079 n04131690 n07248320 n02133161 n02096051 n13052670 n02979186 n02113023 n03594945 n02123045 n02120505 n02119022 n02493793 n01728572 n03482405 n01980166 n07745940 n01773549 n02123394 n02093754 n03534580 n02174001 n02641379 n01693334 n01983481 n02793495 n04456115 n04141327 n02096585 n01855672 n03223299 n03544143 n02321529 n09193705 n04409515 n02105162 n03775546 n01990800 n02128757 n03769881 n03314780 n03598930 n03452741 n03388183 n03958227 n02236044 n04208210 n07693725 n01945685 n04579432 n02486410 n02791270 n02099429 n02074367 n04208210 n01981276 n03240683 n03425413 n02115913 n03124043 n02002724 n02667093 n03724870 n07730033 n03733281 n04522168 n07717556 n03977966 n03788365 n01484850 n03482405 n03623198 n07892512 n07711569 n03710637 n03376595 n04141975 n02981792 n03804744 n02107312 n03733131 n01739381 n04252077 n03445924 n04599235 n02422699 n03637318 n03673027 n03425413 n02442845 n02325366 n02410509 n02641379 n02165105 n02769748 n02859443 n01806567 n03527444 n02099601 n07715103 n01531178 n04599235 n07697313 n02091244 n04317175 n02823428 n02096437 n02236044 n02190166 n02948072 n01728920 n01728572 n03000684 n03133878 n02017213 n01978287 n03775071 n04479046 n07720875 n06785654 n01843383 n02108089 n02606052 n02794156 n02100583 n12620546 n02412080 n01677366 n03710637 n07753275 n02417914 n04019541 n01697457 n01806143 n03759954 n02115913 n12985857 n03530642 n02133161 n02086240 n02782093 n02259212 n02110806 n03733131 n02096294 n04229816 n06794110 n02699494 n03761084 n01592084 n07695742 n01631663 n03017168 n04350905 n02256656 n04285008 n01984695 n04275548 n01883070 n03047690 n02445715 n02088094 n03223299 n01729322 n03837869 n02102480 n02088364 n02102177 n04265275 n02319095 n02229544 n03759954 n02869837 n04209133 n03291819 n04371774 n02138441 n02417914 n02128757 n02098286 n04591157 n03443371 n03902125 n02422106 n04423845 n04465501 n13052670 n02087394 n04367480 n07742313 n03538406 n03492542 n03868863 n02088632 n01582220 n03876231 n03770439 n02977058 n03457902 n03874293 n03902125 n03929855 n02391049 n03180011 n03956157 n02790996 n02099712 n01980166 n04041544 n02033041 n03976657 n01751748 n02127052 n01494475 n02128385 n04204347 n03690938 n03759954 n02412080 n04204238 n03662601 n02114855 n03788365 n02104029 n02101556 n01737021 n09288635 n02096177 n02492035 n04238763 n03393912 n04149813 n02398521 n01742172 n02130308 n01534433 n04404412 n02107683 n02708093 n04209239 n07715103 n07718747 n04462240 n02510455 n02098105 n02277742 n02096437 n02802426 n02486261 n02091134 n03272010 n01491361 n04604644 n02640242 n03692522 n02229544 n07720875 n04606251 n04201297 n11939491 n02088364 n02655020 n03657121 n02112350 n02326432 n03445777 n02028035 n04326547 n03400231 n02091032 n03710193 n01742172 n01806567 n03485407 n03450230 n01735189 n02319095 n03467068 n04458633 n03394916 n02500267 n04525038 n02112137 n02107908 n12768682 n02119789 n03662601 n07860988 n04584207 n07932039 n03062245 n07745940 n03085013 n04465501 n02483708 n03379051 n01631663 n01773157 n02364673 n02917067 n02488702 n02105412 n02423022 n03868242 n02018207 n02113624 n04041544 n04548280 n03483316 n03444034 n02125311 n02281406 n04041544 n03223299 n03602883 n12144580 n04192698 n07831146 n01748264 n02096177 n01798484 n03075370 n01807496 n04479046 n03457902 n02504013 n02097047 n07583066 n02979186 n03595614 n04286575 n09246464 n02981792 n03220513 n02090379 n02037110 n02009912 n07860988 n04435653 n02486261 n02129604 n01491361 n04579432 n02165456 n03259280 n01860187 n03796401 
n02356798 n01828970 n02206856 n03983396 n02783161 n03134739 n02823428 n04371430 n04118776 n02106166 n02988304 n01770081 n04465501 n03447447 n03976467 n02977058 n02058221 n02280649 n03445777 n03884397 n01797886 n03240683 n03485794 n02974003 n04548280 n02168699 n07716906 n02002556 n01632777 n02111129 n02492035 n02123159 n03424325 n02231487 n01641577 n07873807 n02363005 n02100877 n03777568 n01530575 n03998194 n01829413 n02480855 n09288635 n02321529 n02509815 n03482405 n04493381 n02319095 n03223299 n03388549 n02113186 n02093859 n07718747 n01855032 n10148035 n07753113 n04154565 n02423022 n04179913 n02486410 n02106382 n02033041 n02483708 n01537544 n02123597 n03240683 n04026417 n02108422 n09399592 n02104365 n03794056 n01776313 n02787622 n03854065 n01729977 n02127052 n03942813 n02109047 n03133878 n03775071 n02268443 n04118776 n02009912 n02111889 n04542943 n03759954 n03633091 n03124043 n03016953 n02133161 n02106030 n01773797 n03887697 n04501370 n04120489 n02096051 n01682714 n03133878 n02992211 n01795545 n02033041 n04285008 n02113978 n02006656 n01768244 n02837789 n01622779 n02091831 n02992529 n03929660 n02493793 n03447447 n02013706 n03478589 n07615774 n03530642 n02410509 n01968897 n04252077 n03976467 n07871810 n01697457 n04200800 n01806567 n03998194 n03721384 n02107683 n02950826 n02834397 n02978881 n02106166 n02098413 n04204238 n04328186 n01943899 n03494278 n01798484 n07714990 n02105056 n04033995 n03207743 n03459775 n02704792 n03379051 n04372370 n01855032 n03124170 n04039381 n04355338 n01774384 n03016953 n02486261 n01632777 n02319095 n02106550 n03476684 n01644900 n03729826 n03047690 n04179913 n02437312 n03769881 n01664065 n02107683 n09835506 n01784675 n02483362 n02089867 n04356056 n03666591 n06359193 n02277742 n04456115 n02099267 n03657121 n04149813 n07579787 n04372370 n02095314 n03496892 n02483708 n04417672 n04447861 n02804610 n03126707 n01704323 n09332890 n02090379 n03837869 n11939491 n03866082 n03733131 n02165456 n04443257 n02281787 n02398521 n07718472 n02106382 n02066245 n04428191 n03527444 n03085013 n02112350 n02094433 n03942813 n02398521 n02865351 n03908618 n02229544 n01981276 n03208938 n02236044 n04542943 n02804610 n02843684 n01687978 n02447366 n02099849 n03017168 n02999410 n02013706 n02102040 n02825657 n02091831 n01833805 n02117135 n01910747 n03724870 n04209133 n04328186 n03761084 n04509417 n04612504 n01537544 n01748264 n04542943 n02892767 n04332243 n04591713 n02116738 n07714990 n03782006 n07697313 n03692522 n02776631 n03197337 n06874185 n02089867 n02790996 n02979186 n03938244 n03028079 n02823428 n04133789 n02794156 n02815834 n03063599 n10148035 n02486261 n04435653 n01943899 n02391049 n02090622 n04542943 n02058221 n02089867 n02115641 n03930313 n02105412 n03691459 n03781244 n03721384 n01484850 n03201208 n03710721 n03384352 n02410509 n03787032 n03970156 n02105251 n03958227 n02690373 n01729322 n01518878 n04254680 n02988304 n03670208 n04033901 n02018795 n02749479 n03447721 n02093428 n02099712 n02094114 n02814860 n02167151 n04525305 n02483362 n02105251 n02817516 n04125021 n02979186 n01829413 n02097658 n02909870 n01558993 n03216828 n02280649 n02051845 n02115913 n03938244 n04522168 n01632458 n02106382 n02939185 n04111531 n01693334 n02268853 n02109525 n02125311 n03617480 n02437616 n04146614 n03832673 n02870880 n04554684 n02071294 n02971356 n03775071 n04326547 n11879895 n01531178 n02667093 n04317175 n02027492 n02002556 n02206856 n03527444 n04557648 n04467665 n01742172 n02100236 n02096437 n13054560 n02389026 n02098105 n07871810 n02488291 n04251144 n12057211 n04483307 n01917289 n03637318 n01950731 
n01955084 n02869837 n04037443 n02099267 n04254120 n02493793 n12144580 n01968897 n03770679 n02910353 n04146614 n04154565 n02128757 n04380533 n03530642 n02640242 n01530575 n04325704 n04562935 n03838899 n02692877 n03692522 n03916031 n02486261 n03724870 n02099267 n03207941 n02128925 n03461385 n01950731 n02492660 n02102973 n07749582 n04310018 n02110806 n02105056 n09428293 n02087394 n15075141 n03141823 n03709823 n03930630 n02280649 n04069434 n07718747 n02480495 n07754684 n12985857 n03602883 n01665541 n04465501 n02788148 n02114548 n07753275 n03788195 n02814860 n02090379 n03425413 n01751748 n04311174 n01796340 n07613480 n03445777 n04404412 n03124170 n02364673 n01829413 n03134739 n07730033 n03379051 n04485082 n03250847 n07730033 n07714571 n02790996 n03160309 n02268443 n02093859 n13052670 n02086910 n01632458 n04259630 n01806567 n02094433 n02093647 n02111500 n03876231 n01883070 n02098286 n04483307 n03344393 n01592084 n04579432 n04152593 n04579145 n03998194 n02093256 n01616318 n03085013 n03527444 n04116512 n02514041 n03627232 n03376595 n04443257 n03095699 n02403003 n04589890 n01910747 n02978881 n02727426 n01985128 n03482405 n02132136 n04277352 n13133613 n02033041 n02100877 n01806143 n03733805 n01748264 n02483362 n03776460 n02105412 n03887697 n01773157 n02056570 n02808440 n02007558 n04146614 n02097130 n03888605 n02412080 n01806567 n02457408 n03935335 n03775071 n07697313 n01774750 n07873807 n07749582 n02091134 n02871525 n02117135 n03657121 n03661043 n02088632 n03776460 n02120505 n02165456 n03089624 n03485794 n01534433 n02835271 n03240683 n04251144 n02086910 n03447447 n04200800 n01582220 n02655020 n04458633 n04371430 n02097047 n03970156 n04418357 n04243546 n02098413 n02992529 n03384352 n02640242 n02894605 n03920288 n03250847 n02607072 n04326547 n04485082 n03868863 n09472597 n02027492 n02692877 n03388549 n03874599 n02096051 n01847000 n02328150 n01534433 n02910353 n01829413 n02107142 n03977966 n02090622 n03444034 n04418357 n04254680 n02692877 n02002724 n03535780 n02108551 n02112350 n15075141 n04141975 n04507155 n04509417 n11939491 n02112706 n02110627 n03125729 n03680355 n01644373 n01644373 n01756291 n01753488 n02098105 n02342885 n03759954 n02110958 n02797295 n02006656 n02111500 n04033901 n01784675 n04277352 n02489166 n02481823 n02398521 n01739381 n02823428 n02939185 n12985857 n04275548 n04127249 n02087394 n03920288 n04482393 n03100240 n03000684 n07248320 n02454379 n02361337 n03218198 n02106030 n03544143 n04456115 n02165105 n03188531 n01641577 n07742313 n03761084 n01518878 n04376876 n03782006 n02422699 n01773797 n02106550 n04590129 n03902125 n02823750 n03393912 n04090263 n01737021 n02129165 n01498041 n03792782 n02966687 n02504458 n03838899 n01689811 n04347754 n01608432 n01817953 n02536864 n01729977 n02096437 n03924679 n02096437 n01798484 n02869837 n04336792 n03485407 n03868863 n04376876 n03602883 n02128925 n02102973 n02447366 n07716358 n03857828 n04517823 n03837869 n07749582 n02105162 n02281787 n02769748 n02085620 n01751748 n02093647 n04423845 n02488702 n03485794 n03908714 n01498041 n02231487 n02108551 n03179701 n02786058 n01855032 n04147183 n04254680 n04557648 n01728572 n04325704 n07860988 n01847000 n13044778 n03445777 n03447447 n02169497 n03290653 n03376595 n02094114 n03854065 n02422699 n01796340 n03459775 n02091244 n04399382 n03476684 n02951585 n03207941 n02174001 n03445777 n01950731 n04562935 n01728572 n02089973 n01945685 n02791270 n04090263 n01665541 n02264363 n04228054 n03345487 n03947888 n01944390 n04153751 n01664065 n03223299 n02930766 n04404412 n03992509 n01877812 n02977058 n09835506 n12267677 
n03127747 n01980166 n09835506 n07753113 n02860847 n02840245 n01748264 n03891251 n02484975 n02095314 n03063689 n04372370 n11879895 n02447366 n01795545 n03201208 n01797886 n04548362 n03028079 n03201208 n02109047 n03804744 n03417042 n02111500 n02109047 n02415577 n04456115 n02486410 n03976657 n02109525 n03602883 n03937543 n02492660 n02127052 n02641379 n03146219 n02091635 n02110185 n04389033 n04330267 n02165456 n04152593 n04548362 n02094433 n04372370 n03208938 n02356798 n02666196 n02279972 n03661043 n03187595 n03131574 n07742313 n02104029 n02172182 n02090622 n02085782 n02123159 n02105855 n02422106 n01667114 n01943899 n03692522 n03788195 n07718472 n03146219 n04553703 n09472597 n04447861 n02790996 n03673027 n02102040 n07565083 n01532829 n02276258 n04141327 n01817953 n04118538 n01990800 n02123597 n01751748 n02025239 n01644373 n03355925 n02177972 n04286575 n04009552 n03899768 n03857828 n04613696 n02120079 n02007558 n04311174 n03594945 n04355338 n03325584 n07590611 n07831146 n03899768 n02165105 n06359193 n06874185 n03657121 n02056570 n09428293 n04597913 n02114855 n04548280 n03065424 n01986214 n03623198 n04485082 n03888605 n02114855 n02917067 n04067472 n03457902 n03775071 n07579787 n02509815 n04458633 n03347037 n02098105 n12985857 n03691459 n04525305 n01817953 n03393912 n04251144 n02088364 n02526121 n02444819 n02088238 n02051845 n01667114 n04487394 n04125021 n02883205 n04162706 n02085936 n02807133 n02978881 n04350905 n01843383 n02906734 n01608432 n02950826 n04131690 n02823428 n02106030 n01818515 n03840681 n03443371 n03447447 n02492660 n11879895 n02981792 n01514668 n02701002 n04192698 n02106030 n07717410 n03492542 n06794110 n03977966 n04008634 n07768694 n04515003 n02111889 n02363005 n01930112 n04447861 n07684084 n01883070 n03250847 n02825657 n03793489 n01616318 n02110341 n06596364 n04456115 n01749939 n03180011 n02690373 n02088094 n01984695 n02493793 n09428293 n03888605 n09229709 n02128757 n04239074 n04040759 n03062245 n02168699 n02977058 n01773157 n02101388 n03459775 n04532106 n04026417 n02870880 n04179913 n02115913 n04525038 n11939491 n02165105 n04258138 n09472597 n01491361 n03706229 n03937543 n01855672 n03673027 n02443484 n03706229 n04149813 n03599486 n03272562 n01704323 n01537544 n03424325 n02085782 n02190166 n04592741 n02504458 n04086273 n07754684 n02443484 n02086910 n01756291 n01873310 n02096437 n02870880 n02106166 n07613480 n03018349 n03447721 n04335435 n02114855 n07760859 n03825788 n02107142 n02095570 n01697457 n03837869 n02018795 n02113624 n03781244 n03942813 n02445715 n02111129 n04372370 n02115641 n07802026 n02137549 n02099429 n03998194 n04162706 n03208938 n02486410 n02536864 n02437616 n02128757 n04604644 n03016953 n04404412 n02096585 n01494475 n03657121 n04259630 n04423845 n03388549 n02640242 n02988304 n02165456 n03924679 n04086273 n02492660 n02113624 n02093859 n02089867 n04192698 n01944390 n01632777 n02966687 n02107908 n02098286 n07831146 n02007558 n04536866 n02808304 n07718472 n03930630 n07754684 n01774750 n03980874 n03384352 n02104029 n02769748 n02058221 n01695060 n03929660 n13040303 n03089624 n04443257 n04428191 n03775546 n04517823 n01945685 n03216828 n02965783 n02088466 n04133789 n03838899 n02123597 n02128385 n02486410 n03124170 n03530642 n02500267 n12768682 n02128385 n01592084 n02526121 n04356056 n02137549 n03854065 n07684084 n01855032 n02992211 n02484975 n02106030 n09421951 n04367480 n09256479 n02119022 n02493509 n03803284 n01685808 n07697537 n01807496 n03733281 n03417042 n02219486 n09229709 n02526121 n03908714 n04204347 n03527444 n01740131 n02492035 n02094258 n03769881 n03026506 
n02804414 n02489166 n02883205 n03482405 n04366367 n03868863 n03891332 n01797886 n03447447 n04399382 n04146614 n02423022 n02268443 n03250847 n07753592 n01984695 n03709823 n03884397 n03630383 n03814639 n02834397 n01737021 n03786901 n01775062 n01883070 n09428293 n03977966 n07754684 n03384352 n02794156 n13054560 n02132136 n02769748 n07718747 n02950826 n01930112 n02086240 n02125311 n03947888 n02840245 n03220513 n03720891 n02791270 n02802426 n03866082 n03825788 n02487347 n02169497 n02860847 n01728920 n03535780 n03710193 n02091467 n04243546 n01616318 n03942813 n02128757 n04049303 n04417672 n02127052 n03838899 n03729826 n02909870 n09421951 n04515003 n02165105 n03146219 n04423845 n03602883 n01930112 n04208210 n03887697 n03761084 n02268853 n04392985 n03649909 n03447721 n02692877 n12267677 n07715103 n04392985 n04509417 n04041544 n03538406 n01664065 n03179701 n01820546 n04204347 n03929660 n02102973 n03903868 n01742172 n01770081 n03109150 n04273569 n02123045 n07590611 n13037406 n02102177 n03000247 n02410509 n02088632 n07768694 n06785654 n03393912 n03496892 n04275548 n03854065 n04355933 n01807496 n07720875 n04584207 n03792782 n03208938 n02666196 n04149813 n02107683 n04049303 n04118538 n04418357 n02877765 n01883070 n02509815 n10565667 n02497673 n02115913 n03837869 n02190166 n04592741 n04285008 n04606251 n03075370 n04125021 n03796401 n02091134 n03792972 n01824575 n02086079 n01855032 n07742313 n03393912 n03958227 n02137549 n02113978 n02356798 n02808440 n02105412 n01797886 n04204347 n03837869 n02111277 n02777292 n02129604 n07930864 n02489166 n03459775 n01644900 n04149813 n03854065 n03125729 n04141076 n04505470 n02089973 n02172182 n04266014 n04606251 n07768694 n09472597 n02134418 n03623198 n02793495 n01484850 n02276258 n02095889 n03733281 n03535780 n03983396 n02640242 n01818515 n02051845 n03544143 n02092002 n02906734 n01518878 n03769881 n02087046 n03891332 n04392985 n03485794 n03445777 n02115913 n02321529 n03633091 n01984695 n04590129 n02268443 n02676566 n02134084 n03658185 n02091134 n03733805 n02488702 n02869837 n02640242 n03160309 n02443484 n02441942 n01775062 n02825657 n12144580 n04591713 n02783161 n01882714 n02815834 n02814860 n02102177 n02988304 n03376595 n02165105 n04081281 n03495258 n09193705 n04493381 n02815834 n11939491 n02883205 n03063689 n02095570 n04033901 n03937543 n02107908 n07742313 n02114712 n02971356 n02906734 n02814860 n01692333 n02808440 n03706229 n04335435 n03791053 n03742115 n02099429 n02877765 n02321529 n03814639 n01592084 n03272562 n02786058 n01667114 n03947888 n02100735 n04409515 n01601694 n03777568 n12620546 n06794110 n02483708 n03666591 n03759954 n01871265 n02790996 n01955084 n03868863 n03026506 n04070727 n02233338 n01983481 n02640242 n01819313 n02794156 n03017168 n02486261 n04118776 n02769748 n03250847 n02113799 n02105056 n02108422 n01806567 n04229816 n09256479 n04141327 n01692333 n01644373 n02493509 n02892201 n02346627 n07747607 n04120489 n03032252 n04081281 n09468604 n02108422 n07753113 n02441942 n03775071 n02319095 n04579145 n02097474 n03697007 n02769748 n02129604 n04141076 n04476259 n02442845 n04442312 n02012849 n01806567 n03337140 n02097209 n03207941 n01632458 n01818515 n02233338 n02088094 n02727426 n04239074 n03095699 n04606251 n03902125 n02099267 n02086240 n03337140 n02085782 n02412080 n03637318 n01734418 n02113023 n04251144 n03764736 n02114855 n02799071 n01675722 n02843684 n01756291 n04417672 n02835271 n04141076 n04389033 n04482393 n02087394 n02115641 n03017168 n01753488 n02514041 n04509417 n02089973 n03075370 n01644373 n03791053 n04265275 n02111500 n02097209 n04458633 
n07802026 n04141076 n04597913 n02281787 n12057211 n02277742 n07716906 n03920288 n04326547 n03127747 n03404251 n02108915 n02127052 n02391049 n04229816 n02837789 n03314780 n02089973 n04296562 n02791270 n03000134 n01644900 n04209133 n01669191 n02107142 n03908714 n03045698 n03485794 n02108551 n02807133 n02892767 n04525305 n02493509 n10148035 n03201208 n03690938 n04505470 n02206856 n02098105 n03478589 n02123597 n02783161 n01667114 n02106550 n03733805 n03424325 n01882714 n01855672 n01855672 n01983481 n01695060 n01847000 n02799071 n04428191 n03223299 n13052670 n02101556 n04265275 n03016953 n01775062 n04033901 n01753488 n03146219 n04235860 n03759954 n03788195 n07749582 n01829413 n02093256 n02231487 n04536866 n03146219 n04004767 n02493793 n04371774 n02395406 n02114712 n02747177 n01560419 n03814906 n04141327 n01833805 n03825788 n02128925 n02120079 n03658185 n03935335 n03530642 n01968897 n02114548 n03873416 n01985128 n01514859 n02669723 n04311174 n03141823 n01872401 n03920288 n02927161 n02397096 n04357314 n03535780 n03127925 n01807496 n02895154 n02794156 n03666591 n04004767 n04039381 n04179913 n01828970 n02128385 n02095570 n04592741 n02793495 n02096177 n01631663 n02111500 n12057211 n04356056 n02894605 n02226429 n04482393 n01950731 n03452741 n01632777 n03197337 n04505470 n04599235 n01484850 n04501370 n02095570 n02276258 n02410509 n04037443 n02276258 n04418357 n02892767 n02099267 n03791053 n04599235 n03642806 n03530642 n07718472 n07693725 n11939491 n02793495 n02988304 n02096051 n01514668 n01616318 n04243546 n02808440 n04270147 n02106030 n04344873 n07930864 n03444034 n07860988 n02119022 n02108000 n04562935 n02105162 n02492035 n02823750 n03481172 n02108000 n04310018 n02107142 n02226429 n02074367 n03785016 n04553703 n03495258 n07579787 n07745940 n02111277 n04476259 n03476684 n04487081 n02091134 n07714571 n02105251 n04404412 n04398044 n01924916 n02487347 n12620546 n03255030 n04325704 n02093647 n02814533 n03125729 n03000247 n02492035 n01530575 n02108915 n02114367 n01796340 n13044778 n04522168 n02443114 n04589890 n04201297 n03733805 n02168699 n01616318 n03594945 n04479046 n02391049 n02892201 n04447861 n02134084 n02096294 n01484850 n03930630 n02090721 n04118538 n02445715 n06596364 n03599486 n04579145 n09468604 n01986214 n01820546 n02526121 n02408429 n03854065 n01855032 n03272562 n09288635 n02106550 n02095314 n01667778 n02137549 n02483708 n02804610 n04125021 n03769881 n02814533 n07718472 n04263257 n03877472 n02107312 n03042490 n01697457 n09468604 n03146219 n02799071 n03764736 n02493793 n03787032 n02808304 n03485407 n01740131 n04589890 n01914609 n02883205 n04254680 n03777568 n02280649 n02102040 n02823750 n04147183 n02091467 n04069434 n01729977 n01818515 n04023962 n03584254 n02095314 n03983396 n03956157 n02097209 n02095314 n02825657 n02107142 n02219486 n03796401 n01687978 n03944341 n02097658 n07718747 n04552348 n04263257 n03942813 n02037110 n03787032 n03642806 n01689811 n02102973 n02480495 n07684084 n02408429 n04356056 n02117135 n07584110 n04265275 n02493793 n01682714 n01981276 n04592741 n03976467 n02948072 n04086273 n04277352 n13054560 n02480495 n01983481 n02085782 n03598930 n03345487 n02017213 n03179701 n01984695 n04296562 n04507155 n04328186 n01534433 n02494079 n03916031 n04376876 n02093428 n01843383 n01924916 n03207743 n07747607 n03785016 n03388549 n02113624 n03961711 n02086646 n02134084 n04606251 n04493381 n02096585 n02992529 n03891332 n01616318 n01496331 n01694178 n01695060 n04026417 n01695060 n02117135 n03584254 n04336792 n01698640 n02177972 n04532670 n02859443 n02095889 n01682714 n11879895 n02114855 
n02484975 n02097047 n04204238 n04604644 n01775062 n03775071 n01773549 n03956157 n03792972 n04404412 n09835506 n07717556 n02037110 n02361337 n02105412 n04447861 n02835271 n03240683 n07613480 n02422699 n02488702 n01776313 n04579432 n04116512 n03857828 n02676566 n03063599 n02397096 n02977058 n02089867 n04429376 n03018349 n13037406 n03998194 n01693334 n01770081 n03991062 n03141823 n03691459 n04039381 n02894605 n02096177 n02093256 n02917067 n03791053 n03976467 n02795169 n02112706 n01692333 n02111129 n03110669 n03803284 n01592084 n02514041 n02104365 n02089867 n07860988 n02093256 n02403003 n04522168 n02837789 n01855032 n02793495 n02093991 n02437312 n02980441 n04116512 n02120079 n04371774 n02104365 n04153751 n02091635 n01775062 n04310018 n03529860 n02105162 n02814860 n02088364 n02116738 n03630383 n02229544 n04111531 n01882714 n01917289 n03877472 n02346627 n03476991 n02115641 n03110669 n02799071 n03272562 n01729322 n03599486 n03445777 n04099969 n02536864 n03026506 n03899768 n04485082 n01440764 n04370456 n04125021 n07565083 n02012849 n02437616 n02281406 n03141823 n01440764 n04548362 n03584254 n04366367 n04069434 n02108551 n07697313 n02916936 n03124043 n01697457 n02095570 n03016953 n02441942 n02106382 n01833805 n03045698 n04404412 n03888605 n04259630 n03075370 n03124170 n03534580 n04277352 n03717622 n02526121 n01797886 n04133789 n02105855 n03530642 n02130308 n01980166 n04192698 n04336792 n07742313 n01692333 n02279972 n04371430 n01592084 n09332890 n04332243 n04392985 n07720875 n03478589 n03291819 n04560804 n02106030 n04049303 n02927161 n07753113 n04065272 n02835271 n03047690 n03538406 n01582220 n02113624 n03792782 n04116512 n02093859 n03961711 n02109047 n07831146 n02825657 n13054560 n02951585 n02442845 n02817516 n03874599 n02093859 n01755581 n02860847 n02167151 n01537544 n02099601 n02111500 n03670208 n03179701 n02093647 n03444034 n03131574 n02111500 n04069434 n01744401 n03220513 n03393912 n02486261 n03372029 n01728572 n02422106 n01833805 n03594734 n13044778 n02074367 n02391049 n07873807 n09468604 n02799071 n03832673 n02361337 n02111277 n04204238 n02172182 n04562935 n02100735 n02007558 n03630383 n01484850 n02484975 n02096051 n02206856 n03770679 n04265275 n09246464 n09835506 n07614500 n09472597 n03379051 n03457902 n01855032 n04201297 n02951585 n13133613 n03770439 n02172182 n03992509 n03617480 n02802426 n02676566 n01687978 n07711569 n03690938 n02869837 n03942813 n04332243 n01491361 n12768682 n01910747 n04179913 n03627232 n13037406 n07745940 n04152593 n01806143 n07565083 n03627232 n12267677 n03837869 n02094433 n04238763 n03496892 n04612504 n02807133 n02106166 n02484975 n03208938 n04065272 n02107574 n07715103 n04517823 n10565667 n02807133 n03717622 n04557648 n04591157 n02326432 n06874185 n04442312 n03042490 n03188531 n04487394 n02006656 n01729322 n03929660 n03425413 n03216828 n02346627 n02526121 n02089078 n01669191 n10565667 n04376876 n04258138 n02489166 n02493793 n03584829 n03379051 n02094114 n01514668 n03770439 n02231487 n01855032 n03180011 n04606251 n03916031 n01774750 n02087394 n03297495 n01968897 n02105056 n01491361 n02114712 n02097130 n02692877 n04125021 n03476684 n03658185 n02966687 n02259212 n03355925 n13133613 n03394916 n02107312 n02788148 n02109961 n01440764 n03124043 n06359193 n04133789 n02500267 n04209133 n03344393 n03494278 n02977058 n03710637 n01622779 n09421951 n02790996 n02089078 n02256656 n01531178 n04479046 n04141327 n03000134 n02504013 n03627232 n02114712 n03325584 n03773504 n04004767 n04266014 n02977058 n02125311 n02281406 n03291819 n01675722 n02138441 n03804744 n03000684 n02114367 
n03187595 n01943899 n02125311 n02113624 n02823428 n02233338 n03110669 n02500267 n03594734 n03347037 n01990800 n02074367 n02396427 n03954731 n02687172 n02883205 n03127925 n02111500 n07718747 n02447366 n04286575 n02930766 n01664065 n04153751 n01687978 n02422699 n02791270 n02835271 n02504458 n01917289 n04252077 n04548280 n03089624 n07590611 n07754684 n01739381 n04483307 n01914609 n02087046 n03697007 n04039381 n01820546 n04355338 n02100735 n03032252 n02091467 n01728572 n02002556 n03874599 n02859443 n04146614 n03534580 n04532106 n01981276 n03814639 n01689811 n06359193 n01675722 n03888605 n07714990 n04476259 n02536864 n02492035 n04265275 n02948072 n03804744 n04380533 n01518878 n04005630 n07590611 n04417672 n03709823 n02105412 n02363005 n01494475 n03680355 n02951358 n04597913 n03998194 n01855032 n02018795 n03271574 n02167151 n02009912 n03825788 n04482393 n01774750 n02500267 n01514859 n03908618 n03761084 n03633091 n02096177 n03729826 n07717556 n03670208 n01773797 n04554684 n01697457 n03691459 n02138441 n03764736 n02123394 n04192698 n04120489 n07615774 n03929855 n02494079 n01669191 n01498041 n03250847 n03924679 n02356798 n02823750 n03447721 n02058221 n07930864 n01530575 n04428191 n04372370 n03840681 n02027492 n01498041 n07718472 n03954731 n04099969 n03954731 n01770081 n03445924 n03045698 n03527444 n02840245 n04201297 n01735189 n01986214 n02002724 n02113978 n02177972 n03908714 n03888257 n02100236 n02437312 n02236044 n07871810 n03775071 n03947888 n03933933 n02066245 n02128385 n01491361 n02493509 n07717556 n02865351 n03187595 n02666196 n01917289 n01770081 n02788148 n03661043 n02481823 n02085620 n02799071 n03590841 n01749939 n01614925 n02950826 n02088632 n01498041 n02105162 n01737021 n02690373 n03584254 n02791124 n02088238 n04328186 n01582220 n02231487 n03717622 n01751748 n03721384 n02108422 n01669191 n02980441 n04243546 n03982430 n02422106 n03014705 n04371774 n04125021 n02090622 n01930112 n04552348 n03764736 n01582220 n02056570 n02089973 n09399592 n03450230 n03770679 n03445924 n02007558 n02268443 n02396427 n01440764 n03062245 n02134418 n03594734 n02094433 n04264628 n02992211 n02093428 n02100735 n04367480 n03764736 n03041632 n01443537 n03476684 n09229709 n04355338 n02128385 n04550184 n01806567 n02098413 n04086273 n02090379 n03958227 n02091467 n02108000 n03658185 n02843684 n01440764 n02981792 n07892512 n03297495 n03692522 n03937543 n03691459 n03240683 n02977058 n07730033 n04591713 n11939491 n03902125 n02783161 n04355338 n02281406 n03538406 n01608432 n03935335 n01983481 n02730930 n01968897 n03769881 n04493381 n02112018 n02391049 n04389033 n03775546 n02172182 n09399592 n02093991 n01806143 n02226429 n01669191 n04125021 n02113712 n02860847 n02074367 n02447366 n02783161 n02454379 n01984695 n03721384 n03633091 n03376595 n02120505 n02105505 n04517823 n03372029 n03527444 n03786901 n03478589 n02066245 n07892512 n01491361 n02108089 n03325584 n03717622 n03773504 n01582220 n03676483 n04540053 n07248320 n04118538 n02095314 n12267677 n03602883 n02815834 n03379051 n02172182 n02107142 n06874185 n01776313 n07714571 n01775062 n03452741 n03916031 n04118538 n01580077 n02497673 n01518878 n03673027 n02101388 n03187595 n04350905 n02408429 n03417042 n02514041 n02116738 n03476684 n02497673 n04285008 n03126707 n03544143 n04147183 n03481172 n04041544 n02268443 n09472597 n02085782 n03400231 n03954731 n04074963 n03782006 n02281787 n04023962 n04008634 n07875152 n07716906 n02109525 n03995372 n02096177 n01981276 n03884397 n02509815 n03529860 n03584829 n02268853 n04141975 n04599235 n03759954 n02894605 n02454379 n03014705 n02786058 
n04505470 n02172182 n02979186 n02091635 n02007558 n02797295 n02817516 n02233338 n04099969 n03250847 n02950826 n02124075 n01484850 n02096294 n02965783 n01943899 n02028035 n04486054 n02417914 n03445777 n04009552 n02125311 n03770439 n02018207 n02219486 n04111531 n09288635 n03825788 n03223299 n04606251 n02396427 n07717410 n02111277 n04515003 n02643566 n03733131 n02093428 n01807496 n02480855 n03527444 n02099849 n04482393 n02361337 n02107574 n04201297 n03633091 n04033995 n02641379 n02790996 n02190166 n03127747 n02483362 n03126707 n03590841 n07717410 n04033901 n02676566 n07875152 n02100236 n04584207 n01737021 n02493509 n02105251 n03930630 n03873416 n02396427 n02493793 n03250847 n02088466 n02814533 n02108000 n01443537 n02988304 n01944390 n04285008 n04356056 n01930112 n03630383 n02281406 n02346627 n04493381 n03709823 n01755581 n02018795 n07802026 n11939491 n07836838 n04429376 n03967562 n02113023 n03724870 n03792972 n01753488 n07875152 n07753592 n04357314 n03642806 n04131690 n04258138 n01667114 n02782093 n02493509 n04465501 n07583066 n02256656 n01532829 n01872401 n07684084 n03763968 n04579145 n03492542 n04417672 n04350905 n04069434 n03866082 n04311174 n01756291 n02797295 n03642806 n03676483 n03697007 n02087046 n03207941 n04201297 n02074367 n01608432 n02111500 n03633091 n02804610 n04562935 n02093859 n03935335 n02051845 n01990800 n02799071 n04228054 n02100877 n01755581 n02129604 n02727426 n01860187 n04326547 n03776460 n02206856 n02093256 n01968897 n02326432 n03770679 n02509815 n02978881 n03018349 n03394916 n02977058 n03891332 n01665541 n04141327 n02233338 n02092339 n03388549 n04548362 n04296562 n04067472 n03014705 n02747177 n02441942 n04081281 n03290653 n02066245 n01983481 n02085936 n01518878 n02085620 n04346328 n01601694 n01532829 n03992509 n01694178 n02437616 n04612504 n02666196 n03950228 n02093754 n02123597 n01817953 n02190166 n04067472 n03933933 n02398521 n02097130 n03444034 n03792972 n04418357 n01871265 n03208938 n01768244 n02174001 n02219486 n01774384 n07742313 n04355933 n02129165 n07742313 n01697457 n04310018 n02669723 n04367480 n01592084 n02105251 n02113799 n07565083 n02091032 n02011460 n03773504 n02445715 n04275548 n02112018 n01632458 n02486261 n07714990 n02106550 n03478589 n02963159 n03743016 n04146614 n03970156 n03874293 n07749582 n06874185 n01950731 n01498041 n04090263 n02077923 n02106662 n02786058 n04591157 n03481172 n03924679 n02500267 n04258138 n04540053 n03160309 n02087394 n03494278 n04325704 n01669191 n02108551 n01980166 n03314780 n02808440 n04447861 n02281787 n02095889 n02489166 n02114367 n04344873 n02058221 n02444819 n02988304 n03495258 n02002556 n03874293 n02085782 n01695060 n02870880 n01608432 n02948072 n04067472 n02098286 n02093428 n04009552 n12267677 n02085782 n03376595 n04335435 n03891332 n03733281 n02264363 n02132136 n04263257 n01698640 n01753488 n07714990 n03417042 n03259280 n01737021 n04118538 n01773797 n03124170 n03874293 n09421951 n02747177 n09288635 n04136333 n03956157 n02093256 n03729826 n03538406 n01774384 n04355338 n02105251 n02403003 n01697457 n01828970 n02892767 n02018207 n02134084 n03733805 n07930864 n02097474 n04507155 n04344873 n02950826 n03721384 n01943899 n07920052 n02319095 n04149813 n02364673 n01742172 n04428191 n03450230 n09399592 n01689811 n01978287 n07716358 n02074367 n04557648 n03062245 n02105251 n07716906 n03623198 n03125729 n03876231 n04509417 n03041632 n04347754 n06359193 n04118538 n01806143 n07749582 n02105855 n13052670 n02094114 n03775071 n01873310 n03788195 n04311004 n03018349 n03089624 n02087046 n03379051 n04493381 n07714990 n03895866 n15075141 
n07684084 n01755581 n07715103 n04285008 n03476991 n04049303 n03496892 n03041632 n02403003 n03832673 n04131690 n04479046 n04479046 n02259212 n01734418 n02002556 n03179701 n03992509 n07932039 n04467665 n02099712 n04456115 n03690938 n04367480 n01729322 n03961711 n03841143 n02963159 n03476991 n04074963 n02077923 n01532829 n02865351 n02966687 n01694178 n03017168 n04429376 n03935335 n09246464 n04004767 n03208938 n04111531 n04389033 n07760859 n04326547 n04209239 n07697537 n03785016 n04367480 n04037443 n04311174 n02814533 n02113799 n02825657 n02672831 n02114855 n02090622 n09399592 n04482393 n01910747 n04417672 n04162706 n02098413 n07717556 n01580077 n02092002 n03014705 n04370456 n02835271 n03047690 n03944341 n07613480 n02361337 n02356798 n02835271 n02011460 n02096051 n01843065 n03498962 n07583066 n07734744 n04277352 n02088632 n09835506 n04141327 n01820546 n03218198 n03825788 n04310018 n02099849 n02025239 n07753275 n03876231 n02099267 n03794056 n07590611 n01740131 n02091032 n04200800 n01770081 n02869837 n03379051 n01833805 n03929855 n02749479 n01644900 n03445777 n02110627 n01630670 n04273569 n04483307 n02138441 n07892512 n01983481 n02108422 n02948072 n02094258 n03141823 n01632458 n04517823 n04380533 n09472597 n02165456 n01930112 n03018349 n02268853 n01770081 n04141975 n03998194 n03384352 n04147183 n03045698 n03791053 n03944341 n02536864 n01829413 n02088466 n01694178 n02106382 n01748264 n03759954 n12985857 n04254680 n04465501 n02795169 n02096177 n02444819 n01558993 n02115641 n03445924 n02701002 n06359193 n01773549 n03637318 n02437312 n04332243 n02865351 n02088632 n04067472 n02092002 n03956157 n04326547 n02786058 n01784675 n01847000 n04146614 n03666591 n04310018 n01914609 n07695742 n03404251 n03891251 n06874185 n03062245 n03355925 n12267677 n04254120 n07714990 n02233338 n02804414 n03062245 n02018795 n07720875 n03075370 n03530642 n01980166 n01667114 n04553703 n09468604 n06794110 n04367480 n02963159 n03710193 n01980166 n03000134 n03938244 n02231487 n02493509 n03447721 n07583066 n09472597 n03877845 n04147183 n04229816 n12998815 n03877472 n07718472 n03063599 n01665541 n02111889 n06596364 n02094433 n01817953 n02091635 n01755581 n01740131 n01592084 n03673027 n03467068 n03924679 n04467665 n03733805 n01833805 n03089624 n02091635 n02489166 n02112350 n04192698 n02102040 n02823428 n04074963 n01872401 n04579145 n03788365 n04086273 n02009229 n07753113 n02504458 n02002724 n02097474 n07754684 n03134739 n02113978 n02403003 n03998194 n01688243 n03891332 n04133789 n02111500 n02916936 n07248320 n04404412 n04209239 n07590611 n03673027 n04008634 n03272010 n13040303 n09399592 n02007558 n02488291 n07716906 n04009552 n02111889 n03658185 n01980166 n04367480 n02892201 n04423845 n03131574 n04041544 n04266014 n03825788 n02033041 n02002724 n01871265 n04099969 n02321529 n02666196 n01698640 n03709823 n02356798 n03089624 n03873416 n02097130 n02108089 n04258138 n01667778 n04456115 n03492542 n02363005 n01871265 n01950731 n04153751 n01984695 n01614925 n02110958 n01824575 n01981276 n15075141 n03814906 n03874599 n04118776 n01675722 n02939185 n03742115 n01697457 n02326432 n02090622 n04532106 n03983396 n02415577 n02412080 n02102480 n03459775 n04380533 n04254777 n01631663 n03404251 n07871810 n02123045 n02226429 n01871265 n01820546 n01688243 n02825657 n01689811 n02095570 n04019541 n03777754 n01748264 n02123045 n02129604 n02105056 n02125311 n02089973 n03649909 n04540053 n03670208 n02097209 n01819313 n03110669 n02124075 n02437616 n01843383 n03935335 n02782093 n07753113 n03791053 n02111129 n07614500 n03761084 n03676483 n01978455 n03857828 
n02488702 n02165456 n07734744 n03991062 n02860847 n03954731 n03045698 n03944341 n02111129 n02092002 n03891251 n02130308 n01945685 n03188531 n02457408 n03085013 n03796401 n13052670 n02398521 n03743016 n02229544 n03160309 n02276258 n02276258 n02504013 n02281406 n02877765 n03649909 n07697313 n02058221 n02077923 n03394916 n02256656 n04328186 n02009229 n03476684 n03388549 n07714571 n09193705 n02396427 n01806567 n02090379 n02100583 n04483307 n02120079 n01914609 n01630670 n04259630 n07695742 n02106030 n02883205 n02398521 n03995372 n07590611 n04099969 n02110063 n03785016 n02669723 n03125729 n04442312 n07920052 n02497673 n02454379 n02091831 n02454379 n02088632 n02115641 n03761084 n02606052 n02264363 n01843065 n03623198 n03445777 n02481823 n01773157 n03109150 n04458633 n02165456 n02190166 n04111531 n03197337 n04542943 n04507155 n02089867 n02342885 n02099601 n03787032 n03483316 n02454379 n04041544 n02086079 n04485082 n07831146 n02106030 n03445777 n02398521 n02666196 n02009912 n01534433 n03126707 n12057211 n04355933 n02025239 n04336792 n02906734 n02002556 n04487394 n03291819 n01614925 n04235860 n04270147 n03291819 n03837869 n04192698 n04120489 n02930766 n02128385 n02837789 n02105505 n01704323 n02481823 n03384352 n02167151 n07753592 n07614500 n02134084 n04515003 n01729322 n04033901 n02134418 n01514668 n03942813 n02101556 n03642806 n03733131 n03290653 n02174001 n01784675 n03777754 n03942813 n02802426 n04049303 n03535780 n02492035 n04070727 n03075370 n04372370 n07860988 n04367480 n03786901 n04562935 n07590611 n02102973 n07248320 n03095699 n04009552 n07614500 n09288635 n03724870 n04258138 n01698640 n07753113 n04263257 n01755581 n04447861 n02666196 n03733281 n02051845 n02058221 n03958227 n02403003 n02097474 n02099429 n02484975 n07836838 n10565667 n07720875 n02486261 n02321529 n01755581 n03100240 n03063599 n01664065 n02783161 n03803284 n03110669 n02086240 n02487347 n02097209 n04310018 n02012849 n04120489 n03482405 n02447366 n01749939 n03478589 n02963159 n04428191 n04285008 n01530575 n02111129 n03109150 n07697313 n02802426 n03690938 n01914609 n02481823 n02259212 n03538406 n15075141 n03649909 n04483307 n04613696 n10565667 n02488702 n02094258 n02096585 n02127052 n02391049 n01734418 n09332890 n03379051 n02133161 n12144580 n02099429 n04447861 n04120489 n07860988 n02129604 n03065424 n02095314 n04154565 n02655020 n02165105 n04275548 n02415577 n02786058 n02091467 n03444034 n01498041 n07590611 n04554684 n02109047 n04552348 n03814639 n03125729 n03888257 n03950228 n02089973 n03967562 n02749479 n03729826 n02018207 n04487081 n03017168 n03976657 n03938244 n02769748 n07836838 n02002724 n03100240 n03598930 n04479046 n01644373 n02708093 n02134418 n13054560 n09332890 n03133878 n04554684 n03041632 n02869837 n03014705 n02510455 n03954731 n02788148 n02859443 n02640242 n02087046 n03891332 n02124075 n03476684 n04270147 n04542943 n03916031 n02051845 n02104029 n04270147 n02422106 n03692522 n02115641 n02447366 n03710721 n02112018 n03000134 n02105162 n02097047 n02356798 n04037443 n02071294 n07892512 n03924679 n01687978 n02098286 n03345487 n04254777 n03680355 n02963159 n01582220 n04090263 n03761084 n04604644 n02097209 n03109150 n02088632 n03937543 n01943899 n02093647 n02093428 n03461385 n04270147 n04389033 n03534580 n09468604 n02107312 n01797886 n02090379 n02871525 n01667778 n01773549 n01755581 n02093991 n04350905 n03995372 n02280649 n03933933 n02226429 n03207941 n09399592 n02106030 n03590841 n02966193 n03787032 n02115913 n04099969 n04273569 n02037110 n01917289 n04254777 n03888257 n02807133 n04589890 n02091032 n01685808 n07714571 
n03777568 n03379051 n03028079 n04275548 n02395406 n04040759 n02109961 n01872401 n03825788 n02112706 n03692522 n02086910 n02321529 n03131574 n04311004 n03929855 n01514859 n03804744 n03417042 n02794156 n07730033 n04120489 n02342885 n04041544 n04366367 n02116738 n02992211 n02276258 n02895154 n01984695 n03661043 n03207941 n02025239 n02123045 n02117135 n02107908 n02815834 n04355933 n03598930 n07742313 n03876231 n02259212 n01775062 n03617480 n03840681 n03902125 n02930766 n03633091 n04404412 n03825788 n03337140 n02018795 n02447366 n07613480 n02493793 n01694178 n12620546 n06874185 n02443484 n04209133 n04515003 n04540053 n01796340 n03623198 n02108551 n03763968 n02410509 n11879895 n03832673 n03930630 n02490219 n03937543 n02111889 n02096437 n04154565 n02971356 n02865351 n03776460 n02777292 n02190166 n04612504 n04081281 n02747177 n03777754 n02445715 n03857828 n11939491 n01981276 n04041544 n04458633 n03447721 n02106030 n02834397 n02097474 n01877812 n02085936 n02096051 n03272562 n03793489 n02099849 n03649909 n01882714 n02860847 n04039381 n04264628 n02484975 n02167151 n02074367 n01773549 n04367480 n07718747 n02841315 n02910353 n02106550 n03602883 n04153751 n03992509 n09468604 n02129604 n09229709 n02056570 n03594734 n02111277 n07590611 n02704792 n03868863 n02115641 n02444819 n02808304 n04355338 n02281787 n02138441 n03814906 n04409515 n01739381 n03495258 n03627232 n02085620 n02190166 n03355925 n03188531 n02100735 n03961711 n02823428 n07860988 n01740131 n09229709 n03777568 n03908618 n02108551 n02177972 n09288635 n01693334 n02106382 n04026417 n03388183 n02002724 n03208938 n04517823 n04336792 n03658185 n02097474 n02690373 n13044778 n02281787 n02641379 n02130308 n02704792 n01582220 n02027492 n04525305 n02119789 n13054560 n03724870 n02488291 n07697313 n02132136 n04336792 n03983396 n03944341 n01774384 n02027492 n02091134 n07860988 n02106550 n04357314 n03662601 n03868242 n03804744 n02112350 n01774750 n02088238 n07718472 n01742172 n02992529 n04404412 n02089867 n03345487 n02437312 n02930766 n13133613 n02206856 n02486410 n03843555 n04476259 n02094433 n01843065 n07714571 n02389026 n04099969 n01843065 n03180011 n09472597 n03670208 n01751748 n01807496 n02229544 n02101006 n03188531 n03290653 n02403003 n02699494 n04266014 n02708093 n04399382 n02804414 n07747607 n02749479 n03424325 n04522168 n01843065 n01682714 n02138441 n11879895 n04355338 n03662601 n03658185 n03483316 n07718747 n03476684 n02110958 n04040759 n03814906 n04461696 n02492660 n04044716 n04596742 n01770081 n01806143 n04589890 n03016953 n02493793 n01983481 n01484850 n02981792 n03710637 n02104029 n01498041 n03976657 n04009552 n02790996 n04235860 n04447861 n01910747 n03481172 n04090263 n03929660 n07248320 n03271574 n03661043 n03954731 n03016953 n07614500 n03920288 n02091244 n02676566 n13044778 n03843555 n07871810 n03832673 n04252225 n02174001 n03832673 n10148035 n02280649 n09229709 n06874185 n02823428 n02692877 n02823428 n07753592 n02782093 n03459775 n09288635 n04204347 n02483708 n04461696 n02791124 n03710193 n12768682 n04435653 n04204347 n02669723 n03657121 n01518878 n04026417 n02319095 n03791053 n02110063 n02281787 n03197337 n04152593 n02025239 n03633091 n02259212 n02423022 n03891332 n03874293 n02071294 n01773797 n07711569 n02007558 n13133613 n02017213 n04270147 n02113624 n02916936 n01675722 n07614500 n03673027 n02109961 n02950826 n02966193 n01685808 n02804610 n02095314 n03929855 n10565667 n02013706 n02123394 n03590841 n07711569 n02113799 n07860988 n04367480 n07873807 n02096585 n02002724 n02134418 n02398521 n04033901 n02110063 n09468604 n01990800 n04423845 
n02177972 n04447861 n02096585 n02442845 n04265275 n04317175 n01807496 n04366367 n03814906 n12998815 n03482405 n03884397 n03673027 n03673027 n03793489 n02443114 n02988304 n02422106 n04326547 n02992529 n01860187 n03895866 n03180011 n04118776 n03461385 n04275548 n15075141 n03761084 n01944390 n04317175 n04152593 n02927161 n03956157 n02085620 n02727426 n01667114 n04493381 n01729322 n04081281 n01484850 n03124043 n02841315 n02108089 n03345487 n02892201 n07875152 n02093991 n03697007 n02119789 n01739381 n02319095 n02361337 n01883070 n02492035 n02107312 n07715103 n04264628 n01843065 n07860988 n01795545 n01592084 n03676483 n04254120 n03223299 n03220513 n02108915 n03873416 n02128925 n02389026 n01698640 n15075141 n03028079 n01644900 n01694178 n03761084 n03873416 n03710637 n03924679 n03627232 n04542943 n03095699 n02100236 n01784675 n01744401 n04153751 n03770439 n02107142 n03297495 n07753275 n04008634 n07615774 n04550184 n02110806 n04404412 n03976467 n07715103 n04525038 n02776631 n02099267 n02095314 n03028079 n02100236 n03930630 n03188531 n02094258 n04554684 n03887697 n02116738 n02007558 n02102973 n02130308 n04328186 n04141076 n03220513 n02444819 n04458633 n01735189 n02701002 n02071294 n01498041 n04070727 n04423845 n02089973 n04141975 n01729322 n01824575 n04251144 n01692333 n01484850 n04208210 n01667114 n04458633 n04141076 n02058221 n02088466 n07760859 n04560804 n02099267 n03000134 n02481823 n02788148 n02097047 n04487081 n04286575 n02233338 n04344873 n02490219 n02123159 n02120079 n02114855 n02088238 n01775062 n04136333 n03344393 n03535780 n02074367 n03782006 n02487347 n02134418 n02500267 n03208938 n04162706 n02410509 n02091635 n04417672 n01537544 n02951358 n02116738 n03594734 n03775071 n03594945 n04532670 n01695060 n02277742 n02123597 n02883205 n07932039 n02497673 n07754684 n02112018 n03538406 n03895866 n01494475 n02177972 n03197337 n02105641 n02992529 n04070727 n02109525 n02125311 n04456115 n02980441 n03841143 n03938244 n03661043 n01756291 n03794056 n02018207 n03126707 n01614925 n03992509 n03127925 n02115913 n03773504 n02776631 n09472597 n02177972 n03532672 n04476259 n04517823 n13052670 n07753275 n01685808 n04120489 n02120079 n02123159 n02087046 n03598930 n02487347 n03065424 n04517823 n02797295 n02804414 n02843684 n02018795 n03976657 n04005630 n02699494 n03814906 n09332890 n02493793 n04442312 n02100877 n04532670 n03047690 n02077923 n03733281 n04266014 n09835506 n02492660 n04330267 n07716358 n01601694 n04579432 n04380533 n01749939 n03444034 n03400231 n03584254 n03710721 n03895866 n04591713 n03903868 n02088364 n04141975 n01774384 n02112018 n04485082 n04259630 n03041632 n02097130 n03775546 n02093991 n01742172 n09193705 n01984695 n01924916 n02190166 n03706229 n13037406 n04604644 n03602883 n02504458 n03467068 n04536866 n04398044 n01986214 n03777754 n02066245 n02346627 n04370456 n02108551 n04204238 n04371430 n03792972 n02441942 n02096294 n02699494 n04589890 n02085936 n02105056 n02415577 n07734744 n02098286 n02113186 n02096294 n02871525 n03873416 n01784675 n02788148 n02051845 n07930864 n01692333 n02111889 n03662601 n02097474 n02165456 n03595614 n03452741 n04606251 n03796401 n03452741 n07693725 n02112018 n03388549 n04562935 n13133613 n04461696 n01796340 n04270147 n03187595 n03666591 n04120489 n04522168 n02111500 n03976467 n01729322 n02364673 n04356056 n02797295 n02114855 n02749479 n04357314 n07565083 n02676566 n02088466 n02823750 n02093256 n02256656 n02119022 n02883205 n03584254 n03775071 n01682714 n03124170 n04201297 n04044716 n01629819 n12998815 n07584110 n04532106 n03825788 n04501370 n01560419 n03065424 
n02106030 n04229816 n03623198 n02280649 n06785654 n02342885 n02488291 n02606052 n03271574 n04070727 n03717622 n02447366 n03065424 n03527444 n01943899 n02095889 n02132136 n04204347 n03026506 n01749939 n03742115 n02105162 n03733281 n02006656 n04552348 n02493793 n02992211 n02089867 n04111531 n04590129 n03982430 n03495258 n02640242 n02099429 n02132136 n02444819 n02056570 n03494278 n01773157 n02137549 n01534433 n02018795 n03630383 n02281787 n04120489 n02104029 n02098413 n02488702 n03379051 n02807133 n04591713 n02110185 n04209239 n01558993 n04325704 n04264628 n03291819 n02793495 n02133161 n03908714 n03584254 n02091831 n02099429 n09835506 n01798484 n03041632 n02808304 n04136333 n09428293 n04465501 n01688243 n02093428 n02129165 n07749582 n03197337 n04392985 n04367480 n02484975 n02607072 n03089624 n04116512 n04286575 n02233338 n04118538 n04254777 n02410509 n02091244 n03016953 n03026506 n02113978 n02091032 n02096585 n04179913 n01775062 n03903868 n04277352 n02841315 n04597913 n01614925 n04067472 n03876231 n02095889 n02100877 n03444034 n01484850 n02490219 n03272010 n12057211 n03980874 n02097474 n04270147 n04429376 n04111531 n09399592 n04005630 n03595614 n02123045 n03657121 n07892512 n03840681 n04296562 n02807133 n01806567 n04258138 n02114367 n01675722 n02794156 n01698640 n04296562 n07717556 n03476991 n04005630 n02099712 n02099429 n03721384 n04277352 n03127925 n02256656 n03201208 n02088466 n02086079 n01632458 n04376876 n03998194 n01440764 n02704792 n01855032 n03095699 n04355933 n04465501 n03841143 n04501370 n01558993 n03042490 n01950731 n03935335 n04584207 n01984695 n02747177 n03775546 n04525038 n01632777 n04485082 n04116512 n02486410 n02096585 n02096051 n02110627 n03272010 n03775546 n02123597 n02992529 n01632458 n02089078 n03954731 n02437616 n02120505 n04507155 n02114712 n03532672 n03983396 n02108000 n01514859 n07802026 n02951358 n01882714 n04505470 n02231487 n03388043 n04482393 n02112018 n04008634 n02606052 n04273569 n03594734 n04532670 n01855032 n02342885 n03950228 n02093859 n02841315 n02025239 n03930630 n01797886 n03240683 n01775062 n02321529 n02342885 n02108551 n03216828 n02281406 n03710721 n04201297 n01950731 n03216828 n07880968 n04208210 n02514041 n02123597 n04517823 n04553703 n03482405 n07697313 n03690938 n02444819 n04049303 n03085013 n01843065 n03709823 n02117135 n02787622 n07579787 n02099601 n04229816 n03776460 n01644900 n07579787 n03733281 n09472597 n01797886 n07802026 n01806567 n02108551 n02093754 n02132136 n04254120 n03877472 n02480855 n04285008 n15075141 n04325704 n09332890 n03947888 n01828970 n02106030 n04501370 n07730033 n02113186 n03026506 n04266014 n11939491 n04270147 n03777754 n04522168 n01860187 n02443484 n02835271 n04125021 n02794156 n06596364 n04265275 n04136333 n10565667 n04483307 n02277742 n02094433 n07716906 n01514859 n02397096 n02102318 n04442312 n03680355 n02086240 n02174001 n02277742 n03832673 n01768244 n01739381 n02361337 n02607072 n01843383 n02091467 n02090721 n01756291 n02099429 n01806567 n02966687 n02094258 n01986214 n07697537 n02909870 n03967562 n04296562 n03388043 n04482393 n09421951 n07614500 n02865351 n02089973 n04557648 n01537544 n01819313 n03929855 n04136333 n03977966 n04099969 n01675722 n03832673 n02643566 n07749582 n04275548 n04005630 n02074367 n03623198 n03495258 n04296562 n02437312 n02113799 n03874599 n02454379 n02877765 n02109525 n04270147 n01729977 n02950826 n02110063 n03216828 n01484850 n03062245 n02128385 n04228054 n03179701 n01796340 n01694178 n02088094 n03942813 n02869837 n03770439 n02097658 n03047690 n03742115 n03724870 n02966687 n02098286 n01687978 
n02100236 n01616318 n04442312 n02396427 n03998194 n01773549 n07747607 n01944390 n03891332 n03045698 n03877472 n03207941 n02494079 n01819313 n02093754 n02088238 n02168699 n04515003 n01675722 n02018207 n02690373 n03777568 n03026506 n02342885 n02102040 n07583066 n03961711 n02916936 n03958227 n01698640 n07714990 n02483708 n03680355 n04141975 n02085936 n07930864 n03691459 n02892767 n03770679 n03450230 n02165456 n04560804 n01614925 n04458633 n02500267 n02190166 n04380533 n02950826 n07860988 n02346627 n03814906 n02494079 n01817953 n09421951 n03041632 n04371430 n04371430 n03743016 n01630670 n04074963 n04326547 n02894605 n02086910 n03935335 n04461696 n03476991 n03697007 n01818515 n04263257 n02088238 n07697313 n02110806 n07747607 n02108422 n02641379 n04507155 n02124075 n12985857 n02342885 n07697537 n03742115 n12998815 n04591713 n03450230 n02110185 n02091831 n03424325 n01795545 n04507155 n01616318 n01704323 n03887697 n02128925 n01824575 n02099712 n03498962 n04273569 n04090263 n01775062 n03970156 n02480855 n02730930 n02326432 n04355933 n03355925 n01734418 n02107908 n01978287 n03874599 n03478589 n03788365 n02325366 n02445715 n03180011 n03792782 n01667778 n02490219 n01882714 n04005630 n04118538 n03775071 n03792782 n02123045 n02264363 n02776631 n01773157 n01614925 n04548362 n02009912 n02487347 n03272562 n01685808 n02835271 n02110063 n04153751 n02123045 n02417914 n04208210 n03476684 n01768244 n07697313 n02100583 n02504013 n04040759 n04067472 n01798484 n07248320 n02094258 n02483708 n04557648 n01828970 n02172182 n03658185 n02493509 n03991062 n03494278 n03291819 n02410509 n03733805 n04579432 n03124043 n02966193 n02190166 n02526121 n07753592 n07753592 n07768694 n09246464 n07711569 n02018795 n02105056 n01669191 n02268853 n02488291 n02793495 n02101556 n04476259 n07584110 n04542943 n03670208 n03929855 n04204347 n02094433 n09472597 n04479046 n01667778 n03459775 n02056570 n12620546 n04286575 n02795169 n04209239 n02101556 n04532670 n02009229 n04584207 n02795169 n02112350 n01667778 n02939185 n03908618 n01753488 n02841315 n03388183 n03218198 n02776631 n02363005 n02130308 n06596364 n02814860 n02110063 n02117135 n07684084 n04254680 n03109150 n02408429 n04389033 n04483307 n01797886 n02095889 n03958227 n04548280 n02410509 n03837869 n03720891 n04435653 n01498041 n02749479 n07718747 n04461696 n03388043 n02133161 n02165105 n02817516 n04532670 n02013706 n01682714 n02102177 n03290653 n04086273 n02090379 n01797886 n01440764 n01818515 n04562935 n02782093 n03793489 n11879895 n02814860 n02669723 n02974003 n07693725 n02104029 n03372029 n03045698 n03100240 n02127052 n07579787 n03874599 n02504458 n02132136 n03692522 n04517823 n03223299 n04418357 n02110806 n01728572 n04259630 n03930313 n02321529 n02105251 n04317175 n01491361 n07753275 n02028035 n04476259 n03742115 n03032252 n02328150 n04591713 n02088094 n02190166 n04067472 n03134739 n02102318 n03026506 n04371430 n03535780 n01614925 n02111889 n03977966 n03131574 n02071294 n02110627 n02109961 n02412080 n01580077 n06359193 n04209133 n03775546 n03630383 n01753488 n02672831 n02092339 n01644900 n07730033 n03124043 n04065272 n03697007 n01616318 n01558993 n02107683 n04044716 n03877472 n02786058 n02087046 n07717410 n04019541 n01622779 n03337140 n02978881 n04131690 n03887697 n01582220 n02536864 n04065272 n02977058 n03825788 n01687978 n01756291 n04486054 n01737021 n01968897 n03047690 n02106166 n02259212 n02326432 n04476259 n02115913 n02006656 n04254120 n02871525 n03220513 n03769881 n03692522 n02730930 n04235860 n02112018 n02107142 n02834397 n04008634 n02100583 n01729977 n07714571 n01629819 
n02028035 n03724870 n04355933 n01614925 n07714571 n07584110 n02870880 n13054560 n02727426 n03877472 n04263257 n04127249 n03630383 n01978287 n13044778 n02509815 n04251144 n04141327 n12620546 n03388043 n02951358 n02412080 n03110669 n03937543 n04044716 n02101388 n07716358 n04462240 n03933933 n02840245 n03485407 n03461385 n02119789 n01944390 n01924916 n04127249 n04209239 n03908618 n03133878 n03992509 n02410509 n03796401 n01798484 n04557648 n02088632 n03000247 n02971356 n03840681 n01776313 n01773157 n04366367 n03325584 n03873416 n01807496 n02790996 n09421951 n07734744 n03000247 n04597913 n04332243 n02408429 n01677366 n02229544 n03891251 n02110063 n03532672 n03937543 n01558993 n04540053 n12057211 n03388183 n02841315 n09399592 n03933933 n02823428 n02102040 n02690373 n02895154 n02085936 n04458633 n02415577 n04579432 n04557648 n03630383 n02009912 n02113978 n03000247 n09246464 n03498962 n02992211 n03249569 n03930313 n01632458 n02086910 n02097209 n03032252 n01496331 n04118538 n03272010 n02095314 n02930766 n02112137 n03697007 n04127249 n04141076 n03376595 n07613480 n04023962 n03958227 n04515003 n04596742 n02108000 n03874599 n01776313 n02088238 n01950731 n02086910 n03384352 n02093859 n02088632 n02749479 n01631663 n01955084 n04275548 n02493793 n03690938 n02802426 n02110341 n02906734 n02124075 n03991062 n03584254 n03444034 n02979186 n03888605 n01534433 n02129165 n01614925 n02397096 n12985857 n02123159 n01984695 n02097047 n01616318 n02117135 n01682714 n03814906 n02105251 n01877812 n04367480 n01770081 n02099849 n02328150 n07590611 n07734744 n03673027 n02129165 n02111500 n04090263 n02129604 n02894605 n02128757 n04238763 n03720891 n03793489 n03424325 n07716358 n02493509 n02099849 n02091244 n02097658 n02138441 n03047690 n02093647 n02108915 n04263257 n02129165 n04335435 n07760859 n02091831 n03445924 n02280649 n02640242 n04613696 n03527444 n01798484 n03995372 n01728572 n04004767 n02099267 n07920052 n03709823 n02095570 n02018795 n03642806 n04074963 n04141327 n01917289 n04131690 n03250847 n02104365 n03602883 n02093428 n03109150 n03240683 n02086079 n02114712 n02093256 n02102040 n03495258 n04584207 n02870880 n02916936 n07875152 n07583066 n02730930 n04019541 n04254120 n02666196 n03141823 n03063689 n06596364 n02906734 n03445777 n02971356 n03891332 n07892512 n02442845 n03527444 n02667093 n01806143 n03902125 n02457408 n01693334 n02799071 n02814533 n06874185 n02088466 n03825788 n01484850 n03355925 n02095889 n02086646 n03942813 n03425413 n04550184 n02817516 n04049303 n04483307 n02097209 n03388549 n02815834 n02487347 n02074367 n02113186 n02536864 n02114855 n07697313 n03938244 n02492035 n02085620 n02085620 n03223299 n04273569 n03496892 n03866082 n03065424 n03877845 n02871525 n03404251 n04462240 n02113799 n02093859 n03742115 n02123045 n04487081 n02107312 n03938244 n02966687 n02342885 n03781244 n02493509 n02134084 n02749479 n07749582 n12144580 n02114548 n13052670 n07753113 n03777754 n07615774 n02483708 n01784675 n01978287 n02536864 n02443484 n03877472 n04074963 n01632777 n02815834 n01669191 n02104029 n02093859 n01883070 n01774750 n01667778 n01728920 n02219486 n03124170 n02123394 n01740131 n04228054 n01592084 n02128925 n02281787 n02093647 n01667778 n02128925 n01978287 n02130308 n03065424 n12620546 n13052670 n02480855 n03376595 n07734744 n04019541 n02536864 n04350905 n01773549 n03782006 n02111129 n01806567 n07753275 n02256656 n01984695 n04443257 n02410509 n02092339 n02115913 n01806143 n02815834 n03908618 n02279972 n03691459 n03216828 n04370456 n02676566 n03710721 n01629819 n03967562 n03482405 n04487081 n01744401 n02454379 
n02007558 n03201208 n03793489 n03902125 n02672831 n03447447 n02749479 n01440764 n03538406 n03794056 n02097130 n04332243 n02814860 n02488291 n03032252 n02137549 n02281406 n01494475 n02749479 n04458633 n01847000 n03825788 n01819313 n01847000 n03908618 n03444034 n02483362 n04254680 n02123597 n03838899 n02104029 n03633091 n03775546 n01807496 n03692522 n03721384 n04208210 n02892767 n02086240 n02492660 n04049303 n04238763 n03793489 n02107574 n02364673 n02134084 n02092339 n02906734 n04371774 n02097658 n02102040 n01968897 n02090622 n03916031 n03658185 n02536864 n03697007 n03924679 n02325366 n03337140 n02999410 n01983481 n03141823 n03662601 n01729322 n02676566 n02992211 n03089624 n01632777 n02443484 n03534580 n01847000 n02102318 n01855032 n03961711 n03895866 n02892767 n01601694 n02443484 n03930313 n03062245 n02988304 n02090622 n02107908 n03290653 n04542943 n04296562 n01986214 n02233338 n02093991 n03482405 n02966193 n03786901 n02027492 n04392985 n03376595 n07714990 n02504013 n04606251 n03724870 n02093991 n03933933 n02804414 n03063599 n01698640 n03498962 n04252225 n02013706 n03026506 n03787032 n04536866 n02100583 n01582220 n02500267 n03388183 n07693725 n02033041 n03908714 n02219486 n02730930 n03710193 n02108915 n01749939 n02817516 n01729977 n02086910 n02107908 n03450230 n07565083 n02128385 n03141823 n04259630 n01914609 n07697537 n04447861 n02099849 n03126707 n01943899 n04118776 n02791124 n03763968 n03492542 n02094433 n04366367 n01614925 n02007558 n02128757 n04019541 n04612504 n02841315 n13044778 n04147183 n03933933 n02110627 n02226429 n01631663 n03676483 n02487347 n04507155 n03216828 n07718472 n02058221 n03127747 n07745940 n02102177 n02113712 n02965783 n03840681 n04310018 n01774384 n02177972 n03063599 n01697457 n03759954 n02085620 n07753113 n03393912 n02692877 n03868242 n02403003 n03249569 n03884397 n02396427 n03457902 n07718747 n02167151 n04154565 n04147183 n04118538 n03124043 n04372370 n01667114 n03998194 n03995372 n10565667 n01798484 n04591157 n03127747 n02105641 n03485407 n02102177 n04461696 n01824575 n02066245 n04317175 n02107312 n06874185 n04465501 n02939185 n04019541 n03459775 n04548280 n03047690 n04325704 n07871810 n01819313 n03782006 n02086079 n03584254 n03929660 n02492035 n03670208 n02412080 n02109525 n02397096 n01582220 n03188531 n02105641 n02033041 n03992509 n02328150 n03000684 n03126707 n07590611 n02102480 n07684084 n07590611 n09421951 n04285008 n02930766 n04604644 n03584829 n03447721 n01693334 n02910353 n03532672 n04127249 n04154565 n03014705 n13052670 n03483316 n02817516 n03759954 n03733805 n04204238 n02110341 n04147183 n02007558 n02268443 n03133878 n03255030 n02442845 n02018207 n04069434 n02667093 n03866082 n02113978 n02108000 n03832673 n04039381 n01677366 n01955084 n02113023 n04371430 n03134739 n03840681 n07714571 n01955084 n03785016 n03924679 n04443257 n03709823 n04204347 n02086079 n02361337 n04317175 n09229709 n04270147 n01518878 n02105412 n07720875 n02177972 n02098105 n03534580 n02492660 n03954731 n03874599 n04243546 n04344873 n04252077 n02009229 n01774384 n03843555 n02988304 n02422699 n03045698 n03775071 n02098105 n04099969 n01582220 n03026506 n02099849 n02814860 n02980441 n07875152 n01873310 n02117135 n02510455 n02108422 n04599235 n03450230 n02105505 n04239074 n04131690 n04033995 n03445924 n01558993 n02791270 n03770679 n02480855 n02134084 n02098286 n03478589 n01744401 n04532670 n02105412 n03874599 n04125021 n01682714 n02747177 n02992211 n03710193 n01514859 n01687978 n04418357 n02017213 n01677366 n02281406 n02138441 n03594945 n02106030 n03017168 n02105251 n04273569 n02488291 
n09332890 n03873416 n02895154 n02494079 n02437616 n01692333 n04311004 n03218198 n02110185 n02256656 n07880968 n02666196 n03337140 n04399382 n04265275 n04254120 n01798484 n03602883 n03825788 n01833805 n02704792 n01734418 n03594734 n02701002 n02085620 n01582220 n03623198 n03000134 n02992211 n03691459 n02526121 n03998194 n01990800 n03933933 n02950826 n01748264 n15075141 n10565667 n15075141 n02116738 n02643566 n02837789 n04005630 n02091134 n02071294 n10148035 n02951358 n04127249 n03866082 n04579145 n04239074 n02492035 n02107683 n04239074 n04004767 n04550184 n03961711 n03201208 n03207941 n03134739 n02892767 n03394916 n02398521 n03868863 n02486410 n04487394 n03394916 n01496331 n04418357 n02168699 n02097209 n01537544 n01687978 n02799071 n04009552 n03345487 n04346328 n12057211 n03485794 n02443484 n02229544 n02840245 n02415577 n02104029 n03792782 n03888605 n02128925 n03045698 n03837869 n02749479 n04033995 n02422106 n03404251 n04208210 n02113712 n03459775 n02514041 n04371430 n01644373 n03447721 n13052670 n03492542 n04366367 n01968897 n02033041 n02114712 n02804414 n01796340 n04009552 n04597913 n03141823 n04612504 n01729322 n02492660 n03792972 n02130308 n03400231 n01632777 n03085013 n01729322 n02095570 n03970156 n04009552 n03950228 n02086646 n02108000 n03196217 n01580077 n04275548 n04599235 n01774750 n03498962 n03457902 n03930630 n04590129 n01968897 n04462240 n04554684 n02840245 n02804414 n07614500 n03482405 n02871525 n04192698 n02699494 n03388183 n04153751 n03733281 n01797886 n01689811 n02777292 n02389026 n03788365 n01514859 n02102480 n03942813 n02111129 n03017168 n02105855 n04328186 n02115641 n02093647 n02415577 n02536864 n13044778 n02113712 n02123394 n01735189 n03085013 n03127747 n02105641 n04606251 n02814533 n02980441 n02910353 n02098105 n04380533 n02098286 n02018795 n02788148 n01807496 n03908714 n03388549 n02100877 n03982430 n01986214 n04201297 n03347037 n04008634 n04557648 n03445924 n02980441 n03131574 n02948072 n01797886 n04005630 n02111889 n02325366 n01728920 n02129165 n02168699 n04465501 n01728572 n02105641 n01774384 n04418357 n02325366 n03888605 n04149813 n02281406 n03599486 n03124170 n02100583 n03956157 n03788195 n04286575 n04136333 n04344873 n03743016 n01494475 n01910747 n02787622 n04562935 n02909870 n02974003 n02111500 n03388549 n04550184 n07745940 n03673027 n02727426 n03207743 n04487081 n04009552 n02130308 n02105412 n03476991 n01632458 n02790996 n04505470 n04380533 n02108422 n07920052 n03467068 n03249569 n03633091 n02124075 n03763968 n03710637 n03100240 n02256656 n03461385 n02869837 n02948072 n03991062 n02091244 n04476259 n02099429 n02346627 n02782093 n02457408 n02009229 n02910353 n02087046 n01877812 n03787032 n02281406 n04461696 n03782006 n01924916 n03223299 n01768244 n04023962 n07717410 n03062245 n07875152 n03393912 n02364673 n03937543 n02101388 n04548280 n12620546 n03584829 n04606251 n02776631 n04443257 n02788148 n03838899 n02051845 n07768694 n03498962 n02100583 n02102177 n07716358 n04589890 n02128757 n02489166 n03417042 n03355925 n02111889 n03297495 n03180011 n03196217 n02859443 n02321529 n04443257 n03089624 n07730033 n03874293 n03594945 n02423022 n11879895 n02104029 n02916936 n02403003 n03709823 n04467665 n01833805 n02119022 n02687172 n02492660 n02877765 n02099429 n03942813 n02105855 n02168699 n07565083 n03895866 n03126707 n02346627 n02606052 n03670208 n02114548 n02109047 n03916031 n01871265 n04523525 n02690373 n03014705 n02356798 n02128385 n02133161 n03884397 n02108915 n03759954 n03630383 n02106382 n02256656 n02085936 n03197337 n03661043 n04590129 n03958227 n04525038 n02037110 
n03956157 n03717622 n02326432 n03249569 n01631663 n01687978 n12144580 n02277742 n03692522 n04507155 n04389033 n04548280 n01914609 n01776313 n03125729 n02096051 n02769748 n04131690 n02669723 n04376876 n01818515 n02091244 n03207743 n03134739 n03838899 n02641379 n02666196 n02397096 n02009229 n02410509 n02276258 n03062245 n02097130 n02093754 n02123045 n04357314 n03089624 n02091244 n01685808 n02412080 n03841143 n01807496 n02098286 n02124075 n02086646 n03627232 n09468604 n01768244 n07920052 n03976467 n03534580 n03617480 n04467665 n07584110 n04040759 n02090379 n03393912 n01945685 n04482393 n01537544 n02231487 n02137549 n03045698 n04346328 n04597913 n02114367 n07613480 n02892767 n04209133 n02097047 n02100877 n02480855 n03259280 n03272010 n07684084 n03743016 n01773549 n02708093 n02939185 n03617480 n01753488 n07880968 n03218198 n02871525 n02093256 n01798484 n02417914 n02108915 n04125021 n03126707 n04285008 n02526121 n04111531 n02089078 n02927161 n02971356 n04553703 n02442845 n01945685 n01491361 n04347754 n04371774 n09428293 n04370456 n01682714 n01664065 n02085620 n02114855 n03255030 n02130308 n04200800 n02447366 n04127249 n02110185 n02793495 n03944341 n03196217 n02096294 n04133789 n07754684 n03384352 n03459775 n04579145 n01682714 n03041632 n07860988 n06596364 n04296562 n04152593 n01698640 n03792972 n04067472 n03394916 n01728920 n04597913 n04090263 n03445777 n13040303 n07717556 n01914609 n07730033 n02108089 n04597913 n02786058 n06785654 n03956157 n04584207 n03697007 n02114712 n02749479 n07248320 n03673027 n02090379 n04501370 n01917289 n04265275 n04515003 n03710721 n03495258 n04532670 n04040759 n01829413 n02840245 n02699494 n02106550 n03089624 n02105056 n02860847 n02487347 n02085782 n03888257 n03691459 n02398521 n04398044 n01687978 n04371774 n02777292 n01664065 n04476259 n04548280 n12144580 n02669723 n02095314 n02877765 n04429376 n03400231 n03729826 n02825657 n02802426 n03733281 n03124043 n07871810 n02169497 n04263257 n01689811 n04485082 n04099969 n03902125 n04371430 n02091635 n03344393 n02815834 n13044778 n02100877 n02130308 n09246464 n02843684 n01735189 n06874185 n02100583 n02100877 n15075141 n02109525 n02486410 n02950826 n01871265 n02823750 n07583066 n02051845 n01751748 n02483362 n03908618 n02977058 n02111889 n04447861 n02114855 n02095314 n02804414 n02489166 n04277352 n02236044 n02408429 n02655020 n01693334 n03447721 n02093647 n02791124 n02077923 n04536866 n03291819 n02093859 n02115641 n04254680 n04501370 n04019541 n02795169 n03459775 n04209133 n07860988 n04553703 n02484975 n03530642 n02906734 n04325704 n04008634 n12057211 n02342885 n04344873 n03794056 n02107142 n04090263 n02009229 n02971356 n02504458 n04273569 n09399592 n03272562 n02277742 n02279972 n07930864 n02917067 n04004767 n04392985 n07718747 n02089078 n03903868 n03208938 n02133161 n03376595 n02978881 n03201208 n02834397 n02443484 n02085620 n02111889 n03532672 n04263257 n03661043 n15075141 n04200800 n03786901 n01873310 n04423845 n01737021 n02951358 n02116738 n01798484 n03980874 n02834397 n02398521 n01531178 n07734744 n01847000 n03841143 n02110185 n13044778 n02727426 n02799071 n02107908 n01806143 n03770679 n03967562 n02086646 n02892767 n01855032 n02165105 n01514859 n04037443 n03877472 n03729826 n01728920 n02676566 n03627232 n04069434 n04192698 n02486261 n02795169 n04033901 n01824575 n02105641 n02444819 n01824575 n03908714 n04239074 n02102480 n02264363 n01498041 n02930766 n04355933 n04125021 n03481172 n02123159 n02099712 n04209239 n02111889 n02002556 n03690938 n04429376 n03814906 n04525305 n02107908 n01692333 n04127249 n01914609 n04201297 
n02807133 n01985128 n02979186 n02088238 n03594945 n03388043 n09468604 n03729826 n02704792 n07930864 n03355925 n04554684 n04131690 n04026417 n02437616 n03769881 n04330267 n02091831 n01797886 n02687172 n02906734 n02091635 n02814533 n02114712 n03770439 n04099969 n04033995 n02085936 n01644900 n02930766 n01917289 n01704323 n04515003 n01950731 n03888257 n07836838 n02687172 n02102318 n02106030 n02676566 n01749939 n03314780 n03690938 n02823750 n03344393 n03666591 n04458633 n04398044 n01440764 n04482393 n03075370 n02701002 n04023962 n01558993 n07716358 n02325366 n02106382 n04590129 n10148035 n02236044 n04252077 n12144580 n02110627 n03000134 n02086079 n03032252 n02408429 n03394916 n02871525 n01806567 n02127052 n02879718 n03032252 n03935335 n04482393 n03710721 n04522168 n04371430 n04579145 n03967562 n03201208 n04355338 n04328186 n04111531 n01968897 n02115913 n01518878 n04344873 n02814533 n01697457 n04371430 n01855032 n01806143 n03598930 n02971356 n03372029 n02101388 n02963159 n02391049 n01560419 n02114367 n03933933 n03259280 n01756291 n04479046 n07583066 n03792972 n02100877 n07768694 n02007558 n03937543 n03666591 n02104029 n01910747 n02095889 n04417672 n03769881 n03929855 n02641379 n02229544 n07614500 n04311174 n02361337 n07753592 n02206856 n04090263 n03444034 n04525305 n02281406 n02526121 n01807496 n02096294 n01667778 n02480855 n07711569 n02009229 n01697457 n03271574 n01687978 n02100236 n03908714 n01531178 n02364673 n03773504 n03000684 n02981792 n04485082 n01797886 n03498962 n03538406 n03530642 n01872401 n02342885 n02457408 n02480495 n02480855 n01770393 n01560419 n01665541 n04540053 n04346328 n04485082 n02091635 n03733805 n02120505 n02988304 n04049303 n02607072 n02488702 n03026506 n07718472 n03627232 n03388043 n02403003 n03627232 n03877845 n03388043 n02487347 n04005630 n01682714 n01818515 n04311174 n01664065 n04509417 n02086910 n02219486 n04392985 n04344873 n01685808 n07717410 n03384352 n01728920 n02027492 n02012849 n04336792 n02481823 n07565083 n03868863 n03179701 n02109525 n04330267 n03982430 n03272010 n04005630 n02112137 n03770439 n02088094 n02114548 n02091032 n01728572 n03240683 n02808440 n02486410 n02930766 n01737021 n03733805 n03110669 n03016953 n01748264 n02325366 n01748264 n02364673 n02017213 n04252077 n02860847 n03124043 n03461385 n02090721 n03998194 n02095570 n07753113 n04423845 n04044716 n01695060 n01632458 n02643566 n02167151 n01860187 n02403003 n02840245 n03658185 n04116512 n02096294 n01735189 n01514859 n04131690 n02978881 n03461385 n03944341 n02441942 n07753113 n01693334 n09399592 n02105412 n03400231 n04550184 n02823428 n02112137 n03920288 n04509417 n03785016 n03534580 n02066245 n02807133 n01924916 n02017213 n03796401 n02090721 n01981276 n02497673 n09399592 n01749939 n03344393 n03344393 n02490219 n04335435 n04065272 n07873807 n03314780 n03530642 n02783161 n02114548 n02319095 n03018349 n01498041 n02859443 n02096051 n04251144 n03042490 n02167151 n02096294 n09246464 n12985857 n02100583 n03240683 n02236044 n02356798 n02317335 n02859443 n02510455 n01945685 n03792972 n02011460 n03220513 n04141076 n03662601 n07745940 n02747177 n12998815 n04209133 n02097130 n01685808 n04273569 n04515003 n02094258 n02109047 n03028079 n02408429 n03777754 n02113186 n02500267 n03891251 n02112018 n04487081 n02927161 n01664065 n03534580 n03729826 n03187595 n02105505 n07718747 n02802426 n02226429 n04116512 n01756291 n01817953 n07714990 n02457408 n03109150 n04026417 n02437312 n02124075 n02113978 n03109150 n02389026 n06785654 n03089624 n03444034 n04149813 n02091032 n04376876 n02606052 n03492542 n04579145 n01496331 
n01592084 n04141975 n01580077 n02112706 n03388043 n02256656 n02087394 n04179913 n07930864 n04355338 n03874293 n04033995 n02088364 n03535780 n03476991 n04336792 n03888257 n07836838 n03028079 n03877845 n03982430 n02116738 n04596742 n03843555 n15075141 n04325704 n04398044 n02134084 n02132136 n03602883 n01955084 n02268853 n02490219 n04044716 n02492660 n01770393 n03447447 n07871810 n01739381 n03933933 n02110958 n04517823 n10565667 n02087046 n02909870 n07747607 n13037406 n03743016 n02113023 n07716358 n01828970 n04579145 n04482393 n02169497 n04371430 n01751748 n01632777 n02106382 n01697457 n04074963 n03062245 n02607072 n03868863 n04409515 n01829413 n04254680 n01728920 n02802426 n03666591 n01984695 n02708093 n02090721 n02089973 n02099849 n02134084 n13133613 n03733281 n02268853 n04347754 n02115641 n04346328 n02769748 n01665541 n03961711 n02391049 n01675722 n02017213 n03045698 n02356798 n02977058 n01873310 n02276258 n03692522 n02107908 n03954731 n04389033 n02226429 n03676483 n02107908 n01484850 n01774750 n02979186 n03761084 n03623198 n03445777 n03770679 n01728572 n03495258 n04613696 n02441942 n03594734 n02114855 n02883205 n04311174 n04532670 n02134418 n03717622 n02859443 n03930313 n03126707 n03977966 n03983396 n04456115 n07760859 n01532829 n04208210 n03991062 n04131690 n03649909 n03425413 n02017213 n02974003 n03958227 n02408429 n01614925 n03884397 n04429376 n01749939 n01756291 n01498041 n03992509 n03532672 n04286575 n03376595 n02108000 n02108551 n07565083 n03792782 n02089867 n07684084 n03404251 n03871628 n04311004 n13040303 n02111129 n02422699 n03733281 n04153751 n04179913 n02268443 n02443114 n03485794 n07579787 n02110063 n01616318 n03871628 n07697537 n02114367 n02091134 n02883205 n02814533 n03871628 n02105056 n02865351 n03991062 n02104365 n04275548 n03929660 n03814639 n02834397 n03792782 n07730033 n02445715 n02804610 n02119789 n04040759 n02415577 n02206856 n02114367 n04493381 n02276258 n03991062 n02236044 n04332243 n07760859 n02504013 n02090379 n02445715 n10565667 n04487081 n09472597 n04398044 n01873310 n02087046 n03788365 n02097658 n03467068 n07717410 n03642806 n03063689 n01914609 n03792782 n12267677 n03220513 n02119789 n02950826 n02113712 n03697007 n04009552 n03876231 n10148035 n03590841 n03461385 n02814860 n03729826 n03255030 n09288635 n02094114 n04550184 n02115913 n01990800 n02112350 n12998815 n02672831 n01860187 n04493381 n02979186 n02441942 n02128757 n01883070 n03803284 n03417042 n02992211 n04462240 n03759954 n01984695 n07584110 n04118538 n02105412 n03218198 n02835271 n03314780 n04070727 n03325584 n01742172 n04266014 n03447447 n02701002 n01877812 n03062245 n01592084 n01924916 n03781244 n01798484 n02730930 n02417914 n02791124 n02412080 n09256479 n04008634 n02493793 n07753275 n03980874 n02280649 n03400231 n03476991 n02787622 n02086240 n04041544 n04370456 n04591713 n03062245 n04254120 n02125311 n03920288 n02088364 n02002724 n02107683 n01498041 n04550184 n01984695 n04584207 n02971356 n03961711 n02447366 n01855672 n03126707 n03481172 n02640242 n03376595 n02814860 n01498041 n04442312 n03776460 n01882714 n04485082 n03201208 n01978455 n04456115 n03467068 n02086240 n02256656 n04517823 n03291819 n04263257 n02106662 n02823750 n03527444 n01807496 n02112018 n02860847 n01980166 n01514859 n02879718 n02128925 n03944341 n07831146 n04049303 n04004767 n04254120 n02108422 n07871810 n01775062 n02808304 n03929660 n02667093 n07716906 n03697007 n12057211 n03196217 n01855032 n02097047 n02444819 n07711569 n02071294 n06596364 n03584829 n02025239 n09256479 n02484975 n02840245 n02814533 n03188531 n03891332 n01560419 
n02110185 n01685808 n03207941 n02096294 n02672831 n04311004 n04265275 n07730033 n04296562 n02167151 n02110341 n03832673 n03709823 n02115641 n02510455 n04325704 n02129604 n04296562 n13037406 n04554684 n03706229 n02500267 n02101388 n02206856 n02111889 n04442312 n02102973 n02098105 n02906734 n01770081 n13054560 n04325704 n02909870 n02927161 n03976467 n03014705 n02483362 n02012849 n02321529 n03841143 n04389033 n02094258 n15075141 n03733805 n03958227 n03792972 n04542943 n02979186 n07614500 n03666591 n03929855 n07802026 n02974003 n02319095 n02804414 n04325704 n02109525 n02999410 n02120079 n04404412 n01871265 n03871628 n03337140 n01667778 n01819313 n04532670 n02319095 n03457902 n02978881 n02119789 n04026417 n01693334 n01744401 n03825788 n04273569 n03942813 n01984695 n02727426 n01820546 n04487081 n03956157 n04465501 n04579145 n02117135 n04447861 n03085013 n02134084 n03769881 n03717622 n02105251 n03761084 n02088466 n01872401 n02807133 n03775546 n03590841 n03617480 n01677366 n02119789 n02226429 n04409515 n03995372 n02013706 n07697537 n02025239 n02114712 n03394916 n02494079 n01968897 n03977966 n11879895 n03492542 n03843555 n03742115 n04208210 n02423022 n04515003 n13054560 n02483708 n04507155 n07717410 n03255030 n03133878 n03877845 n04344873 n04540053 n09399592 n04517823 n04086273 n02978881 n02115641 n04461696 n02102973 n02277742 n04399382 n04330267 n03661043 n13037406 n04604644 n03958227 n02397096 n04125021 n03445924 n03492542 n02092339 n03787032 n03791053 n02804414 n01753488 n07754684 n01496331 n01990800 n04356056 n04065272 n01756291 n04136333 n03662601 n02006656 n02326432 n02018795 n03777568 n07932039 n04265275 n02268853 n03649909 n04548362 n03538406 n02104365 n03062245 n04131690 n01955084 n04606251 n04037443 n01990800 n02892767 n02113023 n03873416 n04254680 n02444819 n04606251 n02091032 n03623198 n01693334 n04162706 n04476259 n01773157 n02510455 n01616318 n02782093 n04209133 n03777568 n12998815 n04417672 n12620546 n04517823 n02259212 n02727426 n02797295 n03062245 n02794156 n04347754 n03417042 n02123159 n03530642 n07715103 n07716906 n03874599 n04179913 n01877812 n02101388 n02233338 n04141327 n02666196 n04131690 n03032252 n02114367 n03045698 n02090721 n02815834 n07873807 n02965783 n04429376 n04604644 n01855032 n02018795 n03729826 n04404412 n07615774 n02013706 n01955084 n01774750 n01644373 n02096177 n02114712 n03891332 n03482405 n03916031 n02099849 n02480855 n13044778 n02226429 n03670208 n13133613 n03670208 n04125021 n02276258 n03131574 n03929855 n02687172 n02443484 n02101006 n04367480 n02109525 n04049303 n02096051 n03929660 n02776631 n02027492 n01795545 n02109525 n03584829 n03595614 n02992211 n04243546 n03404251 n04023962 n03085013 n02128385 n02111129 n04613696 n04152593 n02978881 n02909870 n10565667 n03467068 n02280649 n03763968 n02056570 n02504458 n03958227 n03874599 n02133161 n03871628 n02099849 n03179701 n01985128 n02112137 n02098413 n01945685 n02105505 n03796401 n04152593 n02410509 n01665541 n04147183 n02655020 n02233338 n03297495 n01776313 n01945685 n03710193 n04462240 n03956157 n02229544 n02782093 n04355338 n03000684 n04542943 n02111277 n04505470 n03196217 n02112706 n03590841 n03197337 n02526121 n04522168 n01877812 n03617480 n02870880 n04591713 n06359193 n02110958 n07892512 n03796401 n03047690 n01518878 n04263257 n01910747 n07753275 n01882714 n04033901 n01784675 n02489166 n03534580 n04447861 n02403003 n07717556 n02027492 n03710721 n02281787 n02807133 n03124170 n02396427 n02981792 n04613696 n02481823 n04522168 n03930313 n10565667 n03776460 n03180011 n04235860 n02397096 n03016953 n03838899 
n09193705 n04404412 n04336792 n02978881 n07720875 n04286575 n12985857 n07613480 n03063689 n02206856 n02011460 n02769748 n02317335 n02749479 n01770081 n02422699 n02088094 n02906734 n06785654 n04152593 n03916031 n02113186 n02115913 n02791124 n03764736 n02356798 n02979186 n02749479 n03630383 n03259280 n04023962 n04026417 n02909870 n03404251 n03868863 n03495258 n03899768 n03733805 n02823750 n02086079 n04356056 n03196217 n01806143 n07718472 n04335435 n03937543 n04070727 n01631663 n02643566 n11879895 n03690938 n02093428 n02105641 n02091134 n03131574 n03485407 n01677366 n02099601 n02123045 n02443114 n02134418 n04370456 n01883070 n04141076 n03467068 n02105162 n02226429 n02397096 n02692877 n02447366 n13037406 n09332890 n04482393 n03877845 n02102480 n10565667 n02791270 n02669723 n02808304 n04548362 n03658185 n02489166 n02098286 n07615774 n04532106 n01807496 n02992529 n01694178 n04428191 n03445924 n07742313 n04037443 n03887697 n01630670 n02099267 n02123597 n01981276 n02825657 n02106662 n03657121 n03249569 n03218198 n04152593 n12985857 n03160309 n02939185 n01817953 n01773157 n02999410 n03482405 n04200800 n02488702 n03272562 n03992509 n03544143 n04141327 n02099712 n03016953 n02107142 n01751748 n02009912 n02087394 n04355933 n02117135 n13054560 n02006656 n03733805 n03710193 n04141076 n01608432 n09835506 n04398044 n07579787 n02099712 n02123597 n07836838 n04131690 n04090263 n02981792 n02018795 n03602883 n02074367 n02443484 n02871525 n02457408 n02799071 n03764736 n03804744 n02190166 n03769881 n04399382 n04553703 n02058221 n02981792 n01692333 n01631663 n03868242 n06785654 n03977966 n04423845 n02791124 n02128385 n01664065 n01756291 n07802026 n02979186 n02814533 n12768682 n04201297 n07742313 n02489166 n02120079 n03743016 n03482405 n01795545 n02108551 n02096051 n02951358 n02169497 n04532106 n02268443 n03676483 n01798484 n02113712 n07697313 n02112018 n04525038 n03982430 n04239074 n02123597 n03063689 n02091134 n02138441 n03255030 n02012849 n02879718 n02111277 n02088466 n02105056 n01776313 n04584207 n02095314 n01806567 n01770393 n03271574 n03599486 n10148035 n03627232 n04275548 n03063689 n03016953 n01990800 n04141076 n03131574 n01968897 n02093256 n01774750 n01855672 n04435653 n03127747 n03657121 n03529860 n07730033 n02837789 n01828970 n02002556 n02132136 n03873416 n03424325 n04259630 n02097130 n03272562 n03496892 n04525305 n03916031 n01644373 n04591713 n02504013 n02091831 n01847000 n03000684 n01770393 n03763968 n02093754 n03063689 n02085782 n03290653 n03777568 n07718472 n02090721 n02089078 n03792782 n13037406 n02111889 n04550184 n03063599 n04229816 n04238763 n01693334 n03743016 n02108551 n04604644 n02281787 n02119789 n02808304 n09332890 n02106550 n07802026 n03249569 n07836838 n03775546 n04204347 n04592741 n01498041 n03929660 n02077923 n02108089 n02094433 n02107574 n13133613 n02749479 n03249569 n02641379 n03804744 n02321529 n01797886 n02690373 n13054560 n02950826 n01737021 n01689811 n01664065 n07693725 n02342885 n02169497 n09288635 n02087394 n03376595 n02120505 n03938244 n03345487 n02500267 n01797886 n04443257 n03492542 n02094258 n03721384 n13044778 n03868863 n07711569 n02236044 n04081281 n03838899 n04596742 n02111500 n04251144 n02100583 n07714571 n04238763 n02105412 n02443484 n04019541 n03394916 n03776460 n03000134 n02109525 n02109525 n02870880 n03393912 n03197337 n04081281 n03763968 n01688243 n02110806 n02834397 n02939185 n02279972 n03888605 n02268443 n02988304 n04310018 n04285008 n09246464 n02389026 n01558993 n01955084 n01930112 n01644373 n12620546 n02093256 n09256479 n02002724 n03160309 n04204238 n01753488 
n03393912 n01641577 n02100735 n04584207 n02100236 n02879718 n02988304 n02105162 n02110806 n04258138 n03590841 n02927161 n01498041 n03720891 n04515003 n02134418 n03014705 n03344393 n02783161 n04443257 n02492660 n03218198 n01755581 n02090622 n03179701 n04252225 n04417672 n04037443 n04065272 n03721384 n02089973 n02091635 n03804744 n09288635 n04613696 n03796401 n07714990 n01770393 n01742172 n02128385 n03492542 n03916031 n01883070 n01739381 n02980441 n02966687 n04486054 n04443257 n01984695 n03026506 n02808440 n02977058 n02114367 n02094114 n02326432 n03016953 n02106166 n03710193 n01644373 n02091134 n03259280 n03018349 n03791053 n04008634 n02095570 n07718747 n03376595 n07717410 n02894605 n07583066 n02281787 n03483316 n02105505 n03837869 n04591713 n02749479 n01514668 n02090379 n03424325 n03642806 n02089973 n01532829 n02105641 n04591713 n01819313 n02127052 n03124043 n03649909 n02113186 n04067472 n02114548 n03791053 n03792782 n02093991 n03530642 n02397096 n02281787 n03661043 n03495258 n02174001 n07880968 n03459775 n02100236 n02727426 n01820546 n02988304 n02112350 n03476684 n04238763 n02028035 n02120505 n01704323 n03047690 n02268443 n02443114 n02112137 n02879718 n01697457 n04264628 n03314780 n03649909 n02133161 n07730033 n03670208 n02835271 n03584829 n02326432 n03916031 n03485794 n03314780 n02342885 n02105412 n02321529 n01669191 n07742313 n03045698 n02510455 n04201297 n03710721 n02966687 n02094258 n02109047 n03376595 n03017168 n01924916 n02017213 n02086079 n03666591 n04465501 n02981792 n03832673 n01806567 n02793495 n02110806 n01833805 n01622779 n02493509 n03495258 n03485407 n02051845 n04141975 n02909870 n01698640 n02096294 n02009912 n02097658 n02018207 n02804414 n03095699 n01665541 n03532672 n02102177 n01806143 n01847000 n07693725 n02268853 n03530642 n03908618 n03781244 n04286575 n02111129 n04273569 n04590129 n02100583 n03916031 n04404412 n02708093 n03160309 n07579787 n03476991 n04204238 n03344393 n09193705 n01665541 n01968897 n03180011 n02948072 n01871265 n01843383 n02494079 n02105505 n02356798 n02769748 n01955084 n01990800 n02113712 n03976657 n03633091 n03937543 n04252225 n02442845 n03461385 n03014705 n01644900 n03924679 n04152593 n02974003 n02804414 n03290653 n04344873 n02326432 n04371430 n03485794 n02107142 n03483316 n04330267 n01883070 n02105505 n03062245 n03924679 n02326432 n03761084 n02104029 n02074367 n04023962 n02123597 n04264628 n03902125 n02077923 n02927161 n03272562 n04399382 n07875152 n03478589 n03680355 n02093428 n03903868 n02396427 n01753488 n01914609 n04487081 n03372029 n01753488 n02096585 n07747607 n01601694 n03146219 n03733131 n03124043 n02090622 n03063599 n03599486 n03976657 n07880968 n02086910 n02494079 n02100735 n01693334 n02966193 n02089973 n03866082 n02640242 n02094433 n03947888 n01592084 n04039381 n04263257 n04326547 n02841315 n04009552 n02099712 n03271574 n02701002 n03791053 n04252077 n07717410 n02027492 n02097474 n02113799 n01773797 n11939491 n03494278 n02971356 n02509815 n02107683 n04328186 n03998194 n03938244 n03721384 n02089973 n07684084 n04613696 n03476991 n03444034 n03272010 n02219486 n07613480 n03899768 n01770393 n04532106 n04264628 n03314780 n02422106 n01689811 n04154565 n03991062 n02088094 n03384352 n02088632 n03146219 n02017213 n02123597 n01806567 n01740131 n01829413 n04004767 n04355338 n04044716 n01735189 n03218198 n02108422 n07831146 n02110185 n07932039 n03658185 n01773797 n09288635 n02133161 n01820546 n09332890 n09468604 n03935335 n04562935 n03908714 n02167151 n03216828 n02497673 n04493381 n03452741 n02117135 n04131690 n02120505 n03743016 n02364673 n03980874 
n04462240 n02804414 n02051845 n02808440 n02172182 n09428293 n02093428 n03220513 n02699494 n03803284 n03804744 n02514041 n04099969 n04296562 n03388549 n12998815 n03933933 n04208210 n02410509 n04482393 n04487081 n02486261 n02113799 n04228054 n09835506 n04067472 n01664065 n04428191 n01740131 n02493509 n11939491 n03042490 n03584254 n09468604 n04120489 n02483708 n01498041 n03786901 n04523525 n02165105 n03888605 n02115913 n04201297 n04501370 n04037443 n02172182 n03793489 n03724870 n02391049 n04069434 n02807133 n02056570 n07584110 n04398044 n04398044 n03854065 n02655020 n02107312 n04366367 n04086273 n03485407 n02104029 n04251144 n03627232 n02132136 n02979186 n02317335 n03201208 n04479046 n03452741 n04258138 n07590611 n04149813 n04355933 n03207941 n04479046 n02441942 n03866082 n07583066 n03445777 n03017168 n02672831 n04204238 n04326547 n02113712 n01514668 n02415577 n03706229 n02981792 n02840245 n04389033 n03992509 n02403003 n04005630 n03637318 n04371430 n04347754 n02100583 n01518878 n02319095 n02492035 n04597913 n02206856 n02025239 n04591157 n01773549 n04081281 n07697537 n01682714 n04069434 n02085782 n02655020 n07714571 n01614925 n04008634 n07873807 n04131690 n03680355 n02422699 n07753592 n03840681 n06785654 n01530575 n02096051 n03764736 n02108089 n04044716 n03384352 n01818515 n02056570 n02097130 n01665541 n01688243 n04131690 n04606251 n01616318 n01688243 n02113186 n04613696 n01737021 n02776631 n03995372 n01806143 n01753488 n04037443 n02879718 n04009552 n02110806 n04332243 n04560804 n03884397 n02110958 n03888605 n01685808 n07565083 n02883205 n02492660 n01798484 n03100240 n02088094 n04229816 n02098286 n02841315 n03017168 n04120489 n07718747 n03933933 n04355933 n04483307 n02107142 n01744401 n02093991 n02112137 n02085936 n03929855 n02051845 n02091831 n01740131 n02948072 n02112706 n04584207 n04070727 n03584254 n04235860 n01749939 n02086079 n03424325 n04485082 n02165456 n03259280 n02132136 n03445924 n12768682 n03325584 n01644373 n02361337 n04523525 n07753592 n04067472 n04579145 n07880968 n02231487 n04486054 n03658185 n04429376 n03126707 n02085620 n02104365 n02692877 n04557648 n04606251 n03888605 n02105412 n06785654 n02101388 n03393912 n04370456 n12985857 n07871810 n03742115 n04238763 n02101006 n02090379 n09399592 n07930864 n02123597 n03494278 n02363005 n07892512 n02776631 n03785016 n07930864 n02123394 n01855032 n02883205 n02091831 n03868242 n02930766 n01945685 n03594734 n02493793 n02398521 n04501370 n03417042 n02815834 n03710637 n02100583 n02497673 n02894605 n03895866 n01756291 n02091032 n02120505 n03980874 n07745940 n02769748 n04208210 n01990800 n02397096 n01692333 n03814639 n01855672 n04154565 n02317335 n02815834 n07693725 n03720891 n02110627 n13037406 n02391049 n04131690 n01930112 n07760859 n03770679 n02111500 n04252225 n01877812 n03180011 n13044778 n02492660 n04273569 n04004767 n04238763 n03706229 n04357314 n01641577 n04311174 n03109150 n03866082 n03933933 n02412080 n03207743 n03218198 n07716906 n03218198 n02667093 n02799071 n02346627 n03874293 n01537544 n01728572 n03804744 n01855672 n01744401 n02747177 n02939185 n02676566 n02950826 n02097298 n01819313 n02276258 n09428293 n01682714 n03710637 n03920288 n02672831 n02447366 n02860847 n02412080 n04254680 n01692333 n02807133 n03394916 n13133613 n01806567 n07720875 n07836838 n02088094 n02102040 n01580077 n03775546 n04238763 n04118776 n04540053 n02096294 n02441942 n03781244 n02093256 n02988304 n02423022 n07871810 n01704323 n02132136 n01560419 n02206856 n01833805 n02980441 n11879895 n07875152 n03930313 n03042490 n03954731 n03933933 n03126707 n03461385 
n02114855 n03929660 n04550184 n02783161 n03944341 n07693725 n02123045 n09288635 n03196217 n03297495 n02091831 n03670208 n04487394 n02105251 n02454379 n02099849 n04409515 n01592084 n02092002 n07590611 n03992509 n02412080 n03075370 n02447366 n02669723 n12985857 n03584254 n01753488 n02708093 n02497673 n04069434 n01484850 n07873807 n03492542 n03457902 n03670208 n04376876 n01697457 n02101556 n11879895 n02071294 n03710193 n03961711 n03930313 n02793495 n12768682 n03657121 n04596742 n04204238 n02093754 n03961711 n09472597 n03379051 n02417914 n02107312 n02489166 n01828970 n03884397 n04251144 n03792782 n02782093 n01820546 n02981792 n06359193 n03443371 n01735189 n04501370 n03673027 n03770679 n03085013 n02112706 n01978287 n02794156 n02087394 n01443537 n04286575 n02123394 n04264628 n03337140 n03710721 n03947888 n02514041 n02328150 n02110185 n03992509 n02965783 n02096177 n01824575 n03929855 n02815834 n02643566 n01744401 n02672831 n02447366 n06874185 n04325704 n02317335 n03126707 n02056570 n02457408 n03443371 n04125021 n03866082 n03127747 n04311004 n02134084 n01910747 n07716358 n02134418 n02071294 n04335435 n03594734 n06359193 n04336792 n02097474 n07717410 n02092339 n04376876 n03785016 n02087394 n02825657 n03208938 n03720891 n04366367 n02480855 n03124043 n04067472 n03180011 n04049303 n04243546 n04423845 n03127747 n02259212 n03697007 n04136333 n04590129 n03942813 n02268443 n04008634 n04254680 n04125021 n04040759 n03924679 n04485082 n02410509 n04259630 n03584829 n03196217 n03776460 n01774750 n09421951 n07802026 n04399382 n04536866 n04525038 n02091467 n03902125 n03544143 n02791270 n03888605 n03376595 n02397096 n03777754 n04592741 n03047690 n07693725 n02113978 n04398044 n02783161 n04596742 n03785016 n01582220 n02791270 n02791124 n02129165 n03404251 n03670208 n03903868 n02978881 n02094433 n04252225 n02096177 n03496892 n03000684 n03983396 n02111277 n03720891 n03782006 n01829413 n04153751 n03271574 n03538406 n03970156 n03924679 n02088094 n01806143 n02113978 n03207941 n03347037 n03633091 n03404251 n04579145 n02276258 n02086240 n02799071 n03871628 n02087394 n02264363 n03478589 n03788365 n02097658 n02093647 n07920052 n03788195 n03720891 n07717556 n02113023 n01855032 n07802026 n02037110 n03832673 n04350905 n07613480 n02814860 n03777754 n03218198 n02441942 n02115913 n02109961 n04347754 n03841143 n02786058 n02690373 n07697313 n07613480 n01873310 n03874599 n02113624 n02992211 n07871810 n03388183 n01644900 n04067472 n04039381 n02361337 n04039381 n04370456 n01843065 n01877812 n02488291 n03692522 n02669723 n03018349 n03207743 n02096177 n01514859 n02105056 n03495258 n03207743 n04523525 n03259280 n03127747 n02988304 n02096437 n02087394 n04370456 n01882714 n01644900 n11879895 n03814639 n03763968 n03788365 n04579145 n03837869 n04429376 n02219486 n03983396 n04591157 n07693725 n02281787 n01829413 n04606251 n02795169 n03467068 n02486410 n04505470 n02488702 n02108089 n02783161 n06596364 n01558993 n07871810 n02655020 n02256656 n03290653 n03131574 n01829413 n02930766 n03529860 n01871265 n01675722 n02840245 n04392985 n04286575 n03404251 n02823428 n02951585 n02077923 n03000247 n01843065 n02804414 n04525038 n01749939 n03095699 n04552348 n03532672 n03527444 n03947888 n02667093 n02346627 n01667114 n07749582 n02128385 n02093754 n02092002 n02782093 n04310018 n02104365 n02134418 n03769881 n02776631 n01984695 n02097658 n02095570 n02321529 n02108000 n02098413 n03623198 n03100240 n03109150 n02168699 n03017168 n01819313 n02117135 n03871628 n03924679 n04399382 n15075141 n03884397 n03425413 n03584829 n03976467 n02979186 n02124075 n02869837 
n03998194 n02025239 n01558993 n04044716 n02107908 n04404412 n04266014 n03944341 n01751748 n02025239 n04040759 n02102973 n03930630 n09246464 n02174001 n02389026 n03764736 n01795545 n02790996 n02526121 n03133878 n03124043 n02979186 n02093754 n03598930 n03250847 n02134084 n03733281 n02226429 n04019541 n02105855 n02256656 n02787622 n04435653 n03599486 n03733131 n02325366 n03259280 n03028079 n03476684 n03133878 n03590841 n03197337 n04525038 n03494278 n04270147 n01860187 n02086910 n02457408 n03627232 n03133878 n03947888 n02823428 n02097298 n02108000 n04540053 n03141823 n03201208 n03476991 n02113023 n03777754 n03854065 n02415577 n02974003 n01820546 n02087046 n04149813 n04332243 n02090379 n04509417 n07760859 n03637318 n02672831 n03141823 n03538406 n03201208 n04286575 n02097658 n03873416 n04515003 n09193705 n02939185 n03933933 n01749939 n03483316 n02098105 n02107908 n02130308 n02105641 n04458633 n03692522 n02777292 n07565083 n02708093 n02783161 n04037443 n04259630 n02112706 n07802026 n01729977 n02168699 n04192698 n04209133 n07590611 n01729322 n02028035 n04579432 n01518878 n02443484 n07742313 n04376876 n04019541 n02791270 n02906734 n02264363 n02233338 n06874185 n04069434 n13044778 n02981792 n02117135 n03775071 n03249569 n04239074 n03868242 n02099267 n03467068 n02791270 n01632777 n01817953 n04325704 n01582220 n04081281 n03838899 n02865351 n02445715 n04009552 n02089867 n02256656 n01860187 n02815834 n04447861 n03786901 n04120489 n03584254 n03255030 n02006656 n03187595 n04152593 n03467068 n03942813 n03947888 n07831146 n02090721 n04532670 n03018349 n02093991 n01917289 n01729322 n02108422 n03197337 n02951585 n04263257 n07932039 n01537544 n03495258 n01755581 n02096051 n01737021 n04120489 n02111500 n03895866 n02106166 n04350905 n04081281 n02791124 n04501370 n02115913 n02088466 n07614500 n02410509 n01740131 n03483316 n02701002 n03792782 n03995372 n03016953 n02536864 n12144580 n02011460 n04355933 n02423022 n03658185 n03344393 n02096177 n03692522 n04423845 n02110185 n02177972 n03197337 n03924679 n01749939 n02229544 n03000247 n01744401 n02321529 n03874293 n03481172 n01872401 n02112018 n02492035 n03670208 n04372370 n01697457 n02788148 n01796340 n03272562 n02098286 n03781244 n03666591 n13037406 n04532670 n03394916 n01744401 n02114855 n04542943 n02860847 n02268443 n04254120 n02088466 n11939491 n03788195 n07860988 n03832673 n02134084 n02092339 n02797295 n04252077 n04591713 n02096177 n03134739 n03982430 n02107574 n02233338 n07697313 n03891332 n03325584 n03208938 n01518878 n02509815 n03710721 n04487394 n03014705 n02099429 n02834397 n04141975 n01978455 n03891332 n02870880 n04265275 n02497673 n01955084 n02963159 n02099712 n02793495 n03691459 n02085782 n03991062 n02088094 n07711569 n02346627 n07695742 n03218198 n01784675 n02799071 n03944341 n03179701 n02415577 n04370456 n04443257 n04254777 n01496331 n02699494 n01677366 n02514041 n02086240 n02107908 n11879895 n03770679 n02749479 n03803284 n04485082 n03201208 n03045698 n03944341 n01930112 n02113186 n04286575 n03706229 n02871525 n01774384 n01855032 n02109047 n02114548 n12998815 n03218198 n03216828 n04371774 n02114712 n04548280 n02276258 n04033995 n03393912 n03980874 n04389033 n07583066 n01704323 n03445924 n02018795 n03445777 n02098286 n03838899 n01689811 n03666591 n03000247 n02099712 n03483316 n04505470 n02490219 n04239074 n01531178 n02116738 n01950731 n02113624 n04204238 n02276258 n07715103 n03026506 n02108551 n02127052 n02088466 n02093256 n02102040 n03976657 n04532670 n03776460 n03220513 n03903868 n03792972 n03529860 n02009229 n02113624 n02447366 n03461385 n02102318 
n04263257 n02114855 n02676566 n03425413 n03538406 n03666591 n03272010 n07768694 n04392985 n04330267 n03026506 n07730033 n02094258 n04515003 n04265275 n13044778 n02965783 n02120505 n02058221 n03314780 n02793495 n02708093 n03633091 n03014705 n01665541 n02526121 n04067472 n04428191 n07836838 n02177972 n01817953 n04296562 n04099969 n03956157 n02114367 n02091635 n02113978 n03838899 n02437616 n04370456 n02423022 n02112706 n02096585 n02497673 n04505470 n02098286 n02319095 n04560804 n03976657 n04330267 n02481823 n04532670 n12057211 n03584254 n04065272 n04596742 n02823428 n01494475 n03133878 n07579787 n04141975 n03794056 n03000684 n04067472 n02108422 n04254777 n01616318 n03814906 n03444034 n04277352 n04612504 n02917067 n03729826 n02095314 n03796401 n04486054 n03637318 n02786058 n03661043 n03400231 n02112350 n03980874 n04251144 n01978287 n03483316 n03633091 n04597913 n02093647 n02097474 n02097130 n03998194 n01689811 n04482393 n02231487 n04328186 n03188531 n02490219 n04579432 n09256479 n03770439 n07697537 n02389026 n04252225 n03594945 n04310018 n01978455 n03803284 n03063689 n01924916 n03240683 n03837869 n02114712 n02999410 n04371774 n03676483 n02091467 n03196217 n03347037 n04487081 n03888257 n03787032 n01631663 n03447721 n02086079 n01644373 n09468604 n07613480 n04356056 n04493381 n06785654 n03179701 n01675722 n04429376 n02966193 n03584254 n03673027 n03223299 n03443371 n02106382 n04125021 n03786901 n04467665 n03498962 n03662601 n02088632 n02510455 n12998815 n02747177 n04252077 n12267677 n04501370 n02113978 n03141823 n01817953 n03126707 n03110669 n02910353 n03417042 n09193705 n02102318 n01807496 n02268443 n01632777 n02814533 n07875152 n01484850 n02092339 n02791124 n04417672 n03160309 n02134418 n03483316 n01829413 n02095889 n07693725 n04579145 n03942813 n02091134 n04209239 n07584110 n04590129 n03873416 n02105056 n02488291 n04136333 n01855032 n04525305 n04039381 n02025239 n03476991 n01614925 n01735189 n02894605 n04505470 n02127052 n12267677 n02865351 n03481172 n02445715 n02892767 n02974003 n03249569 n01860187 n01687978 n03733805 n03445777 n02676566 n07734744 n03544143 n03676483 n03877845 n03372029 n03977966 n02090721 n03676483 n02655020 n02134418 n02364673 n02110627 n03527444 n04317175 n02280649 n02788148 n02119789 n02804610 n04435653 n02120505 n02802426 n02606052 n07717410 n03290653 n03017168 n02087046 n02093647 n04259630 n01819313 n03467068 n02113712 n03935335 n02927161 n02113186 n03673027 n04200800 n04192698 n01518878 n03417042 n02093754 n02088364 n02749479 n01688243 n04070727 n04604644 n02457408 n06874185 n04483307 n02422106 n01692333 n02834397 n03485794 n02219486 n01950731 n02028035 n01644900 n03125729 n12144580 n01682714 n03843555 n03602883 n02018795 n03447447 n02865351 n03223299 n03355925 n04592741 n02106662 n02033041 n01820546 n03761084 n02165105 n02397096 n02101556 n04328186 n03933933 n03355925 n04328186 n03950228 n03134739 n03535780 n01748264 n04330267 n02699494 n01985128 n02978881 n04141327 n02403003 n02120079 n07579787 n02317335 n02509815 n04146614 n01944390 n04467665 n02927161 n12620546 n02098286 n01914609 n02486410 n02963159 n03085013 n04525305 n04141076 n01742172 n01798484 n02102480 n01729322 n03938244 n02096585 n04099969 n02437616 n03729826 n01829413 n03527444 n04086273 n02013706 n03594734 n02105855 n04536866 n02489166 n02093991 n02109525 n01930112 n01580077 n02457408 n04328186 n01751748 n03026506 n04235860 n02113023 n03063689 n01882714 n03930630 n03710721 n04264628 n04081281 n04116512 n04044716 n01697457 n04330267 n02860847 n02107908 n04399382 n03873416 n04509417 n03792972 n02102318 
n01883070 n07742313 n02033041 n12620546 n03995372 n02086646 n03485794 n07747607 n02098413 n03877472 n02106550 n04263257 n02134418 n04263257 n04606251 n01630670 n02280649 n02504013 n02871525 n04081281 n03782006 n01514668 n02396427 n02093428 n02979186 n04254777 n04009552 n03602883 n07747607 n04562935 n02033041 n04505470 n02906734 n03045698 n01629819 n04613696 n07717556 n02487347 n01917289 n01817953 n07753275 n02457408 n02992529 n01742172 n03950228 n03584254 n02526121 n01494475 n02085936 n02391049 n04355933 n03950228 n03584829 n02128385 n01872401 n02091467 n03481172 n04204347 n03899768 n02107312 n02692877 n04606251 n03770679 n07749582 n01558993 n02099712 n03792782 n03791053 n04317175 n02086079 n02480855 n01682714 n04509417 n03792972 n02108551 n02606052 n03995372 n04336792 n02490219 n07695742 n12998815 n03759954 n04265275 n02971356 n03661043 n02120505 n01530575 n03690938 n02422106 n02120079 n07873807 n04579432 n03930313 n09288635 n02509815 n03998194 n03791053 n01930112 n03991062 n02125311 n02909870 n07718747 n01729322 n02133161 n03763968 n03944341 n01943899 n02445715 n04443257 n02109047 n04141327 n03041632 n01592084 n02906734 n01828970 n03388549 n01917289 n02859443 n02110958 n03956157 n02797295 n02100583 n02776631 n03485407 n04285008 n03623198 n01753488 n03146219 n03535780 n12768682 n12768682 n02100583 n03976657 n04251144 n03444034 n03980874 n02066245 n01692333 n03223299 n04461696 n09835506 n02206856 n13040303 n02088094 n02487347 n03781244 n03832673 n02917067 n01806567 n03776460 n04208210 n04462240 n02093428 n02123045 n03047690 n04201297 n02895154 n04252225 n03837869 n01877812 n03961711 n01753488 n02105505 n02112018 n02110627 n02389026 n02782093 n02099712 n03742115 n04141076 n01735189 n02879718 n03594734 n04462240 n02788148 n02106166 n03991062 n01820546 n04259630 n04310018 n15075141 n03717622 n03595614 n03598930 n02132136 n03630383 n03692522 n04591157 n04154565 n02346627 n02687172 n07693725 n02514041 n02128757 n02095314 n01855032 n03942813 n03485407 n13133613 n03062245 n03447447 n02895154 n04380533 n02364673 n03146219 n02109961 n02113799 n02859443 n01558993 n02119789 n01930112 n04275548 n03602883 n02497673 n02037110 n03026506 n07930864 n04330267 n02480495 n02107683 n03786901 n01917289 n03133878 n04532670 n01775062 n03633091 n03777568 n01945685 n03109150 n03792972 n02895154 n04548362 n02114855 n03775071 n07717556 n02483362 n02909870 n02027492 n07584110 n03594734 n03642806 n03877845 n03379051 n02927161 n04417672 n04009552 n04004767 n02799071 n03874599 n01883070 n03933933 n03450230 n01698640 n03146219 n02113023 n03379051 n03160309 n01968897 n03976467 n04328186 n02018207 n02123597 n02791124 n01729977 n04228054 n02966687 n02094258 n03425413 n01819313 n02100236 n02389026 n02108551 n02085620 n03791053 n03916031 n01871265 n01698640 n02100877 n03146219 n03903868 n03803284 n04204238 n04037443 n02128925 n03131574 n02823428 n09421951 n03884397 n07742313 n03871628 n01770081 n04540053 n03000134 n02443114 n04476259 n04317175 n02091032 n07248320 n04146614 n04532106 n07920052 n02484975 n04612504 n01530575 n03929660 n04540053 n01796340 n01828970 n04162706 n03481172 n03983396 n02777292 n02018795 n02869837 n02835271 n03201208 n01518878 n12057211 n03787032 n02641379 n04554684 n02791124 n01819313 n02389026 n04090263 n03908618 n03792972 n02484975 n07590611 n01530575 n12985857 n09229709 n01755581 n03627232 n02123159 n03775546 n04596742 n04346328 n02669723 n07753592 n07613480 n03884397 n02892201 n01924916 n04467665 n02488291 n03868242 n02356798 n04265275 n02077923 n02102973 n03457902 n02190166 n03259280 n02105162 
n02091831 n02256656 n01872401 n02493793 n02408429 n02106550 n03929660 n03325584 n04332243 n04270147 n01630670 n03250847 n02114367 n02106166 n03134739 n02814860 n02110063 n03903868 n02395406 n04311174 n03532672 n02840245 n01986214 n04429376 n02119022 n03218198 n02783161 n03770439 n02089867 n02966687 n03658185 n09193705 n03085013 n02971356 n04049303 n11939491 n02105641 n03494278 n02364673 n01534433 n01735189 n02105855 n03743016 n07718472 n02113799 n04443257 n02096294 n02128925 n02264363 n03796401 n02444819 n03770679 n02093647 n03483316 n02107574 n04127249 n02978881 n13054560 n02823750 n03794056 n03000684 n01496331 n01807496 n02791270 n01860187 n03218198 n02364673 n03498962 n04153751 n01688243 n03388183 n01968897 n02172182 n02112018 n02883205 n03854065 n12267677 n02094258 n04254120 n01855672 n02100877 n03344393 n07693725 n02669723 n02264363 n03763968 n03637318 n04447861 n01984695 n12267677 n04335435 n02120505 n02104365 n03450230 n04286575 n03207941 n02106166 n03325584 n03793489 n03788365 n03877845 n02190166 n02051845 n02100583 n02104029 n06359193 n01514859 n02106550 n02165456 n02276258 n01514859 n03485407 n01632777 n02408429 n03124043 n03717622 n04252225 n04517823 n03425413 n04310018 n03017168 n03832673 n01770081 n03127925 n02089867 n03461385 n03485407 n01592084 n02256656 n03146219 n01795545 n03947888 n07693725 n04483307 n02002556 n04532670 n04049303 n02892201 n03857828 n01494475 n01601694 n04131690 n02666196 n02098286 n02641379 n04228054 n03980874 n04590129 n01616318 n03690938 n04127249 n03345487 n02113023 n01749939 n04229816 n02927161 n03956157 n02111500 n01756291 n02492035 n02119022 n02443114 n02950826 n02319095 n04346328 n02128757 n03998194 n02667093 n01943899 n04467665 n01530575 n01614925 n04346328 n02093754 n03733805 n03742115 n03197337 n02107908 n01737021 n02281787 n03141823 n04254120 n01532829 n02526121 n02966687 n02484975 n03832673 n02113799 n03958227 n04350905 n03623198 n06874185 n03337140 n02097658 n04311174 n04201297 n03908714 n01740131 n03929855 n02509815 n03903868 n03658185 n01843065 n04557648 n04392985 n02454379 n02493793 n04275548 n03220513 n02606052 n04118776 n02514041 n07684084 n03388183 n02794156 n01632777 n04238763 n04372370 n03876231 n02948072 n02096437 n02497673 n03843555 n07565083 n02097130 n04509417 n03255030 n02129165 n01682714 n07753275 n09472597 n02134418 n02219486 n02097047 n03063689 n02091467 n03781244 n02807133 n03814906 n04355338 n04579145 n03272010 n02086646 n02106662 n03956157 n02783161 n02112137 n03188531 n03126707 n01608432 n03337140 n01847000 n04125021 n04147183 n07720875 n02319095 n02510455 n04311174 n03584254 n04542943 n02102480 n02114712 n02268443 n07718472 n03792972 n03724870 n04239074 n02091134 n02129604 n03127925 n02086646 n03207941 n01819313 n04522168 n03271574 n04487394 n03710193 n02105855 n03131574 n02105251 n02095889 n03384352 n07880968 n02259212 n04069434 n01669191 n03710193 n01855672 n13037406 n01484850 n04476259 n03871628 n01774750 n02108551 n02090622 n03733281 n03724870 n03976657 n02099267 n04127249 n02097474 n02056570 n01795545 n07714571 n02107142 n01608432 n02113023 n04486054 n03876231 n04270147 n03461385 n13040303 n02102318 n02910353 n02094114 n02786058 n02992211 n02396427 n04344873 n02097130 n01443537 n04325704 n02093428 n04258138 n07584110 n03443371 n03481172 n02110341 n04141975 n02226429 n02281406 n04141327 n04118538 n02037110 n02226429 n01692333 n03916031 n02787622 n03594945 n07860988 n03729826 n04515003 n04612504 n02007558 n01560419 n02951358 n02837789 n04456115 n04239074 n02094433 n04553703 n03045698 n03874599 n03595614 n02514041 
n03876231 n04467665 n04146614 n02089973 n04005630 n04266014 n04074963 n03527444 n04355338 n09246464 n03980874 n01990800 n03697007 n13133613 n07613480 n02655020 n03240683 n04111531 n01871265 n01695060 n03478589 n04265275 n02094433 n02009229 n02708093 n03447447 n03216828 n04371430 n03991062 n02607072 n02481823 n02102318 n09256479 n02123597 n02927161 n01737021 n01675722 n11939491 n03937543 n03729826 n01820546 n01847000 n02112137 n01675722 n04613696 n02974003 n03384352 n03627232 n04429376 n01756291 n03496892 n02398521 n02168699 n03000247 n01739381 n04371430 n04335435 n03532672 n02441942 n03400231 n03793489 n01795545 n01740131 n02110806 n03063599 n02095314 n04579432 n04591157 n02321529 n03661043 n01440764 n04228054 n04462240 n03877472 n03720891 n02514041 n03272562 n01601694 n02091467 n04041544 n03796401 n03594734 n02089078 n02493793 n01440764 n09399592 n03775071 n04296562 n02099849 n02804610 n03384352 n02088632 n04026417 n02794156 n01968897 n02133161 n03777754 n02494079 n02107142 n03710193 n02640242 n04209133 n02443114 n03259280 n02172182 n02089078 n04049303 n02093647 n06785654 n03733131 n03476991 n04259630 n01768244 n13037406 n02168699 n02013706 n02089078 n01817953 n02280649 n02877765 n04273569 n02097209 n06785654 n02104365 n02107908 n02484975 n02906734 n09468604 n01632777 n01494475 n01983481 n04372370 n02364673 n02730930 n02100583 n04127249 n03355925 n02108089 n03197337 n03857828 n01496331 n02110341 n04074963 n02087046 n03000684 n03485794 n02500267 n02105162 n03425413 n01944390 n02112018 n04005630 n01582220 n04275548 n07754684 n02011460 n02132136 n01748264 n04228054 n02980441 n02113624 n04597913 n02123159 n02027492 n04590129 n02114548 n03208938 n02099267 n03538406 n03218198 n04254120 n03337140 n02089078 n02701002 n02086240 n02088632 n01943899 n13052670 n04606251 n09229709 n01687978 n03929660 n02093754 n01729322 n02107908 n07715103 n03773504 n04592741 n02107908 n02264363 n04154565 n02098105 n03485794 n02791270 n06874185 n02488702 n03014705 n03657121 n03854065 n02107574 n02669723 n03950228 n02317335 n04133789 n01685808 n03933933 n02097047 n02011460 n01819313 n03982430 n01784675 n03670208 n03220513 n04118538 n02782093 n02783161 n03496892 n02107574 n04040759 n02013706 n02777292 n01775062 n01748264 n03018349 n04111531 n02089867 n09246464 n04548280 n07734744 n03291819 n04552348 n03871628 n07753113 n01729322 n07715103 n04596742 n02128385 n03976467 n04548280 n02497673 n02134418 n02105251 n03970156 n01749939 n01795545 n01855032 n02395406 n02098413 n02111500 n02895154 n07565083 n03742115 n02108089 n02321529 n02971356 n02437616 n03208938 n01667114 n02226429 n03877845 n02910353 n04070727 n04152593 n01883070 n02870880 n02504458 n04243546 n02096051 n03899768 n02321529 n03877845 n03450230 n03290653 n01664065 n03908714 n01537544 n02088238 n01882714 n01773549 n04418357 n02727426 n01872401 n02106382 n03991062 n02017213 n02018207 n04370456 n02219486 n02669723 n01694178 n01784675 n03443371 n02114548 n01806567 n04090263 n07932039 n01608432 n02281406 n04238763 n01664065 n02028035 n01917289 n03793489 n04209239 n03042490 n03400231 n02356798 n03065424 n04335435 n01664065 n01692333 n07880968 n03297495 n02841315 n03095699 n07697313 n09399592 n01917289 n03724870 n13133613 n03787032 n02493793 n03843555 n01629819 n03843555 n04461696 n01669191 n03976657 n02097047 n03773504 n02951585 n04398044 n03599486 n03250847 n03796401 n01737021 n02776631 n03599486 n02110806 n04254680 n02138441 n02483362 n02747177 n03733805 n04118538 n01829413 n02112137 n02102318 n02097474 n02119789 n04136333 n04579432 n02493509 n01667778 n02442845 
n02097209 n03404251 n02488291 n02091032 n01882714 n04081281 n02963159 n02088632 n01491361 n04380533 n04423845 n01629819 n03956157 n04548362 n02804610 n04310018 n04251144 n07860988 n02692877 n03938244 n01484850 n04325704 n01560419 n02916936 n02442845 n03998194 n04330267 n03425413 n07932039 n01984695 n03345487 n03259280 n07768694 n02444819 n01675722 n02328150 n04070727 n04423845 n03729826 n07684084 n03485794 n03498962 n01753488 n03958227 n02895154 n03100240 n02110806 n04118776 n02105056 n03874293 n04037443 n03496892 n07745940 n03871628 n03372029 n02100735 n02132136 n03623198 n03666591 n02823750 n01735189 n02106382 n07697537 n02454379 n04311004 n03110669 n04009552 n02074367 n02442845 n02099601 n09246464 n03814906 n04049303 n01749939 n03803284 n02667093 n03908714 n04409515 n03290653 n07730033 n02268443 n03028079 n02514041 n04592741 n07720875 n02988304 n02606052 n03877472 n01798484 n03742115 n04461696 n02917067 n01629819 n04486054 n04548362 n02860847 n02107683 n01944390 n03786901 n04044716 n01824575 n01440764 n02279972 n01914609 n03272562 n07590611 n01728572 n01687978 n03791053 n01518878 n02950826 n03982430 n02966193 n03841143 n02672831 n02787622 n02165105 n04525038 n03662601 n12057211 n04522168 n04613696 n02088632 n01985128 n09472597 n03271574 n01687978 n04147183 n07875152 n01580077 n03393912 n03903868 n04074963 n03788365 n01843065 n03690938 n02105056 n04525305 n01631663 n02097047 n02486410 n04152593 n02879718 n04443257 n02102040 n02093859 n02127052 n09332890 n01770393 n03527444 n03697007 n04515003 n07873807 n04429376 n03991062 n03085013 n01828970 n01608432 n03930313 n02105641 n01756291 n02500267 n04039381 n02168699 n03259280 n01855032 n10565667 n02115641 n04515003 n02669723 n02988304 n03825788 n02025239 n03706229 n01914609 n03344393 n04049303 n03259280 n02091244 n02514041 n03065424 n12057211 n02027492 n04118538 n04141076 n03899768 n04462240 n02096051 n02978881 n02114855 n04509417 n04505470 n03201208 n01986214 n02417914 n01677366 n07747607 n04409515 n01685808 n04599235 n03187595 n03657121 n15075141 n04372370 n02966687 n01820546 n03344393 n03476991 n03763968 n04070727 n03041632 n01877812 n07248320 n07875152 n02892767 n03355925 n01685808 n04228054 n03843555 n01755581 n04347754 n02277742 n03000247 n07742313 n07875152 n03075370 n02799071 n03133878 n06596364 n01806143 n03930313 n03930313 n02730930 n01773797 n03902125 n03721384 n02951358 n02119022 n01744401 n02112706 n02396427 n03633091 n01514668 n03791053 n02395406 n04370456 n03657121 n02096585 n02107312 n03970156 n03126707 n02105251 n02442845 n04461696 n07715103 n03873416 n01677366 n02012849 n03527444 n01798484 n04562935 n02279972 n02423022 n03992509 n01592084 n03788195 n02259212 n04462240 n03929660 n02090622 n04254120 n01592084 n02109961 n03769881 n02268443 n02909870 n01641577 n04550184 n04507155 n01630670 n04152593 n02090379 n01983481 n09421951 n04517823 n01744401 n07745940 n01843383 n03476684 n01735189 n03930313 n03916031 n02093991 n03207743 n02787622 n02106166 n04398044 n04428191 n04209133 n02085620 n09835506 n01871265 n03459775 n02089973 n02643566 n02481823 n02123159 n07875152 n04557648 n03196217 n04033995 n02037110 n01955084 n03089624 n01751748 n02099429 n03325584 n03445777 n03902125 n02116738 n02799071 n02843684 n03109150 n02869837 n06794110 n03908618 n02105251 n02790996 n02966687 n09256479 n02939185 n04417672 n02113624 n04266014 n02174001 n02483362 n03127925 n03717622 n01744401 n01739381 n02606052 n03290653 n04330267 n02486410 n02457408 n04355338 n01498041 n02134418 n01440764 n04552348 n02319095 n03781244 n07730033 n04525038 n02018795 
n03494278 n04589890 n01829413 n04456115 n04118776 n02687172 n02992529 n07932039 n03075370 n04557648 n01728920 n01688243 n02443484 n03843555 n03786901 n03016953 n02536864 n04125021 n01514668 n04461696 n01983481 n02493509 n07614500 n01776313 n02091467 n02106030 n02814860 n02002556 n01818515 n03160309 n02092339 n02013706 n01753488 n01739381 n02981792 n01753488 n02704792 n09332890 n02317335 n03255030 n04201297 n02093256 n01688243 n03792782 n03028079 n01944390 n02107908 n03803284 n03775546 n02128757 n04542943 n04560804 n02514041 n04204347 n02916936 n03344393 n02364673 n03942813 n01614925 n02494079 n04542943 n07742313 n02490219 n03843555 n02281406 n02493793 n02123597 n04613696 n01796340 n07753592 n03384352 n03916031 n03908714 n03992509 n04201297 n03637318 n02977058 n02091032 n02494079 n03673027 n04548362 n01950731 n03721384 n02999410 n02483362 n02111277 n03709823 n02087046 n03929660 n07930864 n03954731 n03063599 n03692522 n02018207 n03788195 n04040759 n02011460 n07871810 n03690938 n04486054 n01986214 n04591713 n04127249 n01807496 n02095570 n01981276 n02128925 n02992529 n02815834 n01698640 n01632458 n02492660 n02319095 n03938244 n03876231 n01798484 n03666591 n02110806 n03782006 n01943899 n02643566 n04120489 n04399382 n02085782 n04389033 n07714571 n01614925 n03494278 n04141076 n03388043 n04118776 n03291819 n02389026 n04209133 n01685808 n03769881 n04074963 n04458633 n04532670 n02484975 n07579787 n02058221 n03000134 n01704323 n04044716 n03000684 n03179701 n07716906 n01518878 n02497673 n03445924 n02093647 n02410509 n03026506 n04153751 n04141076 n03532672 n04201297 n07836838 n03188531 n02486410 n04275548 n02133161 n03394916 n02098105 n04376876 n02106382 n03483316 n02490219 n03032252 n03770439 n02025239 n03840681 n03496892 n03633091 n02837789 n03126707 n02104365 n04584207 n04347754 n04243546 n02110185 n02865351 n02167151 n02871525 n02088466 n02138441 n02804610 n03935335 n02782093 n01744401 n09472597 n03445924 n01737021 n02102480 n02086646 n02137549 n02481823 n02107574 n02096437 n02701002 n03272562 n02978881 n01737021 n01824575 n03887697 n02097298 n03692522 n02437312 n03814639 n02236044 n02094433 n07742313 n04398044 n03255030 n04258138 n02422106 n06785654 n02319095 n03692522 n04350905 n04252077 n03804744 n03131574 n02107312 n07583066 n02006656 n01608432 n04428191 n04346328 n02493793 n04040759 n03733281 n02093754 n01677366 n02481823 n11939491 n13044778 n04070727 n02500267 n03347037 n03942813 n03218198 n02747177 n04286575 n01530575 n02437312 n02090379 n04447861 n01843383 n01629819 n01871265 n02077923 n02105162 n03873416 n02106662 n02096437 n02132136 n03000684 n01917289 n02777292 n02077923 n02110063 n02027492 n02124075 n04467665 n04192698 n04525305 n12057211 n02894605 n02108551 n04392985 n01742172 n02825657 n04336792 n04265275 n02172182 n02483362 n02168699 n02088094 n02128925 n03764736 n02113712 n03197337 n03393912 n03804744 n07697313 n03770679 n02795169 n02104365 n10148035 n01534433 n03089624 n10565667 n04536866 n02259212 n01828970 n01667114 n02110958 n03841143 n03325584 n03450230 n04423845 n04149813 n02802426 n03876231 n03868242 n07614500 n04356056 n02128925 n03379051 n02099712 n02870880 n02085936 n13044778 n03388043 n02113712 n02113624 n03141823 n02110627 n03394916 n04548362 n02927161 n01914609 n04275548 n03271574 n03527444 n01530575 n03775546 n02965783 n02105505 n03982430 n04258138 n03201208 n07684084 n02437616 n03388043 n04389033 n02841315 n03250847 n02480495 n01749939 n12998815 n02114712 n02056570 n03602883 n02281406 n02086079 n03769881 n03791053 n02165456 n02747177 n13040303 n04023962 n02948072 
n04243546 n02690373 n04442312 n03837869 n04417672 n13054560 n02106166 n01776313 n02667093 n07565083 n13133613 n07730033 n02488291 n04423845 n03623198 n03977966 n03866082 n02100735 n02834397 n04461696 n02089078 n01694178 n01944390 n03706229 n03223299 n03980874 n03991062 n04004767 n04201297 n03761084 n03443371 n02033041 n02138441 n01924916 n04133789 n06359193 n02091032 n02981792 n03180011 n04522168 n04317175 n02106662 n01847000 n12768682 n03496892 n02892767 n07684084 n01877812 n03345487 n03495258 n03661043 n01990800 n03417042 n04330267 n01443537 n02397096 n01582220 n01910747 n02025239 n03724870 n02787622 n02892201 n02086079 n04417672 n04550184 n04525305 n03877845 n07718472 n04266014 n02396427 n01773797 n02009912 n01795545 n02120079 n02105505 n04252077 n07734744 n02793495 n04372370 n02667093 n01629819 n02493793 n02640242 n01748264 n02134418 n04335435 n02966687 n01608432 n03325584 n02013706 n02364673 n02791124 n02979186 n04493381 n03045698 n03032252 n02092339 n01806143 n03535780 n02319095 n04562935 n01873310 n02279972 n02124075 n03482405 n02056570 n02823750 n02823428 n01443537 n02860847 n02690373 n03825788 n04461696 n02106030 n01983481 n01632777 n04562935 n01847000 n03661043 n03272010 n02113978 n04550184 n02699494 n04505470 n01629819 n03944341 n03792782 n02071294 n02114367 n04536866 n02910353 n03355925 n03908618 n02786058 n02097047 n02088094 n02089867 n04356056 n02095570 n01756291 n02441942 n04208210 n07693725 n02088094 n06596364 n02992529 n04081281 n03467068 n01847000 n01693334 n03680355 n04501370 n03763968 n01917289 n02669723 n01924916 n02110958 n04041544 n02110806 n02134084 n02130308 n02443484 n02843684 n01968897 n01855672 n02113799 n03584829 n12768682 n01531178 n03197337 n01784675 n03075370 n04252077 n03935335 n02999410 n07716358 n04238763 n07753275 n02279972 n02666196 n02007558 n02105251 n02226429 n01751748 n02127052 n04579145 n02051845 n02445715 n02102177 n03759954 n03179701 n02007558 n03649909 n03992509 n03447721 n02916936 n03196217 n01883070 n01983481 n03000684 n01756291 n02111277 n03857828 n04479046 n02177972 n04067472 n03444034 n03854065 n03720891 n04208210 n01740131 n04423845 n01855672 n03388549 n02206856 n04606251 n03887697 n02865351 n04579145 n01496331 n02804414 n02787622 n04004767 n02097047 n02490219 n03529860 n03680355 n03942813 n01632458 n03733281 n03584829 n02797295 n02966687 n01824575 n07831146 n04366367 n03666591 n03788195 n02966193 n03042490 n06874185 n03345487 n02123597 n02895154 n01664065 n01819313 n12985857 n01855672 n02095314 n02102973 n02966193 n02115913 n03590841 n02093991 n02169497 n02814860 n02089078 n02138441 n02113712 n02883205 n01601694 n01774384 n04111531 n03000134 n02088364 n02489166 n01914609 n04009552 n03680355 n03843555 n03950228 n03680355 n04597913 n04347754 n04116512 n02747177 n01514668 n02840245 n03483316 n07715103 n04153751 n02500267 n03998194 n15075141 n03930313 n02112706 n03888257 n02110063 n02108000 n02102973 n02483708 n02097474 n02011460 n02492035 n02814860 n02009229 n03877845 n06596364 n07248320 n04344873 n04536866 n02823750 n03291819 n01770081 n02892767 n03481172 n02066245 n04370456 n02264363 n03670208 n02397096 n03075370 n02087394 n02536864 n04599235 n03982430 n04523525 n04522168 n13052670 n03633091 n04067472 n02988304 n04486054 n01677366 n02492660 n03127747 n02112350 n04336792 n03417042 n13133613 n01608432 n02865351 n02129165 n01773157 n04258138 n04041544 n04252077 n03197337 n03794056 n03877845 n04346328 n02086910 n01694178 n03445924 n04532670 n03781244 n04141975 n03124170 n03874293 n03498962 n01739381 n02791270 n07892512 n03444034 n02105162 
n01734418 n04070727 n02916936 n03840681 n04399382 n07749582 n02480495 n04515003 n01688243 n02107142 n01914609 n01742172 n07753113 n01828970 n01797886 n04606251 n03062245 n03400231 n03483316 n02978881 n02109047 n02795169 n01728920 n03530642 n04209133 n02105641 n02111277 n01737021 n02092339 n04589890 n02454379 n12267677 n03627232 n01990800 n02109047 n03314780 n01798484 n03691459 n02669723 n03781244 n03467068 n01770081 n01796340 n03930313 n02226429 n02514041 n02356798 n07880968 n04131690 n02807133 n03841143 n02346627 n02397096 n02963159 n02641379 n02093428 n01537544 n02814860 n04074963 n02109525 n02085782 n02102973 n02319095 n02437616 n02395406 n02488291 n03777568 n03710193 n09421951 n03838899 n04004767 n02011460 n02526121 n02112018 n02687172 n02825657 n01882714 n01968897 n03196217 n02101556 n04389033 n04127249 n04254680 n03063689 n04125021 n01689811 n04325704 n02137549 n10565667 n02391049 n07836838 n04584207 n02423022 n02088364 n03961711 n02457408 n03535780 n02412080 n03017168 n02979186 n02676566 n01860187 n02423022 n03891332 n01494475 n01704323 n04423845 n03976467 n02091831 n02101006 n01491361 n03063689 n01910747 n01784675 n03967562 n02094114 n04065272 n01534433 n04372370 n02879718 n02871525 n02168699 n01784675 n03492542 n02101388 n07718472 n02110185 n12998815 n03127925 n03207743 n12057211 n07565083 n04525038 n04118776 n01616318 n02965783 n02206856 n03899768 n01687978 n03379051 n02104029 n04229816 n03124170 n02281406 n03032252 n02101556 n02980441 n03485794 n04366367 n02492035 n03599486 n04548362 n03764736 n07760859 n01978287 n04505470 n02488291 n02782093 n03417042 n02486261 n03843555 n02319095 n02493509 n01798484 n03857828 n03950228 n02791124 n03207941 n01751748 n03916031 n04074963 n03724870 n13133613 n03937543 n03255030 n04372370 n02168699 n03920288 n02514041 n02112350 n01443537 n01807496 n04070727 n01675722 n01518878 n03599486 n04162706 n04147183 n01795545 n01698640 n01873310 n07718472 n04033995 n04418357 n04429376 n02110806 n01944390 n09835506 n02092339 n02948072 n01978455 n02100236 n03710193 n04517823 n04154565 n03761084 n02346627 n02672831 n02422106 n01664065 n04125021 n03450230 n03980874 n03642806 n03866082 n01494475 n01910747 n02229544 n01770393 n02114367 n07920052 n01872401 n02109047 n03884397 n02704792 n07716906 n03843555 n03095699 n04532106 n02093754 n02879718 n04515003 n07718747 n02094258 n03838899 n03126707 n07730033 n03085013 n03680355 n02123045 n02279972 n02086240 n02134418 n03388549 n03637318 n03345487 n04517823 n03476991 n07734744 n03602883 n04371774 n04229816 n03249569 n02676566 n02011460 n02916936 n01806567 n02814533 n01560419 n03970156 n01978455 n02823750 n02883205 n02110627 n03787032 n10148035 n04596742 n04033995 n02444819 n03954731 n04311174 n02095889 n01914609 n03710193 n02782093 n01820546 n02091134 n04355933 n02389026 n04090263 n04254120 n01820546 n01641577 n02106550 n02326432 n03532672 n03065424 n07836838 n02786058 n04235860 n04264628 n02091244 n03773504 n02013706 n04458633 n04270147 n07711569 n04325704 n03017168 n02112350 n04192698 n02769748 n02096051 n04149813 n02483708 n04040759 n04265275 n02071294 n07873807 n02488702 n04200800 n02134084 n04418357 n04552348 n02999410 n02817516 n01981276 n02233338 n02504458 n02116738 n03633091 n03372029 n07714990 n04552348 n02504458 n02172182 n03691459 n02089078 n03594734 n02643566 n01665541 n01818515 n02802426 n03662601 n03495258 n01773797 n02206856 n03710721 n04442312 n02137549 n03657121 n04311004 n03775071 n03630383 n02412080 n01443537 n03874293 n03874599 n07590611 n04162706 n02108551 n07749582 n02804414 n03777754 n03584829 
n02699494 n02097298 n03661043 n01774750 n03594945 n04005630 n07697313 n02009229 n03529860 n04355933 n03899768 n03337140 n02110958 n02092339 n02097130 n03337140 n01818515 n03345487 n01496331 n03124043 n02095570 n01558993 n03814906 n03216828 n03930630 n06874185 n02113799 n07720875 n03887697 n03697007 n02231487 n02669723 n02480855 n04366367 n03706229 n03529860 n03924679 n03527444 n01770393 n04493381 n04532670 n02883205 n04192698 n02129604 n02669723 n04259630 n02091831 n09332890 n01883070 n04026417 n03485407 n01877812 n01644900 n09256479 n04286575 n01601694 n04428191 n03065424 n03770439 n02174001 n02110341 n02916936 n04086273 n03393912 n02701002 n03991062 n01608432 n04273569 n04522168 n07760859 n02493793 n02804414 n02229544 n04009552 n03874599 n03649909 n07614500 n02094433 n02097298 n03662601 n03450230 n02093256 n04033995 n02113023 n09246464 n01704323 n02488702 n02096294 n04536866 n07873807 n03770439 n04409515 n04532106 n04542943 n07584110 n02808304 n03903868 n03888605 n02051845 n02115641 n02099267 n03452741 n03498962 n01945685 n01692333 n03930630 n02794156 n04311004 n03482405 n04540053 n09256479 n02607072 n02281406 n03991062 n02056570 n04243546 n03100240 n01532829 n03127747 n02119022 n02666196 n03379051 n04417672 n07920052 n03617480 n01818515 n03998194 n03388183 n02113799 n04344873 n03590841 n04228054 n04228054 n02231487 n03888257 n04086273 n02090622 n03933933 n02422106 n03720891 n02093991 n04347754 n01630670 n03843555 n03729826 n01644900 n02264363 n03126707 n12057211 n04461696 n02098286 n02276258 n04552348 n01514668 n04243546 n02871525 n02106382 n02100583 n02085936 n04487081 n03995372 n01601694 n02279972 n03444034 n07730033 n02011460 n02099601 n04536866 n03014705 n02486261 n04590129 n04265275 n03447447 n02102177 n03388043 n01665541 n03924679 n06874185 n03018349 n02403003 n03196217 n02132136 n01514859 n02397096 n02113186 n03924679 n02096437 n07831146 n04584207 n03777568 n02276258 n02108915 n04540053 n03874293 n02033041 n04270147 n02114367 n07730033 n02342885 n03929660 n03032252 n02992211 n03658185 n02777292 n02879718 n02319095 n07760859 n03888257 n02910353 n03868863 n04133789 n04136333 n04356056 n02028035 n03000134 n03355925 n04326547 n02494079 n04099969 n02966193 n04147183 n02966193 n07697313 n03877472 n02486261 n02510455 n07720875 n03764736 n04239074 n02443484 n07720875 n02840245 n03782006 n02119789 n04328186 n02417914 n03216828 n02108551 n02013706 n01734418 n03729826 n01689811 n04522168 n02422106 n04004767 n12620546 n04041544 n04116512 n03478589 n02174001 n04486054 n02107142 n02422699 n03400231 n07930864 n04200800 n01582220 n07753592 n02690373 n07880968 n03958227 n01665541 n01847000 n12768682 n03478589 n02091467 n02787622 n02776631 n03000247 n04074963 n03743016 n03325584 n09246464 n03871628 n01740131 n09288635 n02730930 n03884397 n03775546 n02114712 n07718472 n01728920 n02494079 n01774750 n03967562 n07718747 n02906734 n03444034 n02408429 n02319095 n04330267 n02113624 n02231487 n04141076 n04552348 n03759954 n04120489 n02869837 n03838899 n02268443 n02321529 n04023962 n03843555 n04525038 n02361337 n03924679 n02236044 n01530575 n02877765 n01980166 n03777568 n04008634 n04579145 n07873807 n03207743 n03970156 n04254680 n03345487 n02454379 n03110669 n01980166 n02536864 n04285008 n07684084 n01924916 n02108915 n04074963 n03837869 n01882714 n03873416 n02169497 n02687172 n02268853 n02906734 n03018349 n04310018 n02978881 n01693334 n04542943 n03770679 n02123045 n02974003 n02086646 n01530575 n03786901 n03710193 n03388183 n02112350 n02113186 n01883070 n04552348 n04344873 n01773157 n02109961 n02123159 
n04404412 n01917289 n02169497 n03899768 n03697007 n03874599 n02669723 n07717556 n04147183 n03424325 n03498962 n07715103 n01632777 n02264363 n03018349 n01669191 n04204238 n01829413 n03785016 n01871265 n02992529 n04127249 n01774384 n13040303 n02090721 n07615774 n02231487 n03126707 n04399382 n02127052 n02480495 n04357314 n04597913 n04311174 n04376876 n03344393 n04146614 n01622779 n04325704 n03527444 n07753275 n02422699 n03759954 n01824575 n01704323 n04067472 n01872401 n02114712 n02979186 n07615774 n02094433 n02106550 n01930112 n02086079 n07754684 n02088238 n03764736 n02077923 n01770081 n03763968 n03544143 n03777568 n03706229 n07871810 n02100583 n02096585 n03538406 n02794156 n04325704 n04127249 n02277742 n03314780 n13037406 n02607072 n07720875 n02277742 n02412080 n13054560 n02865351 n03467068 n03891251 n02089973 n02002724 n02017213 n02917067 n01665541 n07714990 n03372029 n03584254 n03662601 n03337140 n02692877 n02110627 n04201297 n04154565 n03637318 n03255030 n07745940 n02056570 n03895866 n02169497 n01818515 n04493381 n03041632 n02110627 n04553703 n02099429 n09428293 n03495258 n02483708 n04336792 n02825657 n03891251 n01860187 n09472597 n01753488 n04540053 n02895154 n02321529 n03259280 n01630670 n03000134 n03866082 n01514859 n07873807 n02105056 n01978455 n02009912 n03794056 n03720891 n03995372 n02869837 n02169497 n03425413 n04355338 n02977058 n02916936 n03840681 n04560804 n03042490 n07734744 n03706229 n01774384 n03530642 n02346627 n02105251 n02229544 n04522168 n03535780 n02105505 n02168699 n02138441 n04131690 n02172182 n02111129 n02776631 n03785016 n03895866 n02457408 n03146219 n02134084 n02097130 n02361337 n07720875 n01871265 n02231487 n07717556 n04328186 n04317175 n03065424 n02442845 n03729826 n02892201 n02489166 n03721384 n02096437 n02093647 n03376595 n01692333 n02134084 n01978287 n01592084 n02504458 n03544143 n04039381 n02690373 n01756291 n03814639 n03443371 n03633091 n02066245 n03868242 n02133161 n01496331 n02108915 n03325584 n03372029 n02085782 n04026417 n02111500 n03482405 n04149813 n02108551 n03337140 n03970156 n02443484 n03657121 n03633091 n01675722 n02965783 n03908714 n03777754 n03394916 n06794110 n02492660 n02099429 n01828970 n04404412 n01532829 n02109047 n07768694 n02104365 n01632777 n02794156 n02807133 n07615774 n01532829 n13040303 n04149813 n01828970 n03345487 n02096585 n03291819 n07754684 n02123597 n04266014 n02114855 n02018207 n04532106 n04579432 n09246464 n02088364 n07615774 n04487394 n04612504 n07613480 n02058221 n03980874 n02134418 n01622779 n04209239 n02692877 n01560419 n02870880 n03445924 n02117135 n04356056 n02097047 n02281406 n04243546 n02129604 n02395406 n02089973 n09332890 n07747607 n09246464 n04417672 n02859443 n02105251 n02012849 n03724870 n04562935 n02790996 n02825657 n02510455 n03884397 n04069434 n01843383 n01440764 n02909870 n04344873 n13054560 n03976657 n04270147 n02804610 n03792972 n01704323 n01689811 n03908714 n03062245 n03376595 n02442845 n04589890 n02114855 n04465501 n01664065 n07711569 n02457408 n02165105 n02389026 n03207743 n04081281 n04458633 n01843065 n04335435 n03444034 n04311174 n02128385 n01819313 n02098413 n02110341 n06874185 n02098413 n02007558 n02077923 n04461696 n01514859 n03388549 n03447721 n03207743 n02443114 n01664065 n03825788 n02799071 n01753488 n03642806 n01847000 n09421951 n02086910 n02441942 n03141823 n01664065 n03642806 n02364673 n03884397 n02033041 n04019541 n04266014 n07749582 n01818515 n02415577 n02804414 n04599235 n01910747 n02965783 n04111531 n03794056 n02088364 n03733805 n02497673 n04296562 n01983481 n04041544 n07892512 n02085936 
n03929855 n02396427 n03854065 n02802426 n01751748 n01632458 n03207941 n02110627 n04554684 n03729826 n02480495 n01914609 n04200800 n02480495 n01630670 n03825788 n04458633 n07754684 n01756291 n02807133 n02099712 n03223299 n03394916 n02100735 n04548362 n01774750 n03085013 n02974003 n04004767 n02111129 n02113799 n02963159 n04275548 n06874185 n02105855 n03710193 n02916936 n03125729 n04209239 n04033995 n07930864 n03443371 n04604644 n03788195 n04238763 n02174001 n03637318 n07615774 n04200800 n02107142 n03709823 n03786901 n02086079 n03201208 n03000684 n04099969 n02102480 n01950731 n07753113 n02013706 n04536866 n02423022 n02687172 n04208210 n04596742 n02051845 n01833805 n02058221 n03344393 n03857828 n01978287 n04118538 n03976657 n03717622 n02097130 n09399592 n01768244 n02317335 n04204238 n01580077 n02097298 n03673027 n02013706 n02105251 n07697313 n03980874 n02804610 n02125311 n03781244 n02095570 n03344393 n02408429 n02110627 n02807133 n02129604 n04332243 n04398044 n13044778 n02098413 n02129604 n03763968 n03028079 n02108000 n03825788 n02116738 n04344873 n03924679 n02486261 n02667093 n03584254 n04554684 n07932039 n01872401 n02128757 n02966687 n02101556 n03207941 n04476259 n07684084 n02109525 n02268443 n03793489 n02106662 n04335435 n03146219 n01774384 n03980874 n01930112 n03485794 n03710193 n04525305 n03916031 n07565083 n02264363 n03676483 n04235860 n02808304 n03796401 n12620546 n02098286 n02091831 n02319095 n02264363 n04317175 n04120489 n02788148 n02110341 n04252077 n07715103 n04540053 n03016953 n02091244 n02640242 n04612504 n03000134 n02112706 n01532829 n02115913 n02101556 n02119789 n04252225 n03492542 n03272010 n03770679 n01629819 n04517823 n04366367 n02410509 n03623198 n03777754 n03899768 n04367480 n04525305 n03208938 n02951358 n03110669 n04483307 n04517823 n02422699 n04509417 n03590841 n09332890 n01629819 n04557648 n09421951 n13052670 n01677366 n02058221 n02102318 n03126707 n04548280 n03187595 n02966687 n03938244 n02486261 n02096177 n02165105 n02979186 n04310018 n01669191 n04356056 n01644373 n03676483 n04311174 n03617480 n02107908 n04310018 n02100236 n03623198 n03841143 n02488702 n04507155 n02097130 n02769748 n03781244 n02441942 n03240683 n02115641 n02117135 n02137549 n02113023 n02129165 n04532106 n04118538 n01774750 n02917067 n03394916 n04458633 n01704323 n04399382 n02410509 n02111277 n02102177 n03000247 n02107683 n04037443 n03445777 n04296562 n02971356 n04418357 n02730930 n03841143 n01774384 n03271574 n02443114 n12144580 n02097298 n02948072 n04179913 n02105251 n03888605 n03208938 n04265275 n09421951 n02408429 n02101388 n02105056 n07836838 n04591713 n02011460 n04532106 n01698640 n04330267 n04039381 n04542943 n02317335 n02504013 n01704323 n01829413 n04357314 n04252077 n01601694 n02006656 n03124043 n02965783 n02814533 n03347037 n03920288 n03874599 n02364673 n03496892 n01978455 n03544143 n04252077 n03630383 n03717622 n03141823 n04259630 n03785016 n02174001 n02869837 n04335435 n02687172 n01729977 n02018795 n01494475 n03529860 n02106166 n04553703 n04523525 n02445715 n03891332 n02747177 n03676483 n02667093 n07920052 n02910353 n02097209 n03991062 n04204238 n02110341 n02089867 n01776313 n02328150 n03180011 n07717410 n03047690 n04505470 n03014705 n01518878 n01807496 n04591713 n02999410 n04254777 n02870880 n02002556 n02095889 n02487347 n03944341 n03770679 n03794056 n03759954 n02093991 n01968897 n03743016 n03388183 n03775546 n02437312 n04120489 n03642806 n02808440 n04099969 n03891332 n03958227 n02113799 n03998194 n02104029 n03250847 n02100877 n07714990 n03110669 n02676566 n03347037 n03530642 n10565667 
n02108000 n03110669 n03690938 n02095314 n02012849 n02277742 n01532829 n04553703 n02051845 n04456115 n03998194 n02417914 n03594734 n01775062 n02105855 n03903868 n02096294 n04371774 n02927161 n03657121 n03937543 n04532106 n01883070 n01537544 n02667093 n02104029 n02487347 n02104365 n02051845 n04243546 n02006656 n02808304 n04251144 n02356798 n02391049 n07753275 n02974003 n03482405 n09193705 n01694178 n02168699 n12768682 n03272562 n03710193 n03843555 n03126707 n03196217 n06785654 n04350905 n07873807 n04310018 n02264363 n02492660 n10565667 n04275548 n04147183 n04366367 n02114855 n02100236 n04154565 n02276258 n03424325 n03777568 n03494278 n01806143 n03459775 n03598930 n03967562 n03775546 n04418357 n02412080 n04591157 n01770081 n03877472 n01531178 n03794056 n04485082 n03786901 n01773797 n04254680 n02128925 n02128757 n02442845 n02606052 n02099429 n04442312 n01807496 n02107312 n03710637 n02027492 n03016953 n02017213 n12768682 n04192698 n02747177 n04532106 n01537544 n04254777 n03259280 n02025239 n09835506 n02096437 n04372370 n02797295 n03871628 n02481823 n03837869 n02268443 n04522168 n03690938 n04550184 n03657121 n02105251 n01833805 n01755581 n07734744 n01873310 n03538406 n01688243 n03452741 n02120505 n02412080 n04254120 n04019541 n02112706 n02100735 n03201208 n03134739 n02514041 n04065272 n02165105 n04443257 n04149813 n03871628 n02100236 n02412080 n02992211 n02951358 n03776460 n02666196 n03000134 n12144580 n03141823 n02110341 n02094114 n02504458 n04389033 n02085936 n04553703 n03594734 n09468604 n03980874 n07831146 n03141823 n13054560 n01704323 n02356798 n03970156 n02071294 n06794110 n02860847 n03970156 n11879895 n04389033 n01770393 n02104365 n02033041 n07754684 n02666196 n03658185 n03447447 n03840681 n01990800 n03992509 n02319095 n04540053 n04141975 n03026506 n02009229 n07880968 n03459775 n02488291 n02108551 n03793489 n03041632 n03887697 n12057211 n07875152 n01828970 n01796340 n03494278 n02281787 n01698640 n01537544 n02110185 n04209133 n02536864 n07714990 n02100236 n04317175 n04265275 n01983481 n01833805 n02808440 n01443537 n07697313 n02109525 n03935335 n03903868 n04074963 n01807496 n03729826 n04111531 n07860988 n04133789 n03873416 n03991062 n03028079 n03207743 n02487347 n03207941 n03920288 n02100735 n02105855 n03544143 n02071294 n03496892 n03461385 n01443537 n04239074 n03956157 n04553703 n04371430 n12057211 n04118776 n02793495 n02808304 n03709823 n02099267 n03063599 n03018349 n02009912 n03467068 n03637318 n12998815 n04153751 n03063599 n02132136 n02879718 n02835271 n03089624 n01734418 n02027492 n04133789 n01491361 n03041632 n02361337 n03710637 n02169497 n02268443 n03291819 n02492660 n04069434 n03457902 n04200800 n04429376 n01945685 n02910353 n02096177 n04204347 n03347037 n01806567 n02002724 n01675722 n04404412 n03476684 n03868242 n01773157 n02102040 n02088094 n02797295 n07831146 n03764736 n03000684 n02536864 n01983481 n02106550 n04065272 n01685808 n02090622 n04579432 n04204238 n13054560 n03016953 n03937543 n04229816 n02492660 n03445924 n11939491 n03544143 n02894605 n07697537 n04153751 n02483362 n02134084 n04208210 n03197337 n01753488 n03680355 n03938244 n03857828 n03761084 n02105162 n03742115 n02536864 n02930766 n01514668 n03876231 n02493509 n02095314 n04517823 n01729977 n04442312 n11939491 n01614925 n03496892 n02281787 n02095570 n02105505 n04127249 n04579432 n03804744 n04613696 n01440764 n04133789 n02115641 n02099849 n04493381 n02102480 n11939491 n07565083 n03425413 n01756291 n02132136 n02109525 n03995372 n12057211 n07697537 n04023962 n03690938 n03676483 n03868863 n04147183 n02895154 n01773549 
n01667114 n12267677 n04507155 n03658185 n01644373 n06785654 n02114548 n04065272 n04118538 n01491361 n03792782 n03773504 n07831146 n02092002 n02808304 n04330267 n02437312 n03481172 n03706229 n02100583 n04347754 n02666196 n04074963 n03976467 n02090721 n02002556 n01728572 n02129165 n02483362 n01910747 n03887697 n02422106 n04039381 n02356798 n04350905 n02871525 n02086079 n04485082 n04116512 n02346627 n02840245 n03345487 n04336792 n03777568 n02797295 n02093428 n04037443 n03188531 n03538406 n02108089 n02268853 n02219486 n02415577 n02113978 n04367480 n02111277 n07754684 n03207941 n02708093 n02791124 n04239074 n01872401 n03124043 n02788148 n03933933 n01798484 n03065424 n03658185 n09421951 n03000247 n02669723 n04592741 n02097130 n02105641 n01629819 n02793495 n03954731 n04141327 n02966687 n02769748 n02281787 n01687978 n04229816 n04009552 n04418357 n04461696 n02006656 n03770439 n02017213 n07716358 n02445715 n02389026 n02948072 n06785654 n02268443 n03457902 n04118776 n12768682 n02095314 n01518878 n04275548 n02894605 n01843383 n02840245 n07697313 n07930864 n02690373 n02788148 n04081281 n03127925 n03706229 n03721384 n01632458 n04265275 n01924916 n02979186 n01872401 n04235860 n04476259 n07697537 n02488702 n03920288 n03670208 n04493381 n02113712 n01682714 n03271574 n03018349 n01641577 n02422699 n02807133 n02749479 n02749479 n02480495 n02120505 n02277742 n03935335 n03759954 n02113186 n02100236 n03126707 n04458633 n02281406 n01775062 n04204347 n02116738 n03388043 n04418357 n02100583 n03584829 n01592084 n04456115 n01728920 n02091635 n03637318 n02105056 n02110627 n02776631 n03788365 n03179701 n02009912 n02219486 n04179913 n07590611 n03903868 n04560804 n01917289 n04133789 n02085620 n03259280 n02484975 n01744401 n07836838 n07753592 n03673027 n01494475 n01728572 n02174001 n07873807 n02058221 n04252225 n03782006 n04133789 n15075141 n02106662 n02346627 n03769881 n03630383 n03871628 n01984695 n01514668 n01749939 n03457902 n04347754 n04370456 n02892201 n01693334 n03109150 n02102973 n02098413 n01930112 n02834397 n02091032 n02489166 n12985857 n02092339 n03995372 n02089078 n03709823 n02111500 n02268443 n02410509 n01798484 n03720891 n03868863 n02092002 n03018349 n04487394 n03240683 n03803284 n07579787 n02804414 n03887697 n04542943 n02113023 n02607072 n01882714 n02102040 n07697537 n02443114 n01986214 n02777292 n02939185 n02009229 n03769881 n04554684 n02037110 n02817516 n02089078 n03691459 n03680355 n04591713 n03804744 n03617480 n01795545 n02865351 n02840245 n02909870 n02101006 n04208210 n04487081 n02111889 n04264628 n01629819 n02111129 n12768682 n03134739 n03075370 n13037406 n02100735 n04330267 n04540053 n01498041 n03874599 n03874599 n04485082 n03095699 n04252225 n02172182 n01667114 n04557648 n02119022 n02091467 n04350905 n01817953 n01985128 n04067472 n02504013 n04476259 n09229709 n02865351 n02105251 n03255030 n02325366 n04200800 n03065424 n04330267 n02403003 n02123159 n02326432 n02097130 n02966687 n04591157 n03538406 n02107908 n02009912 n01644900 n02356798 n04201297 n04235860 n02110185 n03544143 n02787622 n04296562 n02804414 n02114367 n02894605 n02119022 n02965783 n03837869 n01955084 n02701002 n02137549 n03794056 n03759954 n03956157 n03461385 n02939185 n07892512 n07715103 n01742172 n04350905 n01817953 n02865351 n02002556 n01644900 n02795169 n03617480 n03207743 n02403003 n03109150 n03590841 n02480855 n02091032 n07584110 n02102318 n02111277 n02692877 n04604644 n03793489 n01877812 n02412080 n01698640 n02110806 n04019541 n04476259 n04584207 n02012849 n03720891 n04311174 n03459775 n03781244 n09428293 n02106550 n02132136 
n03630383 n02128925 n03903868 n03814639 n01630670 n02106550 n01855672 n01807496 n02088364 n03290653 n02109525 n03902125 n07583066 n04542943 n03937543 n07583066 n04008634 n04532670 n02095314 n04118538 n07584110 n02747177 n03929855 n01950731 n07742313 n03649909 n02319095 n01697457 n02092339 n09332890 n04347754 n02480495 n03478589 n07880968 n03935335 n03976657 n02835271 n04367480 n02177972 n04070727 n04277352 n04125021 n03134739 n02128757 n02504013 n04111531 n04152593 n04591713 n03400231 n01704323 n12768682 n02110806 n04418357 n02536864 n04409515 n04542943 n03763968 n03662601 n02490219 n02086240 n04404412 n07718747 n02096051 n04599235 n01944390 n01990800 n04152593 n02807133 n02086910 n03347037 n01847000 n02107683 n02279972 n04019541 n01695060 n02087046 n03891251 n04154565 n04398044 n02504013 n02138441 n04285008 n03942813 n04239074 n02704792 n03794056 n04476259 n04483307 n03982430 n02109047 n11939491 n04335435 n02727426 n03781244 n01978455 n03887697 n02268853 n02607072 n02009229 n04371774 n07892512 n04523525 n01748264 n03924679 n04200800 n04026417 n04208210 n04548362 n04389033 n04152593 n02910353 n07697313 n03196217 n04200800 n02279972 n01917289 n02488291 n02808304 n03992509 n02804414 n01774750 n04442312 n03535780 n02802426 n04044716 n02128385 n07697313 n04179913 n03400231 n03095699 n03871628 n02129165 n01773797 n03691459 n02018795 n04116512 n03089624 n02127052 n02111129 n02093256 n03742115 n04429376 n02009229 n02815834 n07747607 n03481172 n03220513 n03495258 n02974003 n01704323 n04277352 n07684084 n02107574 n02276258 n12998815 n03617480 n03721384 n02992529 n02321529 n03933933 n03764736 n03764736 n02317335 n04235860 n02808440 n02110341 n04542943 n02442845 n02869837 n01742172 n02088632 n02120079 n04259630 n03447447 n03876231 n02037110 n01914609 n02102040 n13054560 n03930630 n03759954 n07584110 n04259630 n03291819 n07697537 n01614925 n03814906 n04540053 n02116738 n01776313 n03954731 n04479046 n03658185 n04357314 n03763968 n01755581 n01749939 n02981792 n03485407 n02442845 n04548280 n07880968 n02825657 n09332890 n04596742 n04596742 n02930766 n01843383 n03532672 n13133613 n02963159 n03759954 n02098413 n04367480 n02643566 n04254777 n02415577 n04560804 n04485082 n03781244 n04597913 n04482393 n01530575 n03250847 n02108089 n04404412 n02687172 n03786901 n02108000 n02687172 n02317335 n02606052 n02165105 n03045698 n03218198 n02415577 n04069434 n04482393 n01806143 n01443537 n02100735 n04153751 n04254777 n02091467 n03482405 n02794156 n07754684 n03495258 n04542943 n01797886 n03085013 n03792972 n01980166 n02782093 n03920288 n03666591 n01695060 n02486410 n02088364 n02389026 n07753592 n07248320 n03355925 n01737021 n04266014 n02167151 n03930630 n02133161 n02107142 n03180011 n04023962 n01443537 n02443114 n02892201 n03109150 n01872401 n07565083 n02815834 n02206856 n03729826 n10565667 n02111129 n02704792 n02117135 n03000247 n02129604 n04550184 n03089624 n03785016 n01689811 n02441942 n01641577 n02229544 n01622779 n02089973 n02791270 n02102177 n02114855 n13040303 n03944341 n01667114 n04149813 n03792972 n02869837 n02112706 n13044778 n01688243 n02097658 n02109961 n03791053 n04286575 n01985128 n03014705 n04265275 n04467665 n01985128 n04344873 n04335435 n02676566 n01806143 n04599235 n02093859 n04486054 n01601694 n02966193 n02965783 n02099712 n02808440 n03785016 n04285008 n04141076 n07760859 n03717622 n01917289 n03942813 n04409515 n01819313 n03255030 n02328150 n07590611 n01985128 n03998194 n12985857 n03014705 n02823428 n03127747 n02825657 n03935335 n02793495 n04509417 n02655020 n07873807 n02906734 n03720891 n04037443 
n04254120 n07614500 n01667114 n02415577 n03710637 n02361337 n04081281 n04070727 n03649909 n07720875 n02011460 n01443537 n04525305 n02894605 n02113712 n09229709 n04367480 n04266014 n02105056 n09421951 n02814860 n02167151 n01744401 n02808304 n02106030 n02074367 n02536864 n04485082 n03538406 n02108915 n02114548 n01698640 n04286575 n02797295 n02124075 n02927161 n02747177 n02641379 n02325366 n02536864 n03697007 n02281406 n03017168 n02090721 n03776460 n02037110 n03100240 n04398044 n02871525 n03792782 n02787622 n03180011 n04522168 n04266014 n03218198 n02088094 n02097298 n04548362 n03196217 n02095889 n01873310 n02088466 n01968897 n04548280 n04604644 n02090379 n03787032 n04229816 n03891251 n02356798 n04350905 n03782006 n01664065 n03950228 n01601694 n01558993 n02777292 n02091134 n02088632 n02442845 n02137549 n01669191 n02007558 n03782006 n03692522 n02916936 n04357314 n02132136 n03930630 n04019541 n04005630 n02102480 n03443371 n04523525 n03814906 n07693725 n04371774 n04209239 n03720891 n02086079 n02071294 n01774384 n01560419 n04204238 n02101556 n03998194 n04486054 n04505470 n02089867 n04179913 n02112018 n04201297 n03673027 n03908714 n02105056 n02791270 n03775071 n03785016 n02088238 n04376876 n03272562 n02132136 n01748264 n02939185 n03485794 n02105412 n02814860 n03527444 n03803284 n02396427 n03877845 n07614500 n01514859 n02105056 n03047690 n04254120 n03218198 n02910353 n04328186 n03776460 n02109961 n03467068 n02704792 n04136333 n02169497 n02094114 n03837869 n03131574 n02090622 n04238763 n01682714 n03388043 n04493381 n04040759 n02099601 n03803284 n02101388 n13044778 n04483307 n03404251 n02090622 n12768682 n04367480 n03134739 n02356798 n02408429 n02974003 n02101388 n03124170 n04435653 n02105855 n07920052 n03272010 n03180011 n07717556 n04235860 n07716358 n02088094 n07873807 n03775071 n02110341 n02817516 n03146219 n02113186 n09246464 n02119022 n03240683 n03706229 n02701002 n04154565 n03467068 n03843555 n02107683 n02088094 n02108915 n02786058 n02326432 n01629819 n01614925 n12267677 n02108422 n02481823 n02892201 n02877765 n01955084 n12057211 n03063689 n02113978 n02777292 n03717622 n02787622 n02437312 n03992509 n01930112 n02500267 n03627232 n04505470 n03250847 n03400231 n02977058 n04554684 n04456115 n04147183 n03676483 n04465501 n02094114 n04532106 n07892512 n04557648 n03482405 n02088238 n03991062 n01751748 n02104029 n03733281 n02536864 n01860187 n03133878 n02110627 n03208938 n04192698 n02106166 n03028079 n04515003 n03787032 n04317175 n03447721 n02326432 n03535780 n03998194 n04560804 n04507155 n03134739 n01697457 n04270147 n02107683 n04525305 n02410509 n02099712 n02132136 n02268853 n01817953 n03929855 n07615774 n02100735 n01833805 n03207743 n04584207 n04266014 n07248320 n03467068 n03908618 n02133161 n02486410 n01755581 n02445715 n01914609 n02841315 n02877765 n01697457 n01981276 n06794110 n04485082 n02119022 n02481823 n02802426 n01689811 n01796340 n02667093 n01622779 n01980166 n02442845 n04328186 n01871265 n03729826 n02123394 n01630670 n02106166 n10148035 n02437616
TensorFlow/LanguageModeling/BERT/biobert/scripts
scripts
biobert_data_download
#!/usr/bin/env bash

# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

docker run --runtime=nvidia -v $PWD:/workspace/bert \
    --rm --shm-size=1g --ulimit memlock=-1 \
    --ulimit stack=67108864 --ipc=host -t -i \
    bert bash -c "bash data/create_biobert_datasets_from_start.sh"
PyTorch/LanguageModeling/BERT/triton/runner
runner
exporter
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pathlib

# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
    __package__ = pathlib.Path(__file__).parent.name

from .core import Command
from .exceptions import RunnerException
from .stages import Stage


class CommandsExporter:
    """
    Exports stage commands to BASH scripts
    """

    def __init__(self, scripts_dir: pathlib.Path):
        """
        Args:
            scripts_dir: Path where scripts should be stored
        """
        self._scripts_dir = scripts_dir

    def export(self, stage: Stage) -> Command:
        """
        Export stage commands to a script and return a new command to execute

        Args:
            stage: Stage object with commands

        Returns:
            Command object with script execution command
        """
        filename = self._get_filename(stage.label)
        file_path = self._scripts_dir / filename
        with open(file_path, "w+") as stagefile:
            stagefile.write("set -x\n")
            stagefile.write("set -e\n")
            stagefile.write("export PYTHONUNBUFFERED=1\n")
            stagefile.write("export PYTHONPATH=`pwd`\n")
            for command in stage.commands:
                stagefile.write(str(command))

        result = os.system(f'ex +"set syn=sh" +"norm gg=G" -cwq {file_path}')
        if result != 0:
            raise RunnerException(f"Failed running {filename} script formatting. Exit code {result}")

        command = Command(f"bash -xe {file_path.as_posix()}")

        return command

    def _get_filename(self, label: str):
        """
        Generate filename for script based on label

        Args:
            label: String with stage label

        Returns:
            String with script filename
        """
        filename = label.replace(" ", "_").lower()
        filename = f"{filename}.sh"

        return filename
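
# A minimal usage sketch (hedged: `stage` construction and the runner wiring
# are assumptions; `Stage` objects are normally built elsewhere in the runner):
#
#   exporter = CommandsExporter(scripts_dir=pathlib.Path("runner_scripts"))
#   command = exporter.export(stage)  # writes runner_scripts/<label>.sh
#   # str(command) is then "bash -xe runner_scripts/<label>.sh"
#
# Note that `export` shells out to `ex` to re-indent the generated script, so a
# Vim-compatible `ex` binary must be available on PATH.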
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/plugins/taco2ModulationRemovalPlugin
taco2ModulationRemovalPlugin
taco2ModulationRemovalLayerPlugin
/*
 * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the NVIDIA CORPORATION nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "taco2ModulationRemovalLayerPlugin.h"
#include "taco2ModulationRemovalKernel.h"
#include "taco2Utils.h"

#include <cassert>
#include <cstdlib>
#include <cstring>
#include <cuda_runtime.h> // cudaError_t
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <string>

using namespace tts;

namespace nvinfer1
{
namespace plugin
{

using value_type = Taco2ModulationRemovalLayerPlugin::value_type;

/******************************************************************************
 * CONSTANTS ******************************************************************
 *****************************************************************************/

namespace
{

constexpr const char* const PLUGIN_NAME = "Taco2ModulationRemoval";
constexpr const char* const PLUGIN_VERSION = "0.1.0";
constexpr const int NUM_INPUTS = 1;
constexpr const int NUM_OUTPUTS = 1;

} // namespace

/******************************************************************************
 * STATIC METHODS *************************************************************
 *****************************************************************************/

const char* Taco2ModulationRemovalLayerPlugin::getName()
{
    return PLUGIN_NAME;
}

const char* Taco2ModulationRemovalLayerPlugin::getVersion()
{
    return PLUGIN_VERSION;
}

Taco2ModulationRemovalLayerPlugin Taco2ModulationRemovalLayerPlugin::deserialize(
    const void* const data, const size_t length)
{
    if (length < sizeof(int32_t) * 3)
    {
        throw std::runtime_error("Invalid serialized size: " + std::to_string(length));
    }

    const int inputLength = static_cast<const int32_t*>(data)[0];
    const int filterLength = static_cast<const int32_t*>(data)[1];
    const int hopLength = static_cast<const int32_t*>(data)[2];

    const size_t reqSize = 3 * sizeof(int32_t) + sizeof(value_type) * filterLength;
    if (reqSize != length)
    {
        throw std::runtime_error(
            "Invalid serialized size: " + std::to_string(length) + " / " + std::to_string(reqSize));
    }

    const Weights weights{DataType::kFLOAT, static_cast<const int32_t*>(data) + 3, filterLength};

    Taco2ModulationRemovalLayerPlugin layer(weights, inputLength, filterLength, hopLength);

    return layer;
}

/******************************************************************************
 * CONSTRUCTORS / DESTRUCTOR **************************************************
 *****************************************************************************/

Taco2ModulationRemovalLayerPlugin::Taco2ModulationRemovalLayerPlugin(
    const Weights& weights, const int inputLength, const int filterLength, const int hopLength)
    : mInputLength(inputLength)
    , mFilterLength(filterLength)
    , mHopLength(hopLength)
    , mWeightsHost(taco2::Taco2Utils::toFloatVector(weights))
    , mWeightsDevice()
    , mNamespace()
{
    if (mInputLength <= 0)
    {
        throw std::runtime_error("Invalid Taco2ModulationRemoval inputLength: " + std::to_string(mInputLength));
    }
    if (mFilterLength <= 0)
    {
        throw std::runtime_error("Invalid Taco2ModulationRemoval filterLength " + std::to_string(mFilterLength));
    }
    if (mHopLength <= 0)
    {
        throw std::runtime_error("Invalid Taco2ModulationRemoval hopLength: " + std::to_string(mHopLength));
    }

    const int expNumWeights = mFilterLength;
    if (mWeightsHost.size() != static_cast<size_t>(expNumWeights))
    {
        throw std::runtime_error("Incorrect Taco2ModulationRemoval number of weights: "
            + std::to_string(mWeightsHost.size()) + " / " + std::to_string(expNumWeights));
    }
}

Taco2ModulationRemovalLayerPlugin::Taco2ModulationRemovalLayerPlugin(Taco2ModulationRemovalLayerPlugin&& other)
    : mInputLength(other.mInputLength)
    , mFilterLength(other.mFilterLength)
    , mHopLength(other.mHopLength)
    , mWeightsHost(std::move(other.mWeightsHost))
    , mWeightsDevice(std::move(other.mWeightsDevice))
    , mNamespace(std::move(other.mNamespace))
{
    other.mFilterLength = 0;
    other.mInputLength = 0;
    other.mHopLength = 0;
}

Taco2ModulationRemovalLayerPlugin::~Taco2ModulationRemovalLayerPlugin()
{
    destroy();
}

/******************************************************************************
 * PUBLIC METHODS *************************************************************
 *****************************************************************************/

Taco2ModulationRemovalLayerPlugin& Taco2ModulationRemovalLayerPlugin::operator=(
    Taco2ModulationRemovalLayerPlugin&& other)
{
    // move members and reset the source, mirroring the move constructor
    // (assigning a temporary here would re-enter this operator)
    mInputLength = other.mInputLength;
    mFilterLength = other.mFilterLength;
    mHopLength = other.mHopLength;
    mWeightsHost = std::move(other.mWeightsHost);
    mWeightsDevice = std::move(other.mWeightsDevice);
    mNamespace = std::move(other.mNamespace);

    other.mInputLength = 0;
    other.mFilterLength = 0;
    other.mHopLength = 0;

    return *this;
}

DataType Taco2ModulationRemovalLayerPlugin::getOutputDataType(
    const int /* index */, const DataType* const /* inputTypes */, const int /* nbInputs */) const
{
    return DataType::kFLOAT;
}

bool Taco2ModulationRemovalLayerPlugin::isOutputBroadcastAcrossBatch(
    const int /* outputIndex */, const bool* const /* inputIsBroadCasted */, const int /* nbInputs */) const
{
    return false;
}

bool Taco2ModulationRemovalLayerPlugin::canBroadcastInputAcrossBatch(const int /* inputIndex */) const
{
    return false;
}

const char* Taco2ModulationRemovalLayerPlugin::getPluginType() const
{
    return getName();
}

const char* Taco2ModulationRemovalLayerPlugin::getPluginVersion() const
{
    return getVersion();
}

int Taco2ModulationRemovalLayerPlugin::getNbOutputs() const
{
    return NUM_OUTPUTS;
}

Dims Taco2ModulationRemovalLayerPlugin::getOutputDimensions(
    const int index, const Dims* const /*inputs*/, const int nbInputDims)
{
    if (index >= getNbOutputs())
    {
        throw std::runtime_error("Only has one output.");
    }

    if (nbInputDims != NUM_INPUTS)
    {
        throw std::runtime_error(
            "Can only handle " + std::to_string(NUM_INPUTS) + " input tensors: " + std::to_string(nbInputDims));
    }

    // magnitude and phase are of the same size
    return Dims3(1, 1, mInputLength - mFilterLength);
}

bool Taco2ModulationRemovalLayerPlugin::supportsFormat(
    const nvinfer1::DataType type, const nvinfer1::PluginFormat /* format */) const
{
    return type == DataType::kFLOAT;
}

void Taco2ModulationRemovalLayerPlugin::configurePlugin(const nvinfer1::Dims* const inputDims, const int nbInputs,
    const nvinfer1::Dims* const /* outputDims */, const int nbOutputs, const nvinfer1::DataType* const inputTypes,
    const nvinfer1::DataType* const /*outputTypes*/, const bool* const inputIsBroadcast,
    const bool* const /*outputIsBroadcast*/, const nvinfer1::PluginFormat /* format */, const int /* maxBatchSize */)
{
    if (nbInputs != NUM_INPUTS)
    {
        throw std::runtime_error(
            "Can only handle " + std::to_string(NUM_INPUTS) + " input tensors: " + std::to_string(nbInputs));
    }
    if (nbOutputs != NUM_OUTPUTS)
    {
        throw std::runtime_error(
            "Can only handle " + std::to_string(NUM_OUTPUTS) + " output tensors: " + std::to_string(nbOutputs));
    }

    for (int i = 0; i < nbInputs; ++i)
    {
        if (inputTypes[i] != DataType::kFLOAT)
        {
            throw std::runtime_error("Only FLOAT supported as input " + std::to_string(i) + " : "
                + std::to_string(static_cast<int>(inputTypes[i])));
        }
        if (inputIsBroadcast[i])
        {
            throw std::runtime_error("Broadcasting input is not supported.");
        }
    }

    // assert dimensions
    {
        const Dims dims = taco2::Taco2Utils::getCompactedDims(inputDims[0], 1);
        if (dims.nbDims != 1 || dims.d[0] != mInputLength)
        {
            throw std::runtime_error("Taco2ModulationRemoval input must be 1* x inputLength ("
                + std::to_string(mInputLength) + ") : " + taco2::Taco2Utils::dimsToString(dims));
        }
    }
}

int Taco2ModulationRemovalLayerPlugin::initialize()
{
    try
    {
        mWeightsDevice = CudaMemory<float>(mWeightsHost);
    }
    catch (const std::exception& e)
    {
        std::cerr << "Taco2ModulationRemoval initialization failed: " << e.what() << std::endl;
        return 1;
    }

    return 0;
}

void Taco2ModulationRemovalLayerPlugin::terminate()
{
    mWeightsDevice.clear();
}

size_t Taco2ModulationRemovalLayerPlugin::getWorkspaceSize(const int /*maxBatchSize*/) const
{
    return 0;
}

int Taco2ModulationRemovalLayerPlugin::enqueue(const int batchSize, const void* const* const inputs,
    void** const outputs, void* const /*workspace*/, cudaStream_t stream)
{
    // name inputs and outputs
    const value_type* const inputDevice = static_cast<const value_type*>(inputs[0]);
    value_type* const outputDevice = static_cast<value_type*>(outputs[0]);

    try
    {
        Taco2ModulationRemovalKernel::compute(batchSize, static_cast<const float*>(mWeightsDevice.data()),
            inputDevice, outputDevice, mInputLength, mFilterLength, mHopLength, stream);
    }
    catch (const std::exception& e)
    {
        std::cerr << "Failed to launch Taco2ModulationRemoval kernel due to: " << e.what() << std::endl;
        return 1;
    }

    return 0;
}

size_t Taco2ModulationRemovalLayerPlugin::getSerializationSize() const
{
    return sizeof(int32_t) * 3 + sizeof(value_type) * mWeightsHost.size();
}

void Taco2ModulationRemovalLayerPlugin::serialize(void* const buffer) const
{
    static_cast<int32_t*>(buffer)[0] = mInputLength;
    static_cast<int32_t*>(buffer)[1] = mFilterLength;
    static_cast<int32_t*>(buffer)[2] = mHopLength;

    value_type* const weights = reinterpret_cast<value_type*>(static_cast<int32_t*>(buffer) + 3);
    memcpy(weights, mWeightsHost.data(), sizeof(value_type) * mWeightsHost.size());
}

void Taco2ModulationRemovalLayerPlugin::destroy()
{
    terminate();
}

IPluginV2Ext* Taco2ModulationRemovalLayerPlugin::clone() const
{
    // call constructor which copies data
    Taco2ModulationRemovalLayerPlugin clone(
        Weights{DataType::kFLOAT, mWeightsHost.data(), static_cast<int64_t>(mWeightsHost.size())},
        mInputLength, mFilterLength, mHopLength);

    if (mWeightsDevice.size() > 0)
    {
        // initialize the clone too
        clone.initialize();
    }

    // move it to the heap last to avoid exceptions causing memory leaks
    return new Taco2ModulationRemovalLayerPlugin(std::move(clone));
}

void Taco2ModulationRemovalLayerPlugin::setPluginNamespace(const char* pluginNamespace)
{
    mNamespace = pluginNamespace;
}

const char* Taco2ModulationRemovalLayerPlugin::getPluginNamespace() const
{
    return mNamespace.c_str();
}

} // namespace plugin
} // namespace nvinfer1
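
// A hedged round-trip sketch of the serialization contract above (assumes a
// `plugin` instance already constructed with a valid filter-weight buffer):
//
//   std::vector<char> buffer(plugin.getSerializationSize());
//   plugin.serialize(buffer.data());
//   // layout: [int32 inputLength][int32 filterLength][int32 hopLength]
//   //         [filterLength x float weights]
//   Taco2ModulationRemovalLayerPlugin copy
//       = Taco2ModulationRemovalLayerPlugin::deserialize(buffer.data(), buffer.size());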
PyTorch/Classification/ConvNets/triton
triton
client
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import json
import os

import numpy as np
import torch
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from image_classification.dataloaders import get_pytorch_val_loader
from tqdm import tqdm

import tritongrpcclient
from tritonclientutils import InferenceServerException


def get_data_loader(batch_size, *, data_path):
    valdir = os.path.join(data_path, "val-jpeg")
    val_dataset = datasets.ImageFolder(
        valdir,
        transforms.Compose(
            [transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor()]
        ),
    )
    val_loader = torch.utils.data.DataLoader(
        val_dataset, batch_size=batch_size, shuffle=False
    )
    return val_loader


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--triton-server-url",
        type=str,
        required=True,
        help="URL address of Triton server (with port)",
    )
    parser.add_argument(
        "--triton-model-name",
        type=str,
        required=True,
        help="Triton deployed model name",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", default=False, help="Verbose mode."
    )
    parser.add_argument(
        "--inference_data", type=str, help="Path to file with inference data."
    )
    parser.add_argument(
        "--batch_size", type=int, default=1, help="Inference request batch size"
    )
    parser.add_argument(
        "--fp16",
        action="store_true",
        default=False,
        help="Use fp16 precision for input data",
    )

    FLAGS = parser.parse_args()
    triton_client = tritongrpcclient.InferenceServerClient(
        url=FLAGS.triton_server_url, verbose=FLAGS.verbose
    )
    dataloader = get_data_loader(FLAGS.batch_size, data_path=FLAGS.inference_data)

    inputs = []
    inputs.append(
        tritongrpcclient.InferInput(
            "input__0",
            [FLAGS.batch_size, 3, 224, 224],
            "FP16" if FLAGS.fp16 else "FP32",
        )
    )

    outputs = []
    outputs.append(tritongrpcclient.InferRequestedOutput("output__0"))

    all_img = 0
    cor_img = 0

    for image, target in tqdm(dataloader):
        if FLAGS.fp16:
            image = image.half()
        inputs[0].set_data_from_numpy(image.numpy())

        result = triton_client.infer(
            FLAGS.triton_model_name, inputs, outputs=outputs, headers=None
        )
        result = result.as_numpy("output__0")
        result = np.argmax(result, axis=1)
        cor_img += np.sum(result == target.numpy())
        all_img += result.shape[0]

    acc = cor_img / all_img
    print(f"Final accuracy {acc:.04f}")
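
# Example invocation (hedged: the server address, model name and dataset path
# are placeholders). The script expects `$inference_data/val-jpeg` to hold an
# ImageFolder-style validation set, and the deployed model to expose tensors
# named `input__0` / `output__0`:
#
#   python triton/client.py \
#       --triton-server-url localhost:8001 \
#       --triton-model-name resnet50 \
#       --inference_data /data/imagenet \
#       --batch_size 32 \
#       --fp16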
PyTorch/SpeechSynthesis/Tacotron2
Tacotron2
main
# *****************************************************************************
#  Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
#  Redistribution and use in source and binary forms, with or without
#  modification, are permitted provided that the following conditions are met:
#      * Redistributions of source code must retain the above copyright
#        notice, this list of conditions and the following disclaimer.
#      * Redistributions in binary form must reproduce the above copyright
#        notice, this list of conditions and the following disclaimer in the
#        documentation and/or other materials provided with the distribution.
#      * Neither the name of the NVIDIA CORPORATION nor the
#        names of its contributors may be used to endorse or promote products
#        derived from this software without specific prior written permission.
#
#  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#  DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
#  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************

import argparse

from train import main as main_train
from inference_perf import main as main_infer


def parse_args(parser):
    """
    Parse commandline arguments.
    """
    parser.add_argument('--bench-class', type=str,
                        choices=['train', 'perf-infer', 'perf-train'],
                        required=True, help='Choose test class')
    return parser


def main():
    parser = argparse.ArgumentParser(description='PyTorch Tacotron 2 Testing')
    parser = parse_args(parser)
    args, unknown_args = parser.parse_known_args()

    if "train" in args.bench_class:
        main_train()
    else:
        main_infer()


if __name__ == '__main__':
    main()
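
# Example invocations (extra flags are forwarded to the underlying entry points
# via parse_known_args):
#
#   python main.py --bench-class train       # dispatches to train.main
#   python main.py --bench-class perf-train  # also dispatches to train.main
#   python main.py --bench-class perf-infer  # dispatches to inference_perf.main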
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/cli/commands
commands
preprocess
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging

from syngen.cli.commands.base_command import BaseCommand
from syngen.preprocessing.datasets import DATASETS

logger = logging.getLogger(__name__)
log = logger


class PreprocessingCommand(BaseCommand):

    def init_parser(self, base_parser):
        preprocessing_parser = base_parser.add_parser(
            "preprocess",
            help="Run Dataset Preprocessing",
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        )
        preprocessing_parser.set_defaults(action=self.run)

        preprocessing_parser.add_argument(
            "--dataset", type=str, default=None, required=True,
            choices=list(DATASETS.keys()),
            help="Dataset to preprocess",
        )
        preprocessing_parser.add_argument(
            "-sp", "--source-path", type=str, default=None, required=True,
            help="Path to raw data",
        )
        preprocessing_parser.add_argument(
            "-dp", "--destination-path", type=str, default=None, required=False,
            help="Path to store the preprocessed data. Default is $source_path/syngen_preprocessed",
        )
        preprocessing_parser.add_argument(
            "--download", action='store_true',
            help="Downloads the dataset if specified",
        )
        preprocessing_parser.add_argument(
            "--cpu", action='store_true',
            help='Performs the preprocessing without leveraging GPU'
        )
        preprocessing_parser.add_argument(
            "--use-cache", action='store_true',
            help='Does nothing if the target preprocessed dataset exists'
        )

        for preprocessing_class in DATASETS.values():
            preprocessing_class.add_cli_args(preprocessing_parser)

    def run(self, args):
        dict_args = vars(args)

        dataset_name = dict_args.pop('dataset')
        source_path = dict_args.pop('source_path')
        destination_path = dict_args.pop('destination_path')
        download = dict_args.pop('download')
        gpu = not dict_args.pop('cpu')
        use_cache = dict_args.pop('use_cache')

        preprocessing_class = DATASETS[dataset_name]

        if download:
            try:
                preprocessing_class(source_path=source_path,
                                    destination_path=destination_path,
                                    download=download,
                                    **dict_args)
                log.info(f"{dataset_name} successfully downloaded into {source_path}")
            except NotImplementedError:
                log.info(f"{dataset_name} does not support automatic downloading, please download the dataset manually")
        else:
            preprocessing = preprocessing_class(source_path=source_path,
                                                destination_path=destination_path,
                                                download=download,
                                                **dict_args)
            preprocessing.transform(gpu=gpu, use_cache=use_cache)
            log.info(f"{dataset_name} successfully preprocessed into {preprocessing.destination_path}")
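
# Example CLI call (hedged: this assumes the package exposes a `syngen` console
# entry point that registers this command, and `<dataset>` stands for one of
# the keys in syngen.preprocessing.datasets.DATASETS):
#
#   syngen preprocess --dataset <dataset> -sp /raw/data --download --use-cache
#
# Without -dp/--destination-path the output defaults to
# $source_path/syngen_preprocessed.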
PyTorch/LanguageModeling/BERT/data
data
BookscorpusTextFormatting
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import glob
import os


class BookscorpusTextFormatting:
    def __init__(self, books_path, output_filename, recursive=False):
        self.books_path = books_path
        self.recursive = recursive
        self.output_filename = output_filename

    # This puts one book per line
    def merge(self):
        # honor the `recursive` flag: glob only descends into subdirectories
        # when the pattern contains '**' and recursive=True is passed
        pattern = self.books_path + ('/**/*.txt' if self.recursive else '/*.txt')
        with open(self.output_filename, mode='w', newline='\n') as ofile:
            for filename in glob.glob(pattern, recursive=self.recursive):
                with open(filename, mode='r', encoding='utf-8-sig', newline='\n') as file:
                    for line in file:
                        if line.strip() != '':
                            ofile.write(line.strip() + ' ')
                ofile.write("\n\n")
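
# Minimal usage sketch (paths are hypothetical):
#
#   formatter = BookscorpusTextFormatting(
#       books_path='/workspace/bookscorpus/books',
#       output_filename='/workspace/bookscorpus/one_book_per_line.txt',
#       recursive=False)
#   formatter.merge()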
PyTorch/Classification/GPUNet/triton
triton
run_inference_on_triton
#!/usr/bin/env python3

# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

r"""
To infer the model deployed on Triton, you can use `run_inference_on_triton.py` script.
It sends a request with data obtained from pointed data loader and dumps received data into dump files.
Those files are stored in directory pointed by `--output-dir` argument.

Currently, the client communicates with the Triton server asynchronously using GRPC protocol.

Example call:

```shell script
python ./triton/run_inference_on_triton.py \
    --server-url localhost:8001 \
    --model-name ResNet50 \
    --model-version 1 \
    --dump-labels \
    --output-dir /results/dump_triton
```
"""

import argparse
import logging
import time
import traceback
from pathlib import Path

from tqdm import tqdm

# method from PEP-366 to support relative import in executed modules
if __package__ is None:
    __package__ = Path(__file__).parent.name

from .deployment_toolkit.args import ArgParserGenerator
from .deployment_toolkit.core import DATALOADER_FN_NAME, load_from_file
from .deployment_toolkit.dump import JsonDumpWriter
from .deployment_toolkit.triton_inference_runner import TritonInferenceRunner

LOGGER = logging.getLogger("run_inference_on_triton")


def _parse_args():
    parser = argparse.ArgumentParser(description="Infer model on Triton server", allow_abbrev=False)
    parser.add_argument(
        "--server-url", type=str, default="localhost:8001", help="Inference server URL (default localhost:8001)"
    )
    parser.add_argument("--model-name", help="The name of the model used for inference.", required=True)
    parser.add_argument("--model-version", help="The version of the model used for inference.", required=True)
    parser.add_argument("--dataloader", help="Path to python file containing dataloader.", required=True)
    parser.add_argument("--dump-labels", help="Dump labels to output dir", action="store_true", default=False)
    parser.add_argument("--dump-inputs", help="Dump inputs to output dir", action="store_true", default=False)
    parser.add_argument("-v", "--verbose", help="Verbose logs", action="store_true", default=False)
    parser.add_argument("--output-dir", required=True, help="Path to directory where outputs will be saved")
    parser.add_argument(
        "--response-wait-time", required=False, help="Maximal time to wait for response", default=120, type=float
    )
    parser.add_argument(
        "--max-unresponded-requests",
        required=False,
        help="Maximal number of unresponded requests",
        default=128,
        type=int,
    )
    parser.add_argument(
        "--synchronous", help="Enable synchronous calls to Triton Server", action="store_true", default=False
    )

    args, *_ = parser.parse_known_args()

    get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
    ArgParserGenerator(get_dataloader_fn).update_argparser(parser)

    args = parser.parse_args()

    return args


def main():
    args = _parse_args()

    log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
    log_level = logging.INFO if not args.verbose else logging.DEBUG
    logging.basicConfig(level=log_level, format=log_format)

    LOGGER.info("args:")
    for key, value in vars(args).items():
        LOGGER.info(f"    {key} = {value}")

    get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
    dataloader_fn = ArgParserGenerator(get_dataloader_fn).from_args(args)

    try:
        runner = TritonInferenceRunner(
            server_url=args.server_url,
            model_name=args.model_name,
            model_version=args.model_version,
            dataloader_fn=dataloader_fn,
            verbose=False,
            response_wait_time=args.response_wait_time,
            max_unresponded_requests=args.max_unresponded_requests,
            synchronous=args.synchronous,
        )
    except Exception as e:
        message = traceback.format_exc()
        LOGGER.error(f"Encountered exception \n{message}")
        raise e

    with JsonDumpWriter(output_dir=args.output_dir) as writer:
        start = time.time()
        for ids, x, y_pred, y_real in tqdm(runner, unit="batch", mininterval=10):
            data = _verify_and_format_dump(args, ids, x, y_pred, y_real)
            writer.write(**data)
        stop = time.time()

    LOGGER.info(f"\nThe inference took {stop - start:0.3f}s")


def _verify_and_format_dump(args, ids, x, y_pred, y_real):
    data = {"outputs": y_pred, "ids": {"ids": ids}}
    if args.dump_inputs:
        data["inputs"] = x
    if args.dump_labels:
        if not y_real:
            raise ValueError(
                "Found empty label values. Please provide labels in dataloader_fn or do not use --dump-labels argument"
            )
        data["labels"] = y_real
    return data


if __name__ == "__main__":
    main()
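
# A hedged sketch of the module that `--dataloader` must point to. The batch
# tuple contract (ids, inputs dict, labels dict) is an assumption inferred from
# how _verify_and_format_dump consumes the runner output, and the function name
# assumes DATALOADER_FN_NAME resolves to "get_dataloader_fn" as the local
# variable naming above suggests; names and shapes are placeholders:
#
#   # dataloader.py
#   import numpy as np
#
#   def get_dataloader_fn(batch_size: int = 8):
#       def _dataloader():
#           ids = np.arange(batch_size)
#           x = {"INPUT__0": np.zeros((batch_size, 3, 224, 224), dtype=np.float32)}
#           y_real = {"OUTPUT__0": np.zeros((batch_size, 1000), dtype=np.float32)}
#           yield ids, x, y_real
#       return _dataloader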
TensorFlow2/LanguageModeling/BERT/official/utils/logs
logs
mlperf_helper
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Wrapper for the mlperf logging utils.

MLPerf compliance logging is only desired under a limited set of
circumstances. This module is intended to keep users from needing to consider
logging (or install the module) unless they are performing mlperf runs.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from collections import namedtuple
import json
import os
import re
import subprocess
import sys
import typing

import tensorflow as tf

_MIN_VERSION = (0, 0, 10)
_STACK_OFFSET = 2

SUDO = "sudo" if os.geteuid() else ""

# This indirection is used in docker.
DROP_CACHE_LOC = os.getenv("DROP_CACHE_LOC", "/proc/sys/vm/drop_caches")

_NCF_PREFIX = "NCF_RAW_"

# TODO(robieta): move line parsing to mlperf util
_PREFIX = r"(?:{})?:::MLPv([0-9]+).([0-9]+).([0-9]+)".format(_NCF_PREFIX)
_BENCHMARK = r"([a-zA-Z0-9_]+)"
_TIMESTAMP = r"([0-9]+\.[0-9]+)"
_CALLSITE = r"\((.+):([0-9]+)\)"
_TAG = r"([a-zA-Z0-9_]+)"
_VALUE = r"(.*)"

ParsedLine = namedtuple("ParsedLine", ["version", "benchmark", "timestamp",
                                       "callsite", "tag", "value"])

LINE_PATTERN = re.compile(
    "^{prefix} {benchmark} {timestamp} {callsite} {tag}(: |$){value}?$".format(
        prefix=_PREFIX, benchmark=_BENCHMARK, timestamp=_TIMESTAMP,
        callsite=_CALLSITE, tag=_TAG, value=_VALUE))


def parse_line(line):  # type: (str) -> typing.Optional[ParsedLine]
  match = LINE_PATTERN.match(line.strip())
  if not match:
    return

  major, minor, micro, benchmark, timestamp = match.groups()[:5]
  call_file, call_line, tag, _, value = match.groups()[5:]

  return ParsedLine(version=(int(major), int(minor), int(micro)),
                    benchmark=benchmark, timestamp=timestamp,
                    callsite=(call_file, call_line), tag=tag, value=value)


def unparse_line(parsed_line):  # type: (ParsedLine) -> str
  version_str = "{}.{}.{}".format(*parsed_line.version)
  callsite_str = "({}:{})".format(*parsed_line.callsite)
  value_str = ": {}".format(parsed_line.value) if parsed_line.value else ""
  return ":::MLPv{} {} {} {} {} {}".format(
      version_str, parsed_line.benchmark, parsed_line.timestamp, callsite_str,
      parsed_line.tag, value_str)


def get_mlperf_log():
  """Shielded import of mlperf_log module."""
  try:
    import mlperf_compliance

    def test_mlperf_log_pip_version():
      """Check that mlperf_compliance is up to date."""
      import pkg_resources
      version = pkg_resources.get_distribution("mlperf_compliance")
      version = tuple(int(i) for i in version.version.split("."))
      if version < _MIN_VERSION:
        tf.compat.v1.logging.warning(
            "mlperf_compliance is version {}, must be >= {}".format(
                ".".join([str(i) for i in version]),
                ".".join([str(i) for i in _MIN_VERSION])))
        raise ImportError
      return mlperf_compliance.mlperf_log

    mlperf_log = test_mlperf_log_pip_version()

  except ImportError:
    mlperf_log = None

  return mlperf_log


class Logger(object):
  """MLPerf logger indirection class.

  This logger only logs for MLPerf runs, and prevents various errors associated
  with not having the mlperf_compliance package installed.
  """

  class Tags(object):
    def __init__(self, mlperf_log):
      self._enabled = False
      self._mlperf_log = mlperf_log

    def __getattr__(self, item):
      if self._mlperf_log is None or not self._enabled:
        return
      return getattr(self._mlperf_log, item)

  def __init__(self):
    self._enabled = False
    self._mlperf_log = get_mlperf_log()
    self.tags = self.Tags(self._mlperf_log)

  def __call__(self, enable=False):
    if enable and self._mlperf_log is None:
      raise ImportError("MLPerf logging was requested, but mlperf_compliance "
                        "module could not be loaded.")
    self._enabled = enable
    self.tags._enabled = enable
    return self

  def __enter__(self):
    pass

  def __exit__(self, exc_type, exc_val, exc_tb):
    self._enabled = False
    self.tags._enabled = False

  @property
  def log_file(self):
    if self._mlperf_log is None:
      return
    return self._mlperf_log.LOG_FILE

  @property
  def enabled(self):
    return self._enabled

  def ncf_print(self, key, value=None, stack_offset=_STACK_OFFSET,
                deferred=False, extra_print=False, prefix=_NCF_PREFIX):
    if self._mlperf_log is None or not self.enabled:
      return
    self._mlperf_log.ncf_print(key=key, value=value, stack_offset=stack_offset,
                               deferred=deferred, extra_print=extra_print,
                               prefix=prefix)

  def set_ncf_root(self, path):
    if self._mlperf_log is None:
      return
    self._mlperf_log.ROOT_DIR_NCF = path


LOGGER = Logger()
ncf_print, set_ncf_root = LOGGER.ncf_print, LOGGER.set_ncf_root
TAGS = LOGGER.tags


def clear_system_caches():
  if not LOGGER.enabled:
    return
  ret_code = subprocess.call(
      ["sync && echo 3 | {} tee {}".format(SUDO, DROP_CACHE_LOC)],
      shell=True)

  if ret_code:
    raise ValueError("Failed to clear caches")


if __name__ == "__main__":
  tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
  with LOGGER(True):
    ncf_print(key=TAGS.RUN_START)
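
# A small parser round-trip sketch (the log line is a hypothetical example
# crafted to match LINE_PATTERN):
#
#   line = ":::MLPv0.5.0 ncf 1538678136.300 (train.py:42) run_start"
#   parsed = parse_line(line)
#   # parsed.version == (0, 5, 0); parsed.tag == "run_start"
#   print(unparse_line(parsed))  # emits an equivalent ":::MLPv..." line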
TensorFlow/Segmentation/UNet_Industrial/model/layers
layers
dense
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# ==============================================================================
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================

import tensorflow as tf

from model.layers.utils import _log_hparams

__all__ = ['dense']


def dense(
    inputs,
    units,
    use_bias=True,
    trainable=True,
    kernel_initializer=tf.variance_scaling_initializer(),
    bias_initializer=tf.zeros_initializer()
):

    net = tf.layers.dense(
        inputs,
        units=units,
        activation=None,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        trainable=trainable
    )

    _log_hparams(
        classname='Dense',
        layername=net.name,
        units=units,
        use_bias=use_bias,
        trainable=trainable,
        out_shape=str(net.get_shape()),
        out_dtype=net.dtype
    )

    return net
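A minimal usage sketch (assumed, not part of the repository): the layer operates on a TF1-style placeholder, since the module relies on tf.layers. The input shape and unit count are illustrative.

import tensorflow as tf

# Hypothetical input batch: any number of 128-dimensional feature vectors.
inputs = tf.placeholder(tf.float32, shape=[None, 128])

# Builds a 10-unit linear projection and logs its hyperparameters.
logits = dense(inputs, units=10, use_bias=True, trainable=True)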
PyTorch/Recommendation/DLRM
DLRM
dgxa100_ccx
#!/bin/bash

# Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Maps local GPU rank to the CPU core ranges and NUMA memory node of the
# matching CCX on a DGX A100.
bind_cpu_cores=([0]="48-51,176-179" [1]="60-63,188-191" [2]="16-19,144-147" [3]="28-31,156-159" [4]="112-115,240-243" [5]="124-127,252-255" [6]="80-83,208-211" [7]="92-95,220-223")
bind_mem=([0]="3" [1]="3" [2]="1" [3]="1" [4]="7" [5]="7" [6]="5" [7]="5")
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/test
test
UnitTest
/*
 * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the NVIDIA CORPORATION nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "UnitTest.hpp"

#include <exception>
#include <iostream>

namespace
{
std::vector<UnitTest*>* s_tests = nullptr;
}

/******************************************************************************
 * PUBLIC STATIC METHODS ******************************************************
 *****************************************************************************/

bool UnitTest::runAll()
{
  size_t numPassed = 0;
  size_t numTests = 0;
  if (s_tests) {
    numTests = s_tests->size();
    for (UnitTest* const test : *s_tests) {
      try {
        test->run();
        if (test->passed()) {
          std::cout << "Test: " << test->fullname() << " passed." << std::endl;
          ++numPassed;
          continue;
        }
      } catch (const TestException&) {
        // assertion failed
      } catch (const std::exception& e) {
        std::cout << "Unhandled exception: " << e.what() << std::endl;
      }

      std::cout << "Test: " << test->fullname() << " failed." << std::endl;
    }
  }

  std::cout << numPassed << " / " << numTests << " passed." << std::endl;

  return numPassed == numTests;
}

void UnitTest::registerTest(UnitTest* const test)
{
  if (!s_tests) {
    s_tests = new std::vector<UnitTest*>(0);
  }
  s_tests->emplace_back(test);
}

/******************************************************************************
 * CONSTRUCTORS / DESTRUCTOR **************************************************
 *****************************************************************************/

UnitTest::UnitTest(const std::string& filename, const std::string& name) :
    m_nullStream(),
    m_passed(true),
    m_filename(filename),
    m_name(name)
{
  registerTest(this);
}

/******************************************************************************
 * PUBLIC METHODS *************************************************************
 *****************************************************************************/

std::string UnitTest::fullname() const
{
  return m_filename + "__" + m_name;
}

bool UnitTest::passed() const
{
  return m_passed;
}

/******************************************************************************
 * PROTECTED METHODS **********************************************************
 *****************************************************************************/

void UnitTest::failure()
{
  m_passed = false;
}

/******************************************************************************
 * MAIN ***********************************************************************
 *****************************************************************************/

int main(int /*argc*/, char** /*argv*/)
{
  if (UnitTest::runAll()) {
    return 0;
  } else {
    return 1;
  }
}
PyTorch/Classification/ConvNets/triton/scripts/docker
docker
build
#!/usr/bin/env bash
# Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

docker build -t resnet50 . -f triton/resnet50/Dockerfile
PyTorch/DrugDiscovery/MoFlow
MoFlow
setup
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup

setup(
    name='moflow_pyt',
    packages=[
        'moflow',
        'moflow.data',
        'moflow.model',
        'moflow.runtime'
    ],
    version='0.0.1',
    description='MoFlow: an invertible flow model for generating molecular graphs',
)
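After a regular or editable install from the repository root (e.g. pip install .), the packages declared above become importable; a quick smoke check, assuming the install succeeded:

# Assumes `pip install .` was run in the repository root.
import moflow
import moflow.runtime

print(moflow.__name__)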
TensorFlow/Segmentation/UNet_Industrial/scripts/benchmarking
benchmarking
UNet_trainbench_4GPU
#!/usr/bin/env bash

# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script launches the UNet training benchmark in FP32/TF32 on 4 GPUs
# with a global batch size of 16 (4 per GPU).
# Usage: ./UNet_trainbench_4GPU.sh <path to dataset> <dagm classID (1-10)>

BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

export TF_CPP_MIN_LOG_LEVEL=3

# Cleaning up for benchmark
RESULT_DIR="/tmp"
rm -rf "${RESULT_DIR}"

mpirun \
    -np 4 \
    -H localhost:4 \
    -bind-to none \
    -map-by slot \
    -x NCCL_DEBUG=VERSION \
    -x LD_LIBRARY_PATH \
    -x PATH \
    -mca pml ob1 -mca btl ^openib \
    --allow-run-as-root \
    python "${BASEDIR}/../../main.py" \
    --unet_variant='tinyUNet' \
    --activation_fn='relu' \
    --exec_mode='training_benchmark' \
    --iter_unit='batch' \
    --num_iter=1500 \
    --batch_size=4 \
    --warmup_step=500 \
    --results_dir="${RESULT_DIR}" \
    --data_dir="${1}" \
    --dataset_name='DAGM2007' \
    --dataset_classID="${2}" \
    --data_format='NCHW' \
    --use_auto_loss_scaling \
    --noamp \
    --xla \
    --learning_rate=1e-4 \
    --learning_rate_decay_factor=0.8 \
    --learning_rate_decay_steps=500 \
    --rmsprop_decay=0.9 \
    --rmsprop_momentum=0.8 \
    --loss_fn_name='adaptive_loss' \
    --weight_decay=1e-5 \
    --weight_init_method='he_uniform' \
    --augment_data \
    --display_every=250 \
    --debug_verbosity=0
TensorFlow/Recommendation/WideAndDeep
WideAndDeep
setup
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import setuptools

NAME = 'trainer'
VERSION = '1.0'
TENSORFLOW_TRANSFORM = 'tensorflow-transform==0.1.8'

if __name__ == '__main__':
    setuptools.setup(name=NAME,
                     version=VERSION,
                     packages=['trainer'],
                     install_requires=[TENSORFLOW_TRANSFORM])
TensorFlow/Detection/SSD/models/research/object_detection/g3doc
g3doc
installation
# Installation

## Dependencies

Tensorflow Object Detection API depends on the following libraries:

*   Protobuf 3.0.0
*   Python-tk
*   Pillow 1.0
*   lxml
*   tf Slim (which is included in the "tensorflow/models/research/" checkout)
*   Jupyter notebook
*   Matplotlib
*   Tensorflow (>=1.9.0)
*   Cython
*   contextlib2
*   cocoapi

For detailed steps to install Tensorflow, follow the [Tensorflow installation instructions](https://www.tensorflow.org/install/). A typical user can install Tensorflow using one of the following commands:

``` bash
# For CPU
pip install tensorflow
# For GPU
pip install tensorflow-gpu
```

The remaining libraries can be installed on Ubuntu 16.04 via apt-get:

``` bash
sudo apt-get install protobuf-compiler python-pil python-lxml python-tk
pip install --user Cython
pip install --user contextlib2
pip install --user jupyter
pip install --user matplotlib
```

Alternatively, users can install dependencies using pip:

``` bash
pip install --user Cython
pip install --user contextlib2
pip install --user pillow
pip install --user lxml
pip install --user jupyter
pip install --user matplotlib
```

<!-- common_typos_disable -->
**Note**: sometimes "sudo apt-get install protobuf-compiler" will install Protobuf 3+ versions for you and some users have issues when using 3.5. If that is your case, try the [manual](#Manual-protobuf-compiler-installation-and-usage) installation.

## COCO API installation

Download the [cocoapi](https://github.com/cocodataset/cocoapi) and copy the pycocotools subfolder to the tensorflow/models/research directory if you are interested in using COCO evaluation metrics. The default metrics are based on those used in Pascal VOC evaluation. To use the COCO object detection metrics add `metrics_set: "coco_detection_metrics"` to the `eval_config` message in the config file. To use the COCO instance segmentation metrics add `metrics_set: "coco_mask_metrics"` to the `eval_config` message in the config file.

```bash
git clone https://github.com/cocodataset/cocoapi.git
cd cocoapi/PythonAPI
make
cp -r pycocotools <path_to_tensorflow>/models/research/
```

## Protobuf Compilation

The Tensorflow Object Detection API uses Protobufs to configure model and training parameters. Before the framework can be used, the Protobuf libraries must be compiled. This should be done by running the following command from the tensorflow/models/research/ directory:

``` bash
# From tensorflow/models/research/
protoc object_detection/protos/*.proto --python_out=.
```

**Note**: If you're getting errors while compiling, you might be using an incompatible protobuf compiler. If that's the case, use the following manual installation.

## Manual protobuf-compiler installation and usage

**If you are on linux:**

Download and install the 3.0 release of protoc, then unzip the file.

```bash
# From tensorflow/models/research/
wget -O protobuf.zip https://github.com/google/protobuf/releases/download/v3.0.0/protoc-3.0.0-linux-x86_64.zip
unzip protobuf.zip
```

Run the compilation process again, but use the downloaded version of protoc:

```bash
# From tensorflow/models/research/
./bin/protoc object_detection/protos/*.proto --python_out=.
```

**If you are on MacOS:**

If you have homebrew, download and install the protobuf with:

```bash
brew install protobuf
```

Alternately, run:

```bash
PROTOC_ZIP=protoc-3.3.0-osx-x86_64.zip
curl -OL https://github.com/google/protobuf/releases/download/v3.3.0/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
rm -f $PROTOC_ZIP
```

Run the compilation process again:

``` bash
# From tensorflow/models/research/
protoc object_detection/protos/*.proto --python_out=.
```

## Add Libraries to PYTHONPATH

When running locally, the tensorflow/models/research/ and slim directories should be appended to PYTHONPATH. This can be done by running the following from tensorflow/models/research/:

``` bash
# From tensorflow/models/research/
export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim
```

Note: This command needs to run from every new terminal you start. If you wish to avoid running it manually, you can add it as a new line to the end of your ~/.bashrc file, replacing \`pwd\` with the absolute path of tensorflow/models/research on your system.

# Testing the Installation

You can test that you have correctly installed the Tensorflow Object Detection API by running the following command:

```bash
python object_detection/builders/model_builder_test.py
```
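As an additional sanity check (a minimal sketch, assuming PYTHONPATH was exported as above), the package should also be importable from a plain Python interpreter:

```python
# Should succeed without ImportError once PYTHONPATH includes
# tensorflow/models/research and tensorflow/models/research/slim.
import object_detection
print(object_detection.__file__)
```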
PaddlePaddle/LanguageModeling/BERT/bert_configs
bert_configs
bert-large-cased
{
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "max_position_embeddings": 512,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "type_vocab_size": 2,
  "vocab_size": 28996
}
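A minimal sketch of reading the hyperparameters in Python; the file path is an assumption based on the directory layout above:

import json

# Hypothetical path; adjust to wherever the config above is stored.
with open("bert_configs/bert-large-cased.json") as f:
    config = json.load(f)

# BERT-Large: 24 layers, 1024 hidden units, 16 attention heads.
assert config["num_hidden_layers"] == 24
assert config["hidden_size"] == 1024
assert config["num_attention_heads"] == 16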
PyTorch/Translation/Transformer/scripts
scripts
run_training
#! /bin/bash
#
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

nvidia-smi

RESULTS_DIR='/results'
CHECKPOINTS_DIR='/results/checkpoints'
STAT_FILE=${RESULTS_DIR}/run_log.json

mkdir -p $CHECKPOINTS_DIR

: ${PREC:='amp'}
: ${SEED:=1}
: ${LR:=0.000846}
: ${WARMUP:=4000}
: ${NUM_EPOCHS:=30}
: ${BS:=5120}
: ${NUM_GPU:=8}
: ${USE_SLURM:=0}
: ${USE_DISTRIBUTED:=1}

DISTRIBUTED=""
[ ${USE_DISTRIBUTED} = 1 ] && DISTRIBUTED+="-m torch.distributed.run --nproc_per_node=${NUM_GPU}"
[ ${USE_DISTRIBUTED} = 1 ] && [ ${USE_SLURM} = 1 ] && DISTRIBUTED+=" --nnodes ${WORLD_SIZE} --node_rank ${SLURM_NODEID} \
    --master_addr ${MASTER_ADDR} --master_port ${MASTER_PORT} "

if [ "$PREC" = "amp" ]; then
    PREC='--amp '
else
    PREC=''
fi

python ${DISTRIBUTED} /workspace/translation/train.py \
  /data/ \
  --arch transformer_wmt_en_de_big_t2t \
  --share-all-embeddings \
  --optimizer adam \
  --adam-betas 0.9 0.997 \
  --adam-eps 1e-9 \
  --clip-norm 0.0 \
  --lr-scheduler inverse_sqrt \
  --warmup-init-lr 0.0 \
  --warmup-updates ${WARMUP} \
  --lr $LR \
  --min-lr 0.0 \
  --dropout 0.1 \
  --weight-decay 0.0 \
  --criterion label_smoothed_cross_entropy \
  --label-smoothing 0.1 \
  --max-tokens ${BS} \
  --seed ${SEED} \
  --max-epoch ${NUM_EPOCHS} \
  --no-save \
  --fuse-layer-norm \
  --online-eval \
  --log-interval 500 \
  --save-dir ${RESULTS_DIR} \
  --stat-file ${STAT_FILE} \
  ${PREC}
TensorFlow2/Recommendation/DLRM_and_DCNv2/deployment/hps
hps
deploy_ensemble
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel (tgrel@nvidia.com)


import os
from collections import namedtuple

Tensor = namedtuple("Tensor", ["name", "dtype", "dims"])

_config_template = r'''
name: "{model_name}"
platform: "ensemble"
max_batch_size: {max_batch_size}
input [
  {{
    name: "EMB_KEY"
    data_type: TYPE_INT64
    dims: [-1]
  }},
  {{
    name: "EMB_N_KEY"
    data_type: TYPE_INT32
    dims: [-1]
  }},
  {{
    name: "numerical_features"
    data_type: TYPE_FP32
    dims: [-1]
  }}
]
output [
  {{
    name: "DENSE_OUTPUT"
    data_type: TYPE_FP32
    dims: [-1]
  }}
]
ensemble_scheduling {{
  step [
    {{
      model_name: "{sparse_model_name}"
      model_version: -1
      input_map {{
        key: "KEYS"
        value: "EMB_KEY"
      }},
      input_map {{
        key: "NUMKEYS"
        value: "EMB_N_KEY"
      }},
      output_map {{
        key: "OUTPUT0"
        value: "LOOKUP_VECTORS"
      }}
    }},
    {{
      model_name: "{dense_model_name}"
      model_version: -1
      input_map {{
        key: "args_1"
        value: "LOOKUP_VECTORS"
      }},
      input_map {{
        key: "args_0"
        value: "numerical_features"
      }},
      output_map {{
        key: "output_1"
        value: "DENSE_OUTPUT"
      }}
    }}
  ]
}}
'''


def deploy_ensemble(dst, model_name, sparse_model_name, dense_model_name,
                    num_cat_features, num_numerical_features,
                    max_batch_size, version):

    config_str = _config_template.format(model_name=model_name,
                                         sparse_model_name=sparse_model_name,
                                         dense_model_name=dense_model_name,
                                         max_batch_size=max_batch_size)

    with open(os.path.join(dst, "config.pbtxt"), "w") as f:
        f.write(config_str)

    os.mkdir(os.path.join(dst, str(version)))

    print("Ensemble configuration:")
    print(config_str)
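A hypothetical invocation (all names, paths, and sizes below are illustrative, not taken from the repository): it writes config.pbtxt into the destination and creates an empty numbered version directory, which is the layout Triton expects for an ensemble.

# Illustrative values only; the `dst` directory must already exist.
deploy_ensemble(
    dst="/models/dcnv2_ensemble",
    model_name="dcnv2_ensemble",
    sparse_model_name="dcnv2_sparse",
    dense_model_name="dcnv2_dense",
    num_cat_features=26,       # accepted but unused by the template above
    num_numerical_features=13,  # likewise unused by the template
    max_batch_size=65536,
    version=1,
)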
PyTorch/Forecasting/TFT/triton/runner
runner
preparer
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import pathlib
from datetime import datetime
from typing import Dict, List

# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
    __package__ = pathlib.Path(__file__).parent.name

from .config import Config
from .configuration import Configuration
from .downloader import download
from .experiment import Experiment, Stage
from .logger import LOGGER
from .maintainer import Maintainer
from .pipeline import Pipeline
from .stages import ResultsType, TritonPerformanceOfflineStage, TritonPerformanceOnlineStage
from .task import Checkpoint, Dataset, SystemInfo, Task
from .triton import Triton
from .utils import clean_directory


class Preparer(abc.ABC):
    """
    Runner preparer object.
    """

    @abc.abstractmethod
    def exec(
        self,
        workspace: pathlib.Path,
        config: Config,
        pipeline: Pipeline,
        maintainer: Maintainer,
        triton: Triton,
        logs_dir: pathlib.Path,
    ):
        pass


class ExperimentPreparer(Preparer):
    """
    Experiment runner preparer object.
    """

    def exec(
        self,
        workspace: pathlib.Path,
        config: Config,
        pipeline: Pipeline,
        maintainer: Maintainer,
        triton: Triton,
        logs_dir: pathlib.Path,
    ):
        LOGGER.info("Preparing Triton container image")
        triton_container_image = self._prepare_triton_container_image(config, maintainer, triton)

        LOGGER.info("Initialize task")
        task = self._initialize_task(
            workspace=workspace,
            config=config,
            pipeline=pipeline,
            triton_container_image=triton_container_image,
            logs_dir=logs_dir,
        )

        LOGGER.info("Preparing directories")
        self._create_dirs(workspace, task)

        LOGGER.info("Clean previous run artifacts directories")
        self._clean_previous_run_artifacts(workspace, task)

        LOGGER.info("Downloading checkpoints")
        self._download_checkpoints(task)

        return task

    def _create_dirs(self, workspace: pathlib.Path, task: Task) -> None:
        """
        Create directories used to store artifacts and final results

        Returns:
            None
        """
        for directory in [task.results_dir, task.logs_dir, task.checkpoints_dir]:
            directory_path = workspace / directory
            directory_path.mkdir(parents=True, exist_ok=True)
            LOGGER.info(f"Directory {directory} created.")

    def _clean_previous_run_artifacts(self, workspace: pathlib.Path, task: Task) -> None:
        """
        Clean logs from previous run

        Returns:
            None
        """
        for directory in [
            task.logs_dir,
            task.results_dir,
        ]:
            directory_path = workspace / directory
            clean_directory(directory_path)
            LOGGER.info(f"Location {directory} cleaned.")

    def _prepare_triton_container_image(self, config: Config, maintainer: Maintainer, triton: Triton) -> str:
        """
        Prepare Triton Container Image based on provided configuration

        Returns:
            Name of container image to use in process
        """
        if not config.triton_dockerfile:
            image_name = triton.container_image(config.container_version)
            LOGGER.info(f"Using official Triton container image: {image_name}.")
            return image_name

        if config.triton_container_image:
            LOGGER.info(f"Using provided Triton Container Image: {config.triton_container_image}")
            return config.triton_container_image

        normalized_model_name = config.model_name.lower().replace("_", "-")
        image_name = f"tritonserver-{normalized_model_name}:latest"
        LOGGER.info(f"Building Triton Container Image: {image_name}")

        maintainer.build_image(
            image_name=image_name,
            image_file_path=pathlib.Path(config.triton_dockerfile),
            build_args={"FROM_IMAGE": triton.container_image(container_version=config.container_version)},
        )
        return image_name

    def _download_checkpoints(self, task: Task) -> None:
        """
        Download checkpoints
        """
        for variant, checkpoint in task.checkpoints.items():
            checkpoint_url = checkpoint.url
            download_path = checkpoint.path

            if download_path.is_dir():
                LOGGER.info(f"Checkpoint {download_path.name} already downloaded.")
                continue

            if not checkpoint_url:
                LOGGER.warning(
                    f"Checkpoint {variant} url is not provided."
                    "\nIf you want to use that checkpoint please train the model locally"
                    f"\nand copy to {download_path} directory"
                )
                continue

            download(checkpoint_url, download_path)

    def _initialize_task(
        self,
        workspace: pathlib.Path,
        config: Config,
        pipeline: Pipeline,
        triton_container_image: str,
        logs_dir: pathlib.Path,
    ) -> Task:
        """
        Initialize task object

        Args:
            workspace: Path to workspace where artifacts are stored
            config: Config object
            pipeline: Pipeline object
            triton_container_image: Triton Inference Server container image used for tests

        Returns:
            Task object
        """
        datasets = {}
        for dataset in config.datasets:
            datasets[dataset.name] = Dataset(name=dataset.name)

        checkpoints = {}
        for checkpoint in config.checkpoints:
            download_path = workspace / Task.checkpoints_dir / checkpoint.name
            checkpoints[checkpoint.name] = Checkpoint(name=checkpoint.name, url=checkpoint.url, path=download_path)

        results_types = self._task_results_types(pipeline=pipeline)

        stages = dict()
        for stage in pipeline.stages():
            stages[stage.label] = {"result_path": stage.result_path, "result_type": stage.result_type}

        experiments = list()
        for idx, configuration in enumerate(config.configurations, start=1):
            experiment = self._prepare_experiment(
                idx=idx,
                configuration=configuration,
                results_types=results_types,
                stages=stages,
            )
            experiments.append(experiment)

        system_info = SystemInfo.from_host()

        task = Task(
            model_name=config.model_name,
            framework=config.framework,
            checkpoints=checkpoints,
            datasets=datasets,
            datasets_dir=config.datasets_dir,
            experiments=experiments,
            container_version=config.container_version,
            system_info=system_info,
            triton_container_image=triton_container_image,
            triton_custom_operations=config.triton_custom_operations,
            triton_load_model_method=config.triton_load_model_method,
            started_at=int(datetime.utcnow().timestamp()),
            logs_dir=logs_dir,
        )
        return task

    def _task_results_types(self, pipeline: Pipeline) -> List[str]:
        """
        Types of results generated as part of task

        Returns:
            List of result types
        """
        results = list()
        for stage in pipeline.stages():
            if TritonPerformanceOfflineStage.label == stage.label:
                results.append(ResultsType.TRITON_PERFORMANCE_OFFLINE)
                continue

            if TritonPerformanceOnlineStage.label == stage.label:
                results.append(ResultsType.TRITON_PERFORMANCE_ONLINE)
                continue

        return results

    def _prepare_experiment(
        self,
        idx: int,
        configuration: Configuration,
        results_types: List[str],
        stages: Dict,
    ) -> Experiment:
        """
        Prepare experiments data

        Args:
            idx: Experiment index
            configuration: Configuration object
            results_types: Results types stored in experiment
            stages: Stages executed as part of experiment

        Returns:
            Experiment object
        """
        parameters = {key.lower(): value for key, value in configuration.parameters.items()}

        results_mapped = dict()
        for result_type in results_types:
            results_mapped[result_type] = result_type

        stages_mapped = dict()
        for name, stage_data in stages.items():
            stages_mapped[name] = Stage(name=name, **stage_data)

        experiment = Experiment(
            experiment_id=idx,
            parameters=parameters,
            stages=stages_mapped,
            results=results_mapped,
        )

        return experiment
PyTorch/Detection/Efficientdet/scripts/docker
docker
build
#!/bin/bash
docker build --rm -t nvcr.io/nvidia/effdet:21.06-py3-stage . -f Dockerfile
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/utils/types
types
metadata
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

from .str_enum import StrEnum


class MetaData(StrEnum):
    PATH = "path"
    EDGES = "edges"
    NODES = "nodes"
    ALIGNERS = "[gen]aligners"

    GRAPHS = "graphs"
    NAME = "name"
    COUNT = "count"
    NODE_DATA = "node_data"
    EDGE_DATA = "edge_data"

    TYPE = "type"
    DTYPE = "dtype"

    SRC = "src"
    SRC_NAME = "src_name"
    SRC_NODE_TYPE = "src_node_type"
    DST = "dst"
    DST_NAME = "dst_name"
    DST_NODE_TYPE = "dst_node_type"

    NODE_NAME = "node_name"
    NODE_COLUMNS = "node_columns"
    EDGE_NAME = "edge_name"

    LABELS = "labels"

    FEATURES = "features"
    FEATURES_PATH = "features_path"
    FEATURES_DATA = "features_data"
    FEATURE_TYPE = "feature_type"
    FEATURE_FILE = "feature_file"
    FILENAME_PREFIX = "filename_prefix"

    STRUCTURE_PATH = "structure_path"
    STRUCTURE_DATA = "structure_data"

    NODE_FEAT = "node_feat"
    EDGE_FEAT = "edge_feat"

    TRAIN_MASK = "train_mask"
    VAL_MASK = "val_mask"
    TEST_MASK = "test_mask"

    CONTINUOUS = "continuous"
    CATEGORICAL = "categorical"

    CONTINUOUS_COLUMNS = "continuous_columns"
    CATEGORICAL_COLUMNS = "categorical_columns"

    UNDIRECTED = "undirected"
    DIRECTED = "directed"

    # generation related keys
    STRUCTURE_GENERATOR = "[gen]structure_generator"
    TABULAR_GENERATORS = "[gen]tabular_generators"

    DATA_SOURCE = "data_source"
    FEATURES_LIST = "features_list"
    PARAMS = "params"
    DUMP_PATH = "dump_path"
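A minimal sketch of why a string-backed enum is convenient here: members compare equal to their raw string values, so they can index plain dictionaries loaded from JSON or YAML metadata. The metadata fragment below is illustrative, not from the repository.

# Illustrative metadata fragment, e.g. parsed from a JSON/YAML config.
metadata = {"nodes": [{"name": "user", "count": 100}]}

# StrEnum members behave as their string values, so enum-keyed lookups work
# directly on plain dicts.
first_node = metadata[MetaData.NODES][0]
print(first_node[MetaData.NAME], first_node[MetaData.COUNT])  # -> user 100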
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/triton/runner
runner
start_NVIDIA-DGX-A100-(1x-A100-80GB)
#!/bin/bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Install Docker
. /etc/os-release && \
curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - && \
echo "deb [arch=amd64] https://download.docker.com/linux/debian buster stable" > /etc/apt/sources.list.d/docker.list && \
curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | apt-key add - && \
curl -s -L https://nvidia.github.io/nvidia-docker/$ID$VERSION_ID/nvidia-docker.list > /etc/apt/sources.list.d/nvidia-docker.list && \
apt-get update && \
apt-get install -y docker-ce docker-ce-cli containerd.io nvidia-docker2

# Install packages
pip install -r triton/runner/requirements.txt

# Evaluate Runner
python3 -m "triton.runner.__main__" \
    --config-path "triton/runner/config_NVIDIA-DGX-A100-(1x-A100-80GB).yaml" \
    --device 0