import * as Cesium from 'cesium';
//@ts-ignore
const { AttributeType, ResourceCache, defined, GltfLoaderUtil, InstanceAttributeSemantic, Credit,
    //@ts-ignore
    Cartesian3, defaultValue, InterpolationType, PrimitiveLoadPlan, Sampler,
    //@ts-ignore
    hasExtension, ComponentDatatype, getAccessorByteStride, ModelComponents, Cartesian2, Cartesian4, Matrix4, Quaternion, RuntimeError,
    //@ts-ignore
    ArticulationStageType, GltfStructuralMetadataLoader, GltfGpmLoader, GltfMeshPrimitiveGpmLoader, FeatureDetection, SupportedImageFormats,
    //@ts-ignore
    oneTimeWarning
} = Cesium;
import { VertexAttributeSemanticGaussian } from './fixVertexAttributeSemantic';
// Mirror of Cesium's private GltfLoaderState enum. The numeric values must
// match Cesium's internal ones exactly, because the patched prototype methods
// below compare them against the loader instance's existing `_state` field.
const GltfLoaderState = {
    NOT_LOADED: 0,
    LOADING: 1,
    LOADED: 2,
    PROCESSING: 3,
    POST_PROCESSING: 4,
    PROCESSED: 5,
    READY: 6,
    FAILED: 7,
    UNLOADED: 8,
};
// ModelComponents constructors used by the re-implemented loader functions.
// NOTE(review): several of these names are not used in this chunk — presumably
// used by code outside the visible range; verify before trimming.
const { Attribute, Indices, FeatureIdAttribute, FeatureIdTexture, FeatureIdImplicitRange, MorphTarget, Primitive, Instances, Skin, Node,
    AnimatedPropertyType, AnimationSampler, AnimationTarget, AnimationChannel, Animation, ArticulationStage, Articulation,
    Asset, Scene, Components, MetallicRoughness, SpecularGlossiness, Specular, Anisotropy, Clearcoat, Material } = ModelComponents;
// Scratch Cartesian3 reused by parse() to avoid a per-call allocation
// (standard Cesium "scratch variable" convention).
const scratchCenter = new Cartesian3();
export default function fixGltfLoader() {
    // Force-enable Gaussian-splat support on every GltfLoader instance by
    // defining the flags on the prototype. Object.defineProperties with only
    // `value` makes them non-writable and non-enumerable.
    //@ts-ignore
    Object.defineProperties(Cesium.GltfLoader.prototype, {
        _loadGaussianSplatting: {
            value: true,
        },
        _generateGaussianSplatTexture: {
            value: true
        }
    });
    // Per-frame step for the non-texture resource state machine:
    // PROCESSING -> POST_PROCESSING -> PROCESSED -> READY.
    // Returns true once all geometry/metadata resources are READY.
    //@ts-ignore
    Cesium.GltfLoader.prototype._process = function (frameState) {
        if (this._state === GltfLoaderState.READY) {
            return true;
        }

        if (this._state === GltfLoaderState.PROCESSING) {
            processLoaders(this, frameState);
        }

        if (
            this._resourcesLoaded &&
            this._state === GltfLoaderState.POST_PROCESSING
        ) {
            postProcessGeometry(this, frameState.context);
            this._state = GltfLoaderState.PROCESSED;
        }

        if (this._resourcesLoaded && this._state === GltfLoaderState.PROCESSED) {
            // The buffer views can be unloaded once the data is copied.
            unloadBufferViewLoaders(this);

            // Similarly, if the glTF was loaded from a typed array, release the memory
            this._typedArray = undefined;

            this._state = GltfLoaderState.READY;
            return true;
        }

        return false;
    };
    // Public entry point, called once per frame by the owning Model. Kicks off
    // resource loading the first time it runs after LOADED, surfaces any errors
    // accumulated by the async loaders, then drives both the geometry and the
    // texture state machines.
    //@ts-ignore
    Cesium.GltfLoader.prototype.process = function (frameState) {
        //>>includeStart('debug', pragmas.debug);
        Cesium.Check.typeOf.object("frameState", frameState);
        //>>includeEnd('debug');

        // Start loading resources exactly once; errors are stashed on
        // _processError and handled synchronously below.
        if (
            this._state === GltfLoaderState.LOADED &&
            !defined(this._loadResourcesPromise)
        ) {
            this._loadResourcesPromise = loadResources(this, frameState)
                .then(() => {
                    this._resourcesLoaded = true;
                })
                .catch((error) => {
                    this._processError = error;
                });
        }

        if (defined(this._processError)) {
            this._state = GltfLoaderState.FAILED;
            const error = this._processError;
            this._processError = undefined;
            handleError(this, error);
        }

        // Pop the next error of the list in case there are multiple
        const textureError = this._textureErrors.pop();
        if (defined(textureError)) {
            // There shouldn't be the need to completely unload in this case. Just throw the error.
            const error = this.getError("Failed to load glTF texture", textureError);
            error.name = "TextureError";
            throw error;
        }

        if (this._state === GltfLoaderState.FAILED) {
            return false;
        }

        let ready = false;
        try {
            ready = this._process(frameState);
        } catch (error) {
            this._state = GltfLoaderState.FAILED;
            handleError(this, error);
        }

        // Since textures can be loaded independently and are handled through a separate promise, they are processed in their own function
        let texturesReady = false;
        try {
            texturesReady = this._processTextures(frameState);
        } catch (error) {
            this._textureState = GltfLoaderState.FAILED;
            handleError(this, error);
        }

        // With incremental texture loading the loader reports ready as soon as
        // geometry is ready; textures continue streaming in afterwards.
        if (this._incrementallyLoadTextures) {
            return ready;
        }

        return ready && texturesReady;
    };
    function postProcessGeometry(loader: any, context: any) {
        // Run each primitive's post-processing step (e.g. generating outline
        // coordinates or Gaussian-splat data) now that its geometry is loaded.
        for (const loadPlan of loader._primitiveLoadPlans) {
            loadPlan.postProcess(context);

            const createdNewBuffers =
                loadPlan.needsOutlines || loadPlan.needsGaussianSplats;
            if (createdNewBuffers) {
                // Buffers produced during post-processing were created after the
                // geometry loaders finished, so the glTF loader takes ownership
                // of them and destroys them when it is itself destroyed.
                gatherPostProcessBuffers(loader, loadPlan);
            }
        }
    }
    async function loadResources(loader: any, frameState: any) {
        // WebP support detection is asynchronous; wait for it so we know which
        // image formats this loader is allowed to request.
        //@ts-ignore
        if (!FeatureDetection.supportsWebP.initialized) {
            //@ts-ignore
            await FeatureDetection.supportsWebP.initialize();
        }

        loader._supportedImageFormats = new SupportedImageFormats({
            //@ts-ignore
            webp: FeatureDetection.supportsWebP(),
            basis: frameState.context.supportsBasis,
        });

        // Loaders that create GPU resources need to be processed every frame until they become
        // ready since the JobScheduler is not able to execute all jobs in a single
        // frame. Any promise failures are collected, and will be handled synchronously in process().
        // Also note that it's fine to call process before a loader is ready to process or
        // after it has failed; nothing will happen.
        const resourcesPromise = parse(loader, frameState);

        // All resource loaders have been created, so we can begin processing
        loader._state = GltfLoaderState.PROCESSING;
        loader._textureState = GltfLoaderState.PROCESSING;

        // The JSON loader can be released now that parse() has read it — unless
        // the glTF loader was destroyed and already unloaded it.
        if (loader._releaseGltfJson && defined(loader._gltfJsonLoader)) {
            ResourceCache.unload(loader._gltfJsonLoader);
            loader._gltfJsonLoader = undefined;
        }

        return resourcesPromise;
    }
    function parse(loader: any, frameState: any) {
        // Parse the glTF JSON into ModelComponents, create all asynchronous
        // sub-loaders (geometry, metadata, textures), and return a promise that
        // resolves when the required resources have loaded.
        const gltf = loader.gltfJson;
        //@ts-ignore
        const extensions = defaultValue(gltf.extensions, defaultValue.EMPTY_OBJECT);
        const structuralMetadataExtension = extensions.EXT_structural_metadata;
        const featureMetadataExtensionLegacy = extensions.EXT_feature_metadata;
        const cesiumRtcExtension = extensions.CESIUM_RTC;

        if (defined(featureMetadataExtensionLegacy)) {
            // If the old EXT_feature_metadata extension is present, sort the IDs of the
            // feature tables and feature textures so we don't have to do this once
            // per primitive.
            //
            // This must run before loadNodes so these IDs are available when
            // attributes are processed.
            const featureTables = featureMetadataExtensionLegacy.featureTables;
            const featureTextures = featureMetadataExtensionLegacy.featureTextures;
            const allPropertyTableIds = defined(featureTables) ? featureTables : [];
            const allFeatureTextureIds = defined(featureTextures)
                ? featureTextures
                : [];
            loader._sortedPropertyTableIds = Object.keys(allPropertyTableIds).sort();
            loader._sortedFeatureTextureIds = Object.keys(allFeatureTextureIds).sort();
        }

        // Nodes must load first: skins and animations resolve node indices
        // against the returned array.
        const nodes = loadNodes(loader, frameState);
        const skins = loadSkins(loader, nodes);
        const animations = loadAnimations(loader, nodes);
        const articulations = loadArticulations(gltf);
        const scene = loadScene(gltf, nodes);

        const components = new Components();
        const asset = new Asset();
        const copyright = gltf.asset.copyright;
        if (defined(copyright)) {
            // Multiple credits are separated by semicolons, per Cesium convention.
            const credits = copyright.split(";").map(function (string: any) {
                return new Credit(string.trim());
            });
            asset.credits = credits;
        }

        components.asset = asset;
        components.scene = scene;
        components.nodes = nodes;
        components.skins = skins;
        components.animations = animations;
        components.articulations = articulations;
        components.upAxis = loader._upAxis;
        components.forwardAxis = loader._forwardAxis;

        if (defined(cesiumRtcExtension)) {
            // CESIUM_RTC is almost always WGS84 coordinates so no axis conversion needed
            const center = Cartesian3.fromArray(
                cesiumRtcExtension.center,
                0,
                scratchCenter,
            );
            components.transform = Matrix4.fromTranslation(
                center,
                components.transform,
            );
        }

        loader._components = components;

        // Load structural metadata (property tables and property textures)
        if (
            defined(structuralMetadataExtension) ||
            defined(featureMetadataExtensionLegacy)
        ) {
            const promise = loadStructuralMetadata(
                loader,
                structuralMetadataExtension,
                featureMetadataExtensionLegacy,
                frameState,
            );
            loader._loaderPromises.push(promise);
        }

        // Load NGA_gpm_local from root object
        const gpmExtension = extensions.NGA_gpm_local;
        if (defined(gpmExtension)) {
            const gltfGpmLocal = GltfGpmLoader.load(gpmExtension);
            loader._components.extensions["NGA_gpm_local"] = gltfGpmLocal;
        }

        // Load NGA_gpm_local from mesh primitives
        const meshes = gltf.meshes;
        if (defined(meshes)) {
            for (const mesh of meshes) {
                const primitives = mesh.primitives;
                if (defined(primitives)) {
                    for (const primitive of primitives) {
                        const primitiveExtensions = primitive.extensions;
                        if (defined(primitiveExtensions)) {
                            const meshPrimitiveGpmExtension = primitiveExtensions.NGA_gpm_local;
                            if (defined(meshPrimitiveGpmExtension)) {
                                const promise = loadMeshPrimitiveGpm(
                                    loader,
                                    gltf,
                                    meshPrimitiveGpmExtension,
                                    frameState,
                                );
                                loader._loaderPromises.push(promise);
                            }
                        }
                    }
                }
            }
        }

        // Gather promises and handle any errors
        const readyPromises: any = [];
        readyPromises.push.apply(readyPromises, loader._loaderPromises);

        // When incrementallyLoadTextures is true, the errors are caught and thrown individually
        // since it doesn't affect the overall loader state
        if (!loader._incrementallyLoadTextures) {
            readyPromises.push.apply(readyPromises, loader._texturesPromises);
        }

        return Promise.all(readyPromises);
    }
    function loadNodes(loader: any, frameState: any) {
        const nodeJsons = loader.gltfJson.nodes;
        if (!defined(nodeJsons)) {
            return [];
        }

        const loadedNodes = nodeJsons.map(function (nodeJson: any, i: any) {
            const node = loadNode(loader, nodeJson, frameState);
            node.index = i;
            return node;
        });

        for (let i = 0; i < loadedNodes.length; ++i) {
            const childrenNodeIds = nodeJsons[i].children;
            if (defined(childrenNodeIds)) {
                for (let j = 0; j < childrenNodeIds.length; ++j) {
                    loadedNodes[i].children.push(loadedNodes[childrenNodeIds[j]]);
                }
            }
        }

        return loadedNodes;
    }
    function loadNode(loader: any, gltfNode: any, frameState: any) {
        // Convert a single glTF node JSON object into a ModelComponents.Node,
        // including its transform, instancing extension, articulation name,
        // mesh primitives, and morph weights.
        const node = new Node();

        node.name = gltfNode.name;

        // A node carries either a matrix or separate TRS properties; fromArray
        // (helper defined elsewhere in this file) leaves absent ones undefined.
        node.matrix = fromArray(Matrix4, gltfNode.matrix);
        node.translation = fromArray(Cartesian3, gltfNode.translation);
        node.rotation = fromArray(Quaternion, gltfNode.rotation);
        node.scale = fromArray(Cartesian3, gltfNode.scale);

        const nodeExtensions = defaultValue(
            gltfNode.extensions,
            //@ts-ignore
            defaultValue.EMPTY_OBJECT,
        );
        const instancingExtension = nodeExtensions.EXT_mesh_gpu_instancing;
        const articulationsExtension = nodeExtensions.AGI_articulations;

        if (defined(instancingExtension)) {
            // GPU instancing and classification are mutually exclusive.
            if (loader._loadForClassification) {
                throw new RuntimeError(
                    "Models with the EXT_mesh_gpu_instancing extension cannot be used for classification.",
                );
            }
            node.instances = loadInstances(loader, nodeExtensions, frameState);
        }

        if (defined(articulationsExtension)) {
            node.articulationName = articulationsExtension.articulationName;
        }

        const meshId = gltfNode.mesh;
        if (defined(meshId)) {
            const mesh = loader.gltfJson.meshes[meshId];
            const primitives = mesh.primitives;
            for (let i = 0; i < primitives.length; ++i) {
                node.primitives.push(
                    loadPrimitive(
                        loader,
                        primitives[i],
                        defined(node.instances),
                        frameState,
                    ),
                );
            }

            // If the node has no weights array, it will look for the weights array provided
            // by the mesh. If both are undefined, it will default to an array of zero weights.
            const morphWeights = defaultValue(gltfNode.weights, mesh.weights);
            // Morph target count is taken from the first primitive — presumably
            // all primitives of a mesh share the same target count; verify
            // against the glTF spec if this ever fails.
            const targets = node.primitives[0].morphTargets;

            // Since meshes are not stored as separate components, the mesh weights will still
            // be stored at the node level.
            node.morphWeights = defined(morphWeights)
                ? morphWeights.slice()
                : new Array(targets.length).fill(0.0);
        }

        return node;
    }
    function loadPrimitive(loader: any, gltfPrimitive: any, hasInstances: any, frameState: any) {
        // Convert one glTF mesh primitive into a ModelComponents.Primitive and
        // record a PrimitiveLoadPlan so post-processing (outlines, Gaussian
        // splats) can run after the geometry loaders finish.
        const primitive = new Primitive();
        //@ts-ignore
        const primitivePlan = new PrimitiveLoadPlan(primitive);
        loader._primitiveLoadPlans.push(primitivePlan);

        const materialId = gltfPrimitive.material;
        if (defined(materialId)) {
            //@ts-ignore
            primitive.material = loadMaterial(
                loader,
                loader.gltfJson.materials[materialId],
                frameState,
            );
        }

        const extensions = Cesium.defaultValue(
            gltfPrimitive.extensions,
            //@ts-ignore
            Cesium.defaultValue.EMPTY_OBJECT,
        );

        // needsPostProcessing forces attributes/indices to load as typed arrays
        // (instead of GPU buffers) so they can be rewritten before upload.
        let needsPostProcessing = false;
        const outlineExtension = extensions.CESIUM_primitive_outline;
        if (loader._loadPrimitiveOutline && defined(outlineExtension)) {
            needsPostProcessing = true;
            primitivePlan.needsOutlines = true;
            primitivePlan.outlineIndices = loadPrimitiveOutline(
                loader,
                outlineExtension,
            );
        }

        // The key addition of this patched loader: recognize the Gaussian
        // splatting extension and route the primitive through post-processing.
        const gaussianSplattingExtension = extensions.KHR_gaussian_splatting;

        if (loader._loadGaussianSplatting && defined(gaussianSplattingExtension)) {
            needsPostProcessing = true;
            primitivePlan.needsGaussianSplats = true;
            primitivePlan.generateGaussianSplatTexture =
                loader._generateGaussianSplatTexture;
        }

        const loadForClassification = loader._loadForClassification;
        const draco = extensions.KHR_draco_mesh_compression;

        let hasFeatureIds = false;
        const attributes = gltfPrimitive.attributes;
        if (defined(attributes)) {
            for (const semantic in attributes) {
                if (!attributes.hasOwnProperty(semantic)) {
                    continue;
                }
                const accessorId = attributes[semantic];
                // Uses the patched semantic table so splat-specific attributes
                // are recognized alongside the standard ones.
                const semanticInfo = getSemanticInfo(
                    loader,
                    VertexAttributeSemanticGaussian,
                    semantic,
                );

                const modelSemantic = semanticInfo.modelSemantic;
                // Classification models only need position/texcoord-style attributes.
                if (loadForClassification && !isClassificationAttribute(modelSemantic)) {
                    continue;
                }
                //@ts-ignore
                if (modelSemantic === VertexAttributeSemanticGaussian.FEATURE_ID) {
                    hasFeatureIds = true;
                }

                const attributePlan = loadVertexAttribute(
                    loader,
                    accessorId,
                    semanticInfo,
                    gltfPrimitive,
                    draco,
                    hasInstances,
                    needsPostProcessing,
                    frameState,
                );

                primitivePlan.attributePlans.push(attributePlan);
                //@ts-ignore
                primitive.attributes.push(attributePlan.attribute);
            }
        }

        const targets = gltfPrimitive.targets;
        // Morph targets are disabled for classification models.
        if (defined(targets) && !loadForClassification) {
            for (let i = 0; i < targets.length; ++i) {
                //@ts-ignore
                primitive.morphTargets.push(
                    loadMorphTarget(
                        loader,
                        targets[i],
                        needsPostProcessing,
                        primitivePlan,
                        frameState,
                    ),
                );
            }
        }

        const indices = gltfPrimitive.indices;
        if (defined(indices)) {
            const indicesPlan = loadIndices(
                loader,
                indices,
                gltfPrimitive,
                draco,
                hasFeatureIds,
                needsPostProcessing,
                frameState,
            );

            if (defined(indicesPlan)) {
                primitivePlan.indicesPlan = indicesPlan;
                //@ts-ignore
                primitive.indices = indicesPlan.indices;
            }
        }

        // With the latest revision, feature IDs are defined in EXT_mesh_features
        // while EXT_structural_metadata is for defining property textures and
        // property mappings. In the legacy EXT_feature_metadata, these concepts
        // were all in one extension.
        const structuralMetadata = extensions.EXT_structural_metadata;
        const meshFeatures = extensions.EXT_mesh_features;
        const featureMetadataLegacy = extensions.EXT_feature_metadata;
        const hasFeatureMetadataLegacy = defined(featureMetadataLegacy);

        // Load feature Ids
        if (defined(meshFeatures)) {
            loadPrimitiveFeatures(loader, primitive, meshFeatures, frameState);
        } else if (hasFeatureMetadataLegacy) {
            loadPrimitiveFeaturesLegacy(
                loader,
                primitive,
                featureMetadataLegacy,
                frameState,
            );
        }

        // Load structural metadata
        if (defined(structuralMetadata)) {
            loadPrimitiveMetadata(primitive, structuralMetadata);
        } else if (hasFeatureMetadataLegacy) {
            loadPrimitiveMetadataLegacy(loader, primitive, featureMetadataLegacy);
        }

        const primitiveType = gltfPrimitive.mode;
        if (loadForClassification && primitiveType !== Cesium.PrimitiveType.TRIANGLES) {
            throw new Cesium.RuntimeError(
                "Only triangle meshes can be used for classification.",
            );
        }
        //@ts-ignore
        primitive.primitiveType = primitiveType;

        return primitive;
    }
}
//#region Original Cesium loader code (unchanged)
function unloadBufferViewLoaders(loader: any) {
    // Release every buffer-view loader back to the ResourceCache, skipping any
    // that were already destroyed, then clear the list.
    //
    // The original wrote `bufferViewLoaders[i] = !isDestroyed() && unload(...)`,
    // storing the short-circuit result into the array — a dead assignment,
    // since the array's length is set to 0 immediately afterwards. An explicit
    // guard expresses the intent without the confusing write.
    const bufferViewLoaders = loader._bufferViewLoaders;
    for (let i = 0; i < bufferViewLoaders.length; ++i) {
        const bufferViewLoader = bufferViewLoaders[i];
        if (!bufferViewLoader.isDestroyed()) {
            ResourceCache.unload(bufferViewLoader);
        }
    }
    loader._bufferViewLoaders.length = 0;
}
function processLoaders(loader: any, frameState: any) {
    // Pump every sub-loader once this frame. Note that process() must be called
    // on ALL loaders each frame (never short-circuited), because loaders that
    // create GPU resources need repeated process() calls to finish their jobs.
    let ready = true;
    const geometryLoaders = loader._geometryLoaders;
    for (let i = 0; i < geometryLoaders.length; ++i) {
        const geometryReady = geometryLoaders[i].process(frameState);
        // Each geometry loader has an optional one-shot callback that fires
        // the first frame it becomes ready.
        if (geometryReady && defined(loader._geometryCallbacks[i])) {
            loader._geometryCallbacks[i]();
            loader._geometryCallbacks[i] = undefined;
        }
        ready = ready && geometryReady;
    }

    const structuralMetadataLoader = loader._structuralMetadataLoader;
    if (defined(structuralMetadataLoader)) {
        const metadataReady = structuralMetadataLoader.process(frameState);
        if (metadataReady) {
            loader._components.structuralMetadata =
                structuralMetadataLoader.structuralMetadata;
        }
        ready = ready && metadataReady;
    }

    const meshPrimitiveGpmLoader = loader._meshPrimitiveGpmLoader;
    if (defined(meshPrimitiveGpmLoader)) {
        const metadataReady = meshPrimitiveGpmLoader.process(frameState);
        if (metadataReady) {
            // NGA_gpm_local takes precedence over EXT_structural_metadata;
            // warn (once) when it overwrites already-loaded metadata.
            if (defined(loader._components.structuralMetadata)) {
                oneTimeWarning(
                    "structural-metadata-gpm",
                    "The model defines both the 'EXT_structural_metadata' extension and the " +
                    "'NGA_gpm_local' extension. The data from the 'EXT_structural_metadata' " +
                    "extension will be replaced with the data from the 'NGA_gpm_local' extension, " +
                    "and will no longer be available for styling and picking.",
                );
            }
            loader._components.structuralMetadata =
                meshPrimitiveGpmLoader.structuralMetadata;
        }
        ready = ready && metadataReady;
    }

    if (ready) {
        // Geometry requires further processing
        loader._state = GltfLoaderState.POST_PROCESSING;
    }
}
function gatherPostProcessBuffers(loader: any, primitiveLoadPlan: any) {
    // Collect every GPU buffer generated during post-processing so the loader
    // owns them and can destroy them along with its other resources.
    const buffers = loader._postProcessBuffers;
    const primitive = primitiveLoadPlan.primitive;

    // Outline coordinates, when present, are always loaded as a buffer.
    const outlineCoordinates = primitive.outlineCoordinates;
    if (defined(outlineCoordinates)) {
        buffers.push(outlineCoordinates.buffer);
    }

    // Post-processing loads all attributes as typed arrays, so any attribute
    // that now has a buffer must have been newly generated.
    for (const attribute of primitive.attributes) {
        if (defined(attribute.buffer)) {
            buffers.push(attribute.buffer);
        }
    }

    // The same reasoning applies to the index buffer.
    const indices = primitive.indices;
    if (defined(indices) && defined(indices.buffer)) {
        buffers.push(indices.buffer);
    }
}
function handleError(gltfLoader: any, error: any) {
    // Release everything the loader holds, then surface a wrapped error that
    // carries the underlying cause.
    gltfLoader.unload();
    throw gltfLoader.getError("Failed to load glTF", error);
}
function loadScene(gltf: any, nodes: any) {
    const scene = new Scene();
    const sceneNodeIds = getSceneNodeIds(gltf);
    scene.nodes = sceneNodeIds.map(function (sceneNodeId: any) {
        return nodes[sceneNodeId];
    });
    return scene;
}
function getSceneNodeIds(gltf: any) {
    // Prefer the node ids of the default scene when one is specified.
    if (defined(gltf.scenes) && defined(gltf.scene)) {
        const sceneNodeIds = gltf.scenes[gltf.scene].nodes;
        if (defined(sceneNodeIds)) {
            return sceneNodeIds;
        }
    }
    // Otherwise fall back to all nodes, or an empty list when there are none.
    const allNodeIds = gltf.nodes;
    return defined(allNodeIds) ? allNodeIds : [];
}
function loadArticulations(gltf: any) {
    // AGI_articulations lives on the glTF root; no extension means no articulations.
    //@ts-ignore
    const extensions = defaultValue(gltf.extensions, defaultValue.EMPTY_OBJECT);
    const articulationJsons = extensions.AGI_articulations?.articulations;
    return defined(articulationJsons)
        ? articulationJsons.map(loadArticulation)
        : [];
}
function loadArticulation(articulationJson: any) {
    const articulation = new Articulation();
    articulation.name = articulationJson.name;
    articulation.stages = articulationJson.stages.map(loadArticulationStage);
    return articulation;
}
async function loadMeshPrimitiveGpm(loader: any, gltf: any, extension: any, frameState: any) {
    // Create a loader for the NGA_gpm_local data attached to a mesh primitive
    // and kick it off; the loader is stored on the glTF loader so that
    // processLoaders() can pump it every frame until it is ready.
    const options = {
        gltf: gltf,
        extension: extension,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
        supportedImageFormats: loader._supportedImageFormats,
        frameState: frameState,
        asynchronous: loader._asynchronous,
    };
    const meshPrimitiveGpmLoader = new GltfMeshPrimitiveGpmLoader(options);
    loader._meshPrimitiveGpmLoader = meshPrimitiveGpmLoader;
    return meshPrimitiveGpmLoader.load();
}
function loadArticulationStage(gltfStage: any) {
    // Map one articulation stage JSON object onto an ArticulationStage component.
    const stage = new ArticulationStage();
    stage.name = gltfStage.name;
    // Stage types appear in various cases in glTF JSON; the enum keys are uppercase.
    stage.type = ArticulationStageType[gltfStage.type.toUpperCase()];
    stage.minimumValue = gltfStage.minimumValue;
    stage.maximumValue = gltfStage.maximumValue;
    stage.initialValue = gltfStage.initialValue;
    return stage;
}
async function loadStructuralMetadata(
    loader: any,
    extension: any,
    extensionLegacy: any,
    frameState: any,
) {
    // Create a loader for EXT_structural_metadata (or the legacy
    // EXT_feature_metadata) and kick it off; processLoaders() pumps it each
    // frame until the metadata is ready.
    const options = {
        gltf: loader.gltfJson,
        extension: extension,
        extensionLegacy: extensionLegacy,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
        supportedImageFormats: loader._supportedImageFormats,
        frameState: frameState,
        asynchronous: loader._asynchronous,
    };
    const structuralMetadataLoader = new GltfStructuralMetadataLoader(options);
    loader._structuralMetadataLoader = structuralMetadataLoader;
    return structuralMetadataLoader.load();
}
function loadAnimations(loader: any, nodes: any) {
    // Animations are disabled for classification models, and a glTF may
    // simply contain none.
    const animationJsons = loader.gltfJson.animations;
    if (loader._loadForClassification || !defined(animationJsons)) {
        return [];
    }

    return animationJsons.map((animationJson: any, index: any) => {
        const animation = loadAnimation(loader, animationJson, nodes);
        animation.index = index;
        return animation;
    });
}
function loadAnimation(loader: any, animationJson: any, nodes: any) {
    // Samplers must be loaded first: channels reference them by index.
    const samplers = animationJson.samplers.map((samplerJson: any, index: any) => {
        const sampler = loadAnimationSampler(loader, samplerJson);
        sampler.index = index;
        return sampler;
    });

    const animation = new Animation();
    animation.name = animationJson.name;
    animation.samplers = samplers;
    animation.channels = animationJson.channels.map((channelJson: any) =>
        loadAnimationChannel(channelJson, samplers, nodes),
    );

    return animation;
}
function loadAnimationSampler(loader: any, gltfSampler: any) {
    // Resolve the sampler's input (keyframe times) and output (keyframe
    // values) accessors plus its interpolation mode.
    const accessors = loader.gltfJson.accessors;
    const animationSampler = new AnimationSampler();

    animationSampler.input = loadAccessor(loader, accessors[gltfSampler.input]);

    // An unknown or missing interpolation falls back to LINEAR, the glTF default.
    animationSampler.interpolation = defaultValue(
        InterpolationType[gltfSampler.interpolation],
        InterpolationType.LINEAR,
    );

    animationSampler.output = loadAccessor(loader, accessors[gltfSampler.output], true);

    return animationSampler;
}
function loadAnimationChannel(gltfChannel: any, samplers: any, nodes: any) {
    const animationChannel = new AnimationChannel();

    const samplerIndex = gltfChannel.sampler;
    animationChannel.sampler = samplers[samplerIndex];
    animationChannel.target = loadAnimationTarget(gltfChannel.target, nodes);

    return animationChannel;
}
function loadAnimationTarget(gltfTarget: any, nodes: any) {
    const nodeIndex = gltfTarget.node;
    // Channels without a target node must be ignored; returning undefined is
    // the easiest way to signal that to the caller.
    if (!defined(nodeIndex)) {
        return undefined;
    }

    const animationTarget = new AnimationTarget();
    animationTarget.node = nodes[nodeIndex];
    // glTF paths are lowercase ("translation", ...) while the enum keys are uppercase.
    animationTarget.path = AnimatedPropertyType[gltfTarget.path.toUpperCase()];
    return animationTarget;
}
function loadSkins(loader: any, nodes: any) {
    // Skins are disabled for classification models, and many glTFs have none.
    const skinJsons = loader.gltfJson.skins;
    if (loader._loadForClassification || !defined(skinJsons)) {
        return [];
    }

    const loadedSkins = skinJsons.map((skinJson: any, index: any) => {
        const skin = loadSkin(loader, skinJson, nodes);
        skin.index = index;
        return skin;
    });

    // Attach each loaded skin to the node(s) that reference it.
    const nodeJsons = loader.gltfJson.nodes;
    nodes.forEach((node: any, index: any) => {
        const skinId = nodeJsons[index].skin;
        if (defined(skinId)) {
            node.skin = loadedSkins[skinId];
        }
    });

    return loadedSkins;
}
function loadSkin(loader: any, gltfSkin: any, nodes: any) {
    const skin = new Skin();

    // Resolve joint node indices into their loaded Node components.
    const jointIds = gltfSkin.joints;
    skin.joints = jointIds.map(function (jointId: any) {
        return nodes[jointId];
    });

    // Per the glTF spec, absent inverse bind matrices default to identity.
    const inverseBindMatricesAccessorId = gltfSkin.inverseBindMatrices;
    if (!defined(inverseBindMatricesAccessorId)) {
        skin.inverseBindMatrices = jointIds.map(() => Matrix4.IDENTITY);
    } else {
        const accessor = loader.gltfJson.accessors[inverseBindMatricesAccessorId];
        skin.inverseBindMatrices = loadAccessor(loader, accessor);
    }

    return skin;
}

function loadInstances(loader: any, nodeExtensions: any, frameState: any) {
    const instancingExtension = nodeExtensions.EXT_mesh_gpu_instancing;
    const instances = new Instances();

    // Load every instanced attribute (TRANSLATION, ROTATION, SCALE, _FEATURE_ID_n, ...).
    const attributes = instancingExtension.attributes;
    if (defined(attributes)) {
        for (const semantic of Object.keys(attributes)) {
            const accessorId = attributes[semantic];
            instances.attributes.push(
                loadInstancedAttribute(loader, accessorId, attributes, semantic, frameState),
            );
        }
    }

    // Instance feature IDs come either from the current EXT_instance_features
    // extension on the node, or from the legacy EXT_feature_metadata extension
    // nested inside the instancing extension itself.
    const instancingExtExtensions = defaultValue(
        instancingExtension.extensions,
        //@ts-ignore
        defaultValue.EMPTY_OBJECT,
    );
    const instanceFeatures = nodeExtensions.EXT_instance_features;
    const featureMetadataLegacy = instancingExtExtensions.EXT_feature_metadata;

    if (defined(instanceFeatures)) {
        loadInstanceFeatures(instances, instanceFeatures);
    } else if (defined(featureMetadataLegacy)) {
        loadInstanceFeaturesLegacy(
            loader.gltfJson,
            instances,
            featureMetadataLegacy,
            loader._sortedPropertyTableIds,
        );
    }

    return instances;
}
// For backwards compatibility with the legacy EXT_feature_metadata
// extension: translate its featureIdAttributes into instance feature ID
// components (attribute-based when an attribute is present, implicit
// range otherwise).
function loadInstanceFeaturesLegacy(
    gltf: any,
    instances: any,
    metadataExtension: any,
    sortedPropertyTableIds: any,
) {
    // featureTables supplies the featureCount for each referenced table.
    const featureTables = gltf.extensions.EXT_feature_metadata.featureTables;

    const featureIdAttributes = metadataExtension.featureIdAttributes;
    if (!defined(featureIdAttributes)) {
        return;
    }

    featureIdAttributes.forEach((featureIdAttribute: any, i: number) => {
        const featureTableId = featureIdAttribute.featureTable;
        const propertyTableId = sortedPropertyTableIds.indexOf(featureTableId);
        const featureCount = featureTables[featureTableId].count;
        const label = `instanceFeatureId_${i}`;

        const component = defined(featureIdAttribute.featureIds.attribute)
            ? loadFeatureIdAttributeLegacy(
                featureIdAttribute,
                propertyTableId,
                featureCount,
                label,
            )
            : loadFeatureIdImplicitRangeLegacy(
                featureIdAttribute,
                propertyTableId,
                featureCount,
                label,
            );

        instances.featureIds.push(component);
    });
}
/**
 * Loads one EXT_mesh_gpu_instancing attribute (TRANSLATION / ROTATION /
 * SCALE / feature IDs) and decides whether it must be available as a GPU
 * buffer, a CPU typed array, or both.
 *
 * NOTE(review): the buffer/typed-array flag logic below mirrors upstream
 * CesiumJS GltfLoader and is order-sensitive; keep it in sync when
 * upgrading the bundled Cesium version.
 */
function loadInstancedAttribute(
    loader: any,
    accessorId: any,
    attributes: any,
    gltfSemantic: any,
    frameState: any,
) {
    const accessors = loader.gltfJson.accessors;
    const hasRotation = defined(attributes.ROTATION);
    // A TRANSLATION accessor with min/max lets the bounding volume be
    // computed without reading the data back on the CPU.
    const hasTranslationMinMax =
        defined(attributes.TRANSLATION) &&
        defined(accessors[attributes.TRANSLATION].min) &&
        defined(accessors[attributes.TRANSLATION].max);

    // Map the glTF semantic string to the model's instance semantic.
    const semanticInfo = getSemanticInfo(
        loader,
        InstanceAttributeSemantic,
        gltfSemantic,
    );
    const modelSemantic = semanticInfo.modelSemantic;

    const isTransformAttribute =
        modelSemantic === InstanceAttributeSemantic.TRANSLATION ||
        modelSemantic === InstanceAttributeSemantic.ROTATION ||
        modelSemantic === InstanceAttributeSemantic.SCALE;
    const isTranslationAttribute =
        modelSemantic === InstanceAttributeSemantic.TRANSLATION;

    // Load the attributes as typed arrays only if:
    // - loadAttributesAsTypedArray is true
    // - the instances have rotations. This only applies to the transform attributes,
    //   since the instance matrices are computed on the CPU. This avoids the
    //   expensive quaternion -> rotation matrix conversion in the shader.
    // - GPU instancing is not supported.
    const loadAsTypedArrayOnly =
        loader._loadAttributesAsTypedArray ||
        (hasRotation && isTransformAttribute) ||
        !frameState.context.instancedArrays;
    // WebGL1 picking needs CPU access to the data.
    const loadTypedArrayForPicking =
        loader._enablePick && !frameState.context.webgl2;

    const loadBuffer = !loadAsTypedArrayOnly;

    // Load the translations as a typed array in addition to the buffer if
    // - the accessor does not have a min and max. The values will be used
    //   for computing an accurate bounding volume.
    // - the model will be projected to 2D.
    const loadFor2D = loader._loadAttributesFor2D && !frameState.scene3DOnly;
    const loadTranslationAsTypedArray =
        isTranslationAttribute &&
        (!hasTranslationMinMax || loadFor2D || loadTypedArrayForPicking);

    const loadTypedArray = loadAsTypedArrayOnly || loadTranslationAsTypedArray;

    // Don't pass in primitive or draco object since instanced attributes can't be draco compressed
    return loadAttribute(
        loader,
        accessorId,
        semanticInfo,
        undefined,
        undefined,
        loadBuffer,
        loadTypedArray,
        frameState,
    );
}
// For EXT_instance_features: build one feature ID component per entry in
// the extension's featureIds array.
function loadInstanceFeatures(instances: any, instanceFeaturesExtension: any) {
    // The featureIds array is required by the EXT_instance_features spec.
    const featureIdsArray = instanceFeaturesExtension.featureIds;

    featureIdsArray.forEach((featureIds: any, i: number) => {
        const label = `instanceFeatureId_${i}`;

        // Without an attribute, IDs default to the instance ID itself,
        // which is expressed as an implicit range with offset 0, repeat 1.
        const component = defined(featureIds.attribute)
            ? loadFeatureIdAttribute(featureIds, label)
            : loadDefaultFeatureIds(featureIds, label);

        instances.featureIds.push(component);
    });
}
// Copies primitive-level EXT_structural_metadata references (property
// textures and property attributes) onto the runtime primitive.
function loadPrimitiveMetadata(primitive: any, structuralMetadataExtension: any) {
    if (!defined(structuralMetadataExtension)) {
        return;
    }

    const { propertyTextures, propertyAttributes } = structuralMetadataExtension;

    if (defined(propertyTextures)) {
        primitive.propertyTextureIds = propertyTextures;
    }
    if (defined(propertyAttributes)) {
        primitive.propertyAttributeIds = propertyAttributes;
    }
}

// For the legacy EXT_feature_metadata extension: feature textures are now
// identified by integer index, so convert each string ID to its position
// in the sorted feature-texture ID list.
function loadPrimitiveMetadataLegacy(loader: any, primitive: any, metadataExtension: any) {
    const featureTextures = metadataExtension.featureTextures;
    if (!defined(featureTextures)) {
        return;
    }
    primitive.propertyTextureIds = featureTextures.map((id: any) =>
        loader._sortedFeatureTextureIds.indexOf(id),
    );
}
// for EXT_mesh_features
function loadFeatureIdAttribute(featureIds: any, positionalLabel: any) {
    const featureIdAttribute = new FeatureIdAttribute();
    featureIdAttribute.featureCount = featureIds.featureCount;
    featureIdAttribute.nullFeatureId = featureIds.nullFeatureId;
    featureIdAttribute.propertyTableId = featureIds.propertyTable;
    featureIdAttribute.setIndex = featureIds.attribute;
    featureIdAttribute.label = featureIds.label;
    featureIdAttribute.positionalLabel = positionalLabel;
    return featureIdAttribute;
}

// for backwards compatibility with EXT_feature_metadata
function loadFeatureIdAttributeLegacy(
    gltfFeatureIdAttribute: any,
    featureTableId: any,
    featureCount: any,
    positionalLabel: any,
) {
    const featureIdAttribute = new FeatureIdAttribute();
    const featureIds = gltfFeatureIdAttribute.featureIds;
    featureIdAttribute.featureCount = featureCount;
    featureIdAttribute.propertyTableId = featureTableId;
    featureIdAttribute.setIndex = getSetIndex(featureIds.attribute);
    featureIdAttribute.positionalLabel = positionalLabel;
    return featureIdAttribute;
}
// For EXT_mesh_features: wrap a texture-based feature ID set, forcing
// nearest sampling so IDs are never blended between texels.
function loadFeatureIdTexture(
    loader: any,
    gltfFeatureIdTexture: any,
    frameState: any,
    positionalLabel: any,
) {
    const featureIdTexture = new FeatureIdTexture();
    featureIdTexture.featureCount = gltfFeatureIdTexture.featureCount;
    featureIdTexture.nullFeatureId = gltfFeatureIdTexture.nullFeatureId;
    featureIdTexture.propertyTableId = gltfFeatureIdTexture.propertyTable;
    featureIdTexture.label = gltfFeatureIdTexture.label;
    featureIdTexture.positionalLabel = positionalLabel;

    const textureInfo = gltfFeatureIdTexture.texture;
    featureIdTexture.textureReader = loadTexture(
        loader,
        textureInfo,
        frameState,
        Sampler.NEAREST, // Feature ID textures require nearest sampling
    );

    // Though the new channel index is more future-proof, this implementation
    // only supports RGBA textures. At least for now, the string representation
    // is more useful for generating shader code.
    const channels = defined(textureInfo.channels) ? textureInfo.channels : [0];
    let channelString = "";
    for (const channelIndex of channels) {
        channelString += "rgba".charAt(channelIndex);
    }
    featureIdTexture.textureReader.channels = channelString;

    return featureIdTexture;
}

// for backwards compatibility with EXT_feature_metadata
function loadFeatureIdTextureLegacy(
    loader: any,
    gltfFeatureIdTexture: any,
    featureTableId: any,
    frameState: any,
    featureCount: any,
    positionalLabel: any,
) {
    const featureIdTexture = new FeatureIdTexture();
    const featureIds = gltfFeatureIdTexture.featureIds;
    const textureInfo = featureIds.texture;
    featureIdTexture.featureCount = featureCount;
    featureIdTexture.propertyTableId = featureTableId;
    featureIdTexture.textureReader = loadTexture(
        loader,
        textureInfo,
        frameState,
        Sampler.NEAREST, // Feature ID textures require nearest sampling
    );

    featureIdTexture.textureReader.channels = featureIds.channels;
    featureIdTexture.positionalLabel = positionalLabel;

    return featureIdTexture;
}
// For EXT_mesh_features: build the primitive's feature ID components from
// the extension's featureIds array (texture-, attribute-, or implicit-based).
function loadPrimitiveFeatures(
    loader: any,
    primitive: any,
    meshFeaturesExtension: any,
    frameState: any,
) {
    // A missing extension or featureIds array simply yields no components.
    const featureIdsArray = meshFeaturesExtension?.featureIds ?? [];

    featureIdsArray.forEach((featureIds: any, i: number) => {
        const label = `featureId_${i}`;

        let component;
        if (defined(featureIds.texture)) {
            component = loadFeatureIdTexture(loader, featureIds, frameState, label);
        } else if (defined(featureIds.attribute)) {
            component = loadFeatureIdAttribute(featureIds, label);
        } else {
            // No texture and no attribute: default to the vertex ID, in
            // other words an implicit range with offset 0 and repeat 1.
            component = loadDefaultFeatureIds(featureIds, label);
        }

        primitive.featureIds.push(component);
    });
}
// Builds the default feature ID assignment (one ID per vertex/instance),
// expressed as an implicit range with offset 0 and repeat 1.
function loadDefaultFeatureIds(featureIds: any, positionalLabel: any) {
    const range = new FeatureIdImplicitRange();
    range.propertyTableId = featureIds.propertyTable;
    range.featureCount = featureIds.featureCount;
    range.nullFeatureId = featureIds.nullFeatureId;
    range.label = featureIds.label;
    range.positionalLabel = positionalLabel;
    range.offset = 0;
    range.repeat = 1;
    return range;
}
// For backwards compatibility with EXT_feature_metadata: convert a legacy
// constant/divisor feature ID range into the newer offset/repeat form.
function loadFeatureIdImplicitRangeLegacy(
    gltfFeatureIdAttribute: any,
    featureTableId: any,
    featureCount: any,
    positionalLabel: any,
) {
    const { featureIds } = gltfFeatureIdAttribute;

    const range = new FeatureIdImplicitRange();
    range.propertyTableId = featureTableId;
    range.featureCount = featureCount;

    // The legacy constant/divisor pair was renamed to offset/repeat.
    range.offset = defaultValue(featureIds.constant, 0);
    // A divisor of 0 (the legacy default) now maps to an undefined repeat.
    const divisor = defaultValue(featureIds.divisor, 0);
    range.repeat = divisor === 0 ? undefined : divisor;

    range.positionalLabel = positionalLabel;
    return range;
}
// For the legacy EXT_feature_metadata extension: build the primitive's
// feature ID components, attribute-based entries first, then textures.
function loadPrimitiveFeaturesLegacy(
    loader: any,
    primitive: any,
    metadataExtension: any,
    frameState: any,
) {
    // featureTables supplies the featureCount for each referenced table.
    const { featureTables } = loader.gltfJson.extensions.EXT_feature_metadata;

    // Positional labels are numbered across both lists, attributes first.
    let nextFeatureIdIndex = 0;

    const featureIdAttributes = metadataExtension.featureIdAttributes;
    if (defined(featureIdAttributes)) {
        for (const featureIdAttribute of featureIdAttributes) {
            const featureTableId = featureIdAttribute.featureTable;
            const propertyTableId =
                loader._sortedPropertyTableIds.indexOf(featureTableId);
            const featureCount = featureTables[featureTableId].count;
            const label = `featureId_${nextFeatureIdIndex++}`;

            const component = defined(featureIdAttribute.featureIds.attribute)
                ? loadFeatureIdAttributeLegacy(
                    featureIdAttribute,
                    propertyTableId,
                    featureCount,
                    label,
                )
                : loadFeatureIdImplicitRangeLegacy(
                    featureIdAttribute,
                    propertyTableId,
                    featureCount,
                    label,
                );
            primitive.featureIds.push(component);
        }
    }

    // Feature ID textures are appended after the attribute-based entries.
    const featureIdTextures = metadataExtension.featureIdTextures;
    if (defined(featureIdTextures)) {
        for (const featureIdTexture of featureIdTextures) {
            const featureTableId = featureIdTexture.featureTable;
            const propertyTableId =
                loader._sortedPropertyTableIds.indexOf(featureTableId);
            const featureCount = featureTables[featureTableId].count;
            const featureIdLabel = `featureId_${nextFeatureIdIndex++}`;

            primitive.featureIds.push(
                loadFeatureIdTextureLegacy(
                    loader,
                    featureIdTexture,
                    propertyTableId,
                    frameState,
                    featureCount,
                    featureIdLabel,
                ),
            );
        }
    }
}
// Requests (or reuses) a cached index buffer loader for the accessor,
// forwarding the loader's resource context and the buffer/typed-array flags.
function getIndexBufferLoader(
    loader: any,
    accessorId: any,
    primitive: any,
    draco: any,
    loadBuffer: any,
    loadTypedArray: any,
    frameState: any,
) {
    return ResourceCache.getIndexBufferLoader({
        gltf: loader.gltfJson,
        accessorId,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
        frameState,
        primitive,
        draco,
        asynchronous: loader._asynchronous,
        loadBuffer,
        loadTypedArray,
    });
}
/**
 * Creates the load plan for a primitive's index data.
 *
 * Returns a PrimitiveLoadPlan.IndicesLoadPlan, or undefined when there is
 * neither a draco extension nor a buffer view to read from. The actual
 * buffer / typed array results are attached later through the geometry
 * callback registered here, which runs once the index buffer loader has
 * finished processing.
 */
function loadIndices(
    loader: any,
    accessorId: any,
    primitive: any,
    draco: any,
    hasFeatureIds: any,
    needsPostProcessing: any,
    frameState: any,
) {
    const accessor = loader.gltfJson.accessors[accessorId];
    const bufferViewId = accessor.bufferView;

    // Nothing to load: no draco-compressed data and no buffer view.
    if (!defined(draco) && !defined(bufferViewId)) {
        return undefined;
    }

    const indices = new Indices();
    indices.count = accessor.count;

    const loadAttributesAsTypedArray = loader._loadAttributesAsTypedArray;
    // Load the index buffer as a typed array to generate wireframes or pick in WebGL1.
    const loadForCpuOperations =
        (loader._loadIndicesForWireframe || loader._enablePick) &&
        !frameState.context.webgl2;

    // Load the index buffer as a typed array to batch features together for classification.
    const loadForClassification = loader._loadForClassification && hasFeatureIds;

    // Whether the final output should be a buffer or typed array
    // after loading and post-processing.
    const outputTypedArrayOnly = loadAttributesAsTypedArray;
    const outputBuffer = !outputTypedArrayOnly;
    const outputTypedArray =
        loadAttributesAsTypedArray || loadForCpuOperations || loadForClassification;

    // Determine what to load right now:
    //
    // - If post-processing is needed, load a packed typed array for
    //   further processing, and defer the buffer loading until later.
    // - On the other hand, if post-processing is not needed, set the load
    //   flags directly
    const loadBuffer = needsPostProcessing ? false : outputBuffer;
    const loadTypedArray = needsPostProcessing ? true : outputTypedArray;

    const indexBufferLoader = getIndexBufferLoader(
        loader,
        accessorId,
        primitive,
        draco,
        loadBuffer,
        loadTypedArray,
        frameState,
    );

    // Register the loader and its completion callback under the same index.
    const index = loader._geometryLoaders.length;
    loader._geometryLoaders.push(indexBufferLoader);
    const promise = indexBufferLoader.load();
    loader._loaderPromises.push(promise);
    // This can only execute once indexBufferLoader.process() has run and returns true
    // Save this finish callback by the loader index so it can be called
    // in process().
    loader._geometryCallbacks[index] = () => {
        indices.indexDatatype = indexBufferLoader.indexDatatype;
        indices.buffer = indexBufferLoader.buffer;
        indices.typedArray = indexBufferLoader.typedArray;
    };

    // The plan records the *final* desired outputs, which may differ from
    // what was loaded now when post-processing is pending.
    const indicesPlan = new PrimitiveLoadPlan.IndicesLoadPlan(indices);
    indicesPlan.loadBuffer = outputBuffer;
    indicesPlan.loadTypedArray = outputTypedArray;

    return indicesPlan;
}
/**
 * Loads one glTF morph target. Each entry in `target` maps a vertex
 * attribute semantic to an accessor holding the per-vertex deltas.
 */
function loadMorphTarget(
    loader: any,
    target: any,
    needsPostProcessing: any,
    primitiveLoadPlan: any,
    frameState: any,
) {
    const morphTarget = new MorphTarget();

    // Morph targets can't be draco compressed or instanced, so no
    // primitive/draco/instancing context is forwarded below.
    for (const semantic of Object.keys(target)) {
        const semanticInfo = getSemanticInfo(
            loader,
            VertexAttributeSemanticGaussian,
            semantic,
        );

        const attributePlan = loadVertexAttribute(
            loader,
            target[semantic],
            semanticInfo,
            undefined, // primitive
            undefined, // draco
            false,     // hasInstances
            needsPostProcessing,
            frameState,
        );

        morphTarget.attributes.push(attributePlan.attribute);

        // The load plan doesn't need to distinguish morph target attributes
        // from regular attributes.
        primitiveLoadPlan.attributePlans.push(attributePlan);
    }

    return morphTarget;
}
function loadMaterial(loader: any, gltfMaterial: any, frameState: any) {
    const material = new Material();

    const extensions = Cesium.defaultValue(
        gltfMaterial.extensions,
        //@ts-ignore
        Cesium.defaultValue.EMPTY_OBJECT,
    );
    const pbrSpecularGlossiness = extensions.KHR_materials_pbrSpecularGlossiness;
    const pbrSpecular = extensions.KHR_materials_specular;
    const pbrAnisotropy = extensions.KHR_materials_anisotropy;
    const pbrClearcoat = extensions.KHR_materials_clearcoat;
    const pbrMetallicRoughness = gltfMaterial.pbrMetallicRoughness;
    //@ts-ignore
    material.unlit = defined(extensions.KHR_materials_unlit);

    if (defined(pbrSpecularGlossiness)) {
        //@ts-ignore
        material.specularGlossiness = loadSpecularGlossiness(
            loader,
            pbrSpecularGlossiness,
            frameState,
        );
    } else {
        if (defined(pbrMetallicRoughness)) {
            //@ts-ignore
            material.metallicRoughness = loadMetallicRoughness(
                loader,
                pbrMetallicRoughness,
                frameState,
            );
        }
        //@ts-ignore
        if (defined(pbrSpecular) && !material.unlit) {
            //@ts-ignore
            material.specular = loadSpecular(loader, pbrSpecular, frameState);
        }
        //@ts-ignore
        if (defined(pbrAnisotropy) && !material.unlit) {
            //@ts-ignore
            material.anisotropy = loadAnisotropy(loader, pbrAnisotropy, frameState);
        }
        //@ts-ignore
        if (defined(pbrClearcoat) && !material.unlit) {
            //@ts-ignore
            material.clearcoat = loadClearcoat(loader, pbrClearcoat, frameState);
        }
    }

    // Top level textures
    if (defined(gltfMaterial.emissiveTexture)) {
        //@ts-ignore
        material.emissiveTexture = loadTexture(
            loader,
            gltfMaterial.emissiveTexture,
            frameState,
        );
    }
    // Normals aren't used for classification, so don't load the normal texture.
    if (defined(gltfMaterial.normalTexture) && !loader._loadForClassification) {
        //@ts-ignore
        material.normalTexture = loadTexture(
            loader,
            gltfMaterial.normalTexture,
            frameState,
        );
    }
    if (defined(gltfMaterial.occlusionTexture)) {
        //@ts-ignore
        material.occlusionTexture = loadTexture(
            loader,
            gltfMaterial.occlusionTexture,
            frameState,
        );
    }
    //@ts-ignore
    material.emissiveFactor = fromArray(Cartesian3, gltfMaterial.emissiveFactor);
    //@ts-ignore
    material.alphaMode = gltfMaterial.alphaMode;
    //@ts-ignore
    material.alphaCutoff = gltfMaterial.alphaCutoff;
    //@ts-ignore
    material.doubleSided = gltfMaterial.doubleSided;

    return material;
}
// For CESIUM_primitive_outline: load the outline edge indices as plain
// numbers (never unpacked as quaternions).
function loadPrimitiveOutline(loader: any, outlineExtension: any) {
    const accessor = loader.gltfJson.accessors[outlineExtension.indices];
    return loadAccessor(loader, accessor, false);
}
/**
 * Loads an accessor's values into a plain JS array of numbers/math types.
 * The array is returned immediately and filled asynchronously once the
 * backing buffer view resolves; accessors without a buffer view are
 * filled with default (zero) values instead.
 */
function loadAccessor(loader: any, accessor: any, useQuaternion: any = true) {
    const values = new Array(accessor.count);

    const bufferViewId = accessor.bufferView;
    if (!defined(bufferViewId)) {
        return loadDefaultAccessorValues(accessor, values);
    }

    const bufferViewLoader = getBufferViewLoader(loader, bufferViewId);
    loader._loaderPromises.push(
        loadAccessorBufferView(
            loader,
            bufferViewLoader,
            accessor,
            useQuaternion,
            values,
        ),
    );
    return values;
}
// Requests a cached buffer-view loader and tracks it on the glTF loader
// so it can be released during unload.
function getBufferViewLoader(loader: any, bufferViewId: any) {
    const bufferViewLoader = ResourceCache.getBufferViewLoader({
        gltf: loader.gltfJson,
        bufferViewId,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
    });
    loader._bufferViewLoaders.push(bufferViewLoader);
    return bufferViewLoader;
}
/**
 * Waits for the accessor's buffer view to load, then unpacks its packed
 * contents into the pre-allocated `values` array in place.
 *
 * @param loader The glTF loader whose lifecycle owns this work.
 * @param bufferViewLoader Loader for the accessor's backing buffer view.
 * @param accessor The glTF accessor JSON describing layout and count.
 * @param useQuaternion Whether VEC4 data should unpack to Quaternions.
 * @param values Output array, filled in place once data is available.
 */
async function loadAccessorBufferView(
    loader: any,
    bufferViewLoader: any,
    accessor: any,
    useQuaternion: any,
    values: any,
) {
    // Save a link to the gltfJson, which is removed after bufferViewLoader.load()
    const { gltfJson } = loader;

    await bufferViewLoader.load();
    // The loader may have been destroyed while awaiting; bail out quietly.
    if (loader.isDestroyed()) {
        return;
    }

    const typedArray = getPackedTypedArray(
        gltfJson,
        accessor,
        bufferViewLoader.typedArray,
    );

    // NOTE(review): this re-default to false looks like a no-op today —
    // loadAccessor defaults useQuaternion to true, so undefined should
    // never reach here. Confirm before relying on the false fallback.
    useQuaternion = Cesium.defaultValue(useQuaternion, false);
    loadAccessorValues(accessor, typedArray, values, useQuaternion);
}
/**
 * Copies an accessor's data out of a (possibly interleaved) buffer view
 * into a tightly packed typed array of the accessor's component type.
 *
 * NOTE(review): relies on Cesium internals (getAccessorByteStride,
 * numberOfComponentsForType, getComponentReader) being exposed on the
 * Cesium namespace — verify against the bundled Cesium build.
 */
function getPackedTypedArray(gltf: any, accessor: any, bufferViewTypedArray: any) {
    let byteOffset = accessor.byteOffset;
    //@ts-ignore
    const byteStride = Cesium.getAccessorByteStride(gltf, accessor);
    const count = accessor.count;
    //@ts-ignore
    const componentCount = Cesium.numberOfComponentsForType(accessor.type);
    const componentType = accessor.componentType;
    //@ts-ignore
    const componentByteLength = Cesium.ComponentDatatype.getSizeInBytes(componentType);
    const defaultByteStride = componentByteLength * componentCount;
    const componentsLength = count * componentCount;

    // Fast path: the data is already tightly packed, so a view over a
    // fresh copy of the bytes is enough.
    if (byteStride === defaultByteStride) {
        // Copy the typed array and let the underlying ArrayBuffer be freed
        bufferViewTypedArray = new Uint8Array(bufferViewTypedArray);
        //@ts-ignore
        return Cesium.ComponentDatatype.createArrayBufferView(
            componentType,
            bufferViewTypedArray.buffer,
            bufferViewTypedArray.byteOffset + byteOffset,
            componentsLength,
        );
    }

    // Slow path: interleaved data — read one element at a time at the
    // accessor's stride and repack into a contiguous typed array.
    const accessorTypedArray = Cesium.ComponentDatatype.createTypedArray(
        componentType,
        componentsLength,
    );

    const dataView = new DataView(bufferViewTypedArray.buffer);
    const components = new Array(componentCount);
    //@ts-ignore
    const componentReader = Cesium.getComponentReader(accessor.componentType);
    // Offsets below are absolute within the underlying ArrayBuffer.
    byteOffset = bufferViewTypedArray.byteOffset + byteOffset;

    for (let i = 0; i < count; ++i) {
        componentReader(
            dataView,
            byteOffset,
            componentCount,
            componentByteLength,
            components,
        );
        for (let j = 0; j < componentCount; ++j) {
            accessorTypedArray[i * componentCount + j] = components[j];
        }
        byteOffset += byteStride;
    }

    return accessorTypedArray;
}
function loadAccessorValues(accessor: any, typedArray: any, values: any, useQuaternion: any) {
    const accessorType = accessor.type;
    const accessorCount = accessor.count;

    if (accessorType === AttributeType.SCALAR) {
        for (let i = 0; i < accessorCount; i++) {
            values[i] = typedArray[i];
        }
    } else if (accessorType === AttributeType.VEC4 && useQuaternion) {
        for (let i = 0; i < accessorCount; i++) {
            values[i] = Cesium.Quaternion.unpack(typedArray, i * 4);
        }
    } else {
        const MathType = AttributeType.getMathType(accessorType);
        const numberOfComponents =
            AttributeType.getNumberOfComponents(accessorType);

        for (let i = 0; i < accessorCount; i++) {
            values[i] = MathType.unpack(typedArray, i * numberOfComponents);
        }
    }

    return values;
}
// Fills `values` with defaults for an accessor that has no buffer view:
// 0 for scalars, the math type's zero otherwise. Note the non-scalar fill
// shares a single zero instance across all entries, so callers must not
// mutate the entries in place.
function loadDefaultAccessorValues(accessor: any, values: any) {
    if (accessor.type === AttributeType.SCALAR) {
        return values.fill(0);
    }
    const MathType = AttributeType.getMathType(accessor.type);
    return values.fill(MathType.clone(MathType.ZERO));
}
function loadSpecularGlossiness(loader: any, specularGlossinessInfo: any, frameState: any) {
    const {
        diffuseTexture,
        specularGlossinessTexture,
        diffuseFactor,
        specularFactor,
        glossinessFactor,
    } = specularGlossinessInfo;

    const specularGlossiness = new SpecularGlossiness();
    if (defined(diffuseTexture)) {
        specularGlossiness.diffuseTexture = loadTexture(
            loader,
            diffuseTexture,
            frameState,
        );
    }
    if (defined(specularGlossinessTexture)) {
        specularGlossiness.specularGlossinessTexture = loadTexture(
            loader,
            specularGlossinessTexture,
            frameState,
        );
    }
    specularGlossiness.diffuseFactor = fromArray(Cesium.Cartesian4, diffuseFactor);
    specularGlossiness.specularFactor = fromArray(Cesium.Cartesian3, specularFactor);
    specularGlossiness.glossinessFactor = glossinessFactor;

    return specularGlossiness;
}
/**
 * Creates a texture reader for a glTF textureInfo and kicks off the
 * texture load through the ResourceCache.
 *
 * Returns undefined when no image is available for the texture given the
 * supported image formats. Otherwise the returned reader's `texture`
 * property is populated later by the texture callback saved here, once
 * the texture loader has finished processing.
 *
 * @param samplerOverride Optional sampler forced onto the loaded texture
 *   (e.g. nearest sampling for feature ID textures).
 */
function loadTexture(loader: any, textureInfo: any, frameState: any, samplerOverride: any = undefined) {
    const gltf = loader.gltfJson;
    const imageId = GltfLoaderUtil.getImageIdFromTexture({
        gltf: gltf,
        textureId: textureInfo.index,
        supportedImageFormats: loader._supportedImageFormats,
    });

    if (!defined(imageId)) {
        return undefined;
    }

    const textureLoader = ResourceCache.getTextureLoader({
        gltf: gltf,
        textureInfo: textureInfo,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
        supportedImageFormats: loader._supportedImageFormats,
        frameState: frameState,
        asynchronous: loader._asynchronous,
    });

    const textureReader = GltfLoaderUtil.createModelTextureReader({
        textureInfo: textureInfo,
    });

    // Register the loader and its completion callback under the same index.
    const index = loader._textureLoaders.length;
    loader._textureLoaders.push(textureLoader);
    const promise = textureLoader.load().catch((error: any) => {
        if (loader.isDestroyed()) {
            return;
        }

        if (!loader._incrementallyLoadTextures) {
            // If incrementallyLoadTextures is false, throw the error to ensure the loader state
            // immediately is set to have failed
            throw error;
        }

        // Otherwise, save the error so it can be thrown next
        loader._textureState = GltfLoaderState.FAILED;
        loader._textureErrors.push(error);
    });
    loader._texturesPromises.push(promise);
    // This can only execute once textureLoader.process() has run and returns true
    // Save this finish callback by the loader index so it can be called
    // in process().
    loader._textureCallbacks[index] = () => {
        textureReader.texture = textureLoader.texture;
        if (defined(samplerOverride)) {
            textureReader.texture.sampler = samplerOverride;
        }
    };

    return textureReader;
}
// Unpacks a flat numeric array into the given math type, or returns the
// single element directly when MathType is Number. Missing input yields
// undefined.
function fromArray(MathType: any, values: any) {
    if (!defined(values)) {
        return undefined;
    }
    return MathType === Number ? values[0] : MathType.unpack(values);
}
function loadMetallicRoughness(loader: any, metallicRoughnessInfo: any, frameState: any) {
    const {
        baseColorTexture,
        metallicRoughnessTexture,
        baseColorFactor,
        metallicFactor,
        roughnessFactor,
    } = metallicRoughnessInfo;

    const metallicRoughness = new MetallicRoughness();
    if (defined(baseColorTexture)) {
        metallicRoughness.baseColorTexture = loadTexture(
            loader,
            baseColorTexture,
            frameState,
        );
    }
    if (defined(metallicRoughnessTexture)) {
        metallicRoughness.metallicRoughnessTexture = loadTexture(
            loader,
            metallicRoughnessTexture,
            frameState,
        );
    }
    metallicRoughness.baseColorFactor = fromArray(Cesium.Cartesian4, baseColorFactor);
    metallicRoughness.metallicFactor = metallicFactor;
    metallicRoughness.roughnessFactor = roughnessFactor;

    return metallicRoughness;
}
// For KHR_materials_specular: load the specular strength/color textures
// and the corresponding factors.
function loadSpecular(loader: any, specularInfo: any, frameState: any) {
    const specular = new Specular();

    if (defined(specularInfo.specularTexture)) {
        specular.specularTexture = loadTexture(
            loader,
            specularInfo.specularTexture,
            frameState,
        );
    }
    if (defined(specularInfo.specularColorTexture)) {
        specular.specularColorTexture = loadTexture(
            loader,
            specularInfo.specularColorTexture,
            frameState,
        );
    }

    specular.specularFactor = specularInfo.specularFactor;
    specular.specularColorFactor = fromArray(
        Cartesian3,
        specularInfo.specularColorFactor,
    );

    return specular;
}
// For KHR_materials_anisotropy: load the anisotropy texture and factors,
// falling back to the component's documented defaults when absent.
function loadAnisotropy(loader: any, anisotropyInfo: any, frameState: any) {
    const anisotropy = new Anisotropy();

    const {
        anisotropyStrength = Anisotropy.DEFAULT_ANISOTROPY_STRENGTH,
        anisotropyRotation = Anisotropy.DEFAULT_ANISOTROPY_ROTATION,
        anisotropyTexture,
    } = anisotropyInfo;

    if (defined(anisotropyTexture)) {
        anisotropy.anisotropyTexture = loadTexture(
            loader,
            anisotropyTexture,
            frameState,
        );
    }
    anisotropy.anisotropyStrength = anisotropyStrength;
    anisotropy.anisotropyRotation = anisotropyRotation;

    return anisotropy;
}

function loadClearcoat(loader: any, clearcoatInfo: any, frameState: any) {
    const {
        clearcoatFactor = Clearcoat.DEFAULT_CLEARCOAT_FACTOR,
        clearcoatTexture,
        clearcoatRoughnessFactor = Clearcoat.DEFAULT_CLEARCOAT_ROUGHNESS_FACTOR,
        clearcoatRoughnessTexture,
        clearcoatNormalTexture,
    } = clearcoatInfo;

    const clearcoat = new Clearcoat();
    if (defined(clearcoatTexture)) {
        clearcoat.clearcoatTexture = loadTexture(
            loader,
            clearcoatTexture,
            frameState,
        );
    }
    if (defined(clearcoatRoughnessTexture)) {
        clearcoat.clearcoatRoughnessTexture = loadTexture(
            loader,
            clearcoatRoughnessTexture,
            frameState,
        );
    }
    if (defined(clearcoatNormalTexture)) {
        clearcoat.clearcoatNormalTexture = loadTexture(
            loader,
            clearcoatNormalTexture,
            frameState,
        );
    }
    clearcoat.clearcoatFactor = clearcoatFactor;
    clearcoat.clearcoatRoughnessFactor = clearcoatRoughnessFactor;

    return clearcoat;
}
// Reused result object so repeated semantic lookups do not allocate per call.
const scratchSemanticInfo = {
    gltfSemantic: undefined,
    renamedSemantic: undefined,
    modelSemantic: undefined,
};

/**
 * Resolves a glTF attribute semantic into the model semantic, optionally
 * renaming legacy batch-id semantics for .b3dm content.
 * NOTE: returns a shared scratch object — consume before the next call.
 */
function getSemanticInfo(loader: any, semanticType: any, gltfSemantic: any) {
    // For .b3dm, _BATCHID (or the legacy BATCHID) is renamed to
    // _FEATURE_ID_0 in the generated model components for compatibility
    // with EXT_mesh_features.
    const isLegacyBatchId =
        gltfSemantic === "_BATCHID" || gltfSemantic === "BATCHID";
    const renamedSemantic =
        loader._renameBatchIdSemantic && isLegacyBatchId
            ? "_FEATURE_ID_0"
            : gltfSemantic;

    const info = scratchSemanticInfo;
    info.gltfSemantic = gltfSemantic;
    info.renamedSemantic = renamedSemantic;
    info.modelSemantic = semanticType.fromGltfSemantic(renamedSemantic);
    return info;
}
/**
 * Classification models only consume the position, texcoord, and feature ID
 * vertex attributes; every other semantic is skipped when loading for
 * classification.
 */
function isClassificationAttribute(attributeSemantic: any) {
    //@ts-ignore
    const matchesPosition = attributeSemantic === VertexAttributeSemanticGaussian.POSITION;
    //@ts-ignore
    const matchesFeatureId = attributeSemantic === VertexAttributeSemanticGaussian.FEATURE_ID;
    //@ts-ignore
    const matchesTexcoord = attributeSemantic === VertexAttributeSemanticGaussian.TEXCOORD;
    return matchesPosition || matchesFeatureId || matchesTexcoord;
}
/**
 * Loads a vertex attribute and wraps it in an AttributeLoadPlan describing
 * whether the final output should live in a GPU buffer, a typed array, or
 * both (the plan is consumed after any post-processing completes).
 */
function loadVertexAttribute(
    loader: any,
    accessorId: any,
    semanticInfo: any,
    primitive: any,
    draco: any,
    hasInstances: any,
    needsPostProcessing: any,
    frameState: any,
) {
    const semantic = semanticInfo.modelSemantic;
    //@ts-ignore
    const isPosition = semantic === VertexAttributeSemanticGaussian.POSITION;
    //@ts-ignore
    const isFeatureId = semantic === VertexAttributeSemanticGaussian.FEATURE_ID;

    // CPU-side copies are needed for: POSITION in 2D scenes (unless
    // instanced), POSITION for picking without WebGL2, and feature IDs when
    // loading for classification.
    const needs2DCopy =
        isPosition &&
        !hasInstances &&
        loader._loadAttributesFor2D &&
        !frameState.scene3DOnly;
    const needsPickCopy =
        isPosition && loader._enablePick && !frameState.context.webgl2;
    const needsClassificationCopy =
        loader._loadForClassification && isFeatureId;

    // Final outputs after loading and any post-processing.
    const outputTypedArrayOnly = loader._loadAttributesAsTypedArray;
    const outputBuffer = !outputTypedArrayOnly;
    const outputTypedArray =
        outputTypedArrayOnly ||
        needs2DCopy ||
        needsPickCopy ||
        needsClassificationCopy;

    // When post-processing is required, only a packed typed array is loaded
    // now and the buffer upload is deferred; otherwise load the final
    // outputs directly.
    const loadBuffer = needsPostProcessing ? false : outputBuffer;
    const loadTypedArray = needsPostProcessing ? true : outputTypedArray;

    const attribute = loadAttribute(
        loader,
        accessorId,
        semanticInfo,
        primitive,
        draco,
        loadBuffer,
        loadTypedArray,
        frameState,
    );

    //@ts-ignore
    const plan = new PrimitiveLoadPlan.AttributeLoadPlan(attribute);
    plan.loadBuffer = outputBuffer;
    plan.loadTypedArray = outputTypedArray;
    return plan;
}
/**
 * Creates a ModelComponents.Attribute for the accessor and, when backing
 * data exists (a buffer view or Draco-compressed data), schedules an
 * asynchronous vertex-buffer load. The loaded buffer/typed array is attached
 * to the attribute later by the finish callback registered here, which is
 * keyed by the geometry loader's index and invoked from process().
 */
function loadAttribute(
    loader: any,
    accessorId: any,
    semanticInfo: any,
    primitive: any,
    draco: any,
    loadBuffer: any,
    loadTypedArray: any,
    frameState: any,
) {
    const gltf = loader.gltfJson;
    const accessor = gltf.accessors[accessorId];
    const bufferViewId = accessor.bufferView;

    const gltfSemantic = semanticInfo.gltfSemantic;
    const renamedSemantic = semanticInfo.renamedSemantic;
    const modelSemantic = semanticInfo.modelSemantic;

    // Only semantics recognized by the model get a set index (e.g. the 0 in
    // TEXCOORD_0); unrecognized semantics keep an undefined set index.
    const setIndex = defined(modelSemantic)
        ? getSetIndex(renamedSemantic)
        : undefined;

    const name = gltfSemantic;
    const attribute = createAttribute(
        gltf,
        accessorId,
        name,
        modelSemantic,
        setIndex,
    );

    // No backing data at all: return the attribute with metadata only.
    if (!defined(draco) && !defined(bufferViewId)) {
        return attribute;
    }

    const vertexBufferLoader = getVertexBufferLoader(
        loader,
        accessorId,
        gltfSemantic,
        primitive,
        draco,
        loadBuffer,
        loadTypedArray,
        frameState,
    );

    // Capture the slot index BEFORE pushing so the callback below can be
    // stored under the same index in _geometryCallbacks.
    const index = loader._geometryLoaders.length;
    loader._geometryLoaders.push(vertexBufferLoader);
    const promise = vertexBufferLoader.load();
    loader._loaderPromises.push(promise);
    // This can only execute once vertexBufferLoader.process() has run and returns true
    // Save this finish callback by the loader index so it can be called
    // in process().
    loader._geometryCallbacks[index] = () => {
        // Draco-compressed attributes need different finalization because
        // the decoded data is tightly packed and may be quantized.
        if (
            defined(draco) &&
            defined(draco.attributes) &&
            defined(draco.attributes[gltfSemantic])
        ) {
            finalizeDracoAttribute(
                attribute,
                vertexBufferLoader,
                loadBuffer,
                loadTypedArray,
            );
        } else {
            finalizeAttribute(
                gltf,
                accessor,
                attribute,
                vertexBufferLoader,
                loadBuffer,
                loadTypedArray,
            );
        }
    };

    return attribute;
}
/**
 * Copies the results of a finished Draco vertex-buffer load onto the
 * attribute. Draco-decoded attributes are tightly packed in their own
 * buffer, so the accessor's original byteOffset/byteStride no longer apply.
 */
function finalizeDracoAttribute(
    attribute: any,
    vertexBufferLoader: any,
    loadBuffer: any,
    loadTypedArray: any,
) {
    attribute.byteOffset = 0;
    attribute.byteStride = undefined;
    attribute.quantization = vertexBufferLoader.quantization;

    if (loadBuffer) {
        attribute.buffer = vertexBufferLoader.buffer;
    }

    if (!loadTypedArray) {
        return;
    }

    // Quantized attributes store their data with the quantized component
    // datatype rather than the accessor's declared one.
    const quantization = vertexBufferLoader.quantization;
    const componentDatatype = defined(quantization)
        ? quantization.componentDatatype
        : attribute.componentDatatype;
    //@ts-ignore
    attribute.typedArray = ComponentDatatype.createArrayBufferView(
        componentDatatype,
        vertexBufferLoader.typedArray.buffer,
    );
}

/**
 * Copies the results of a finished (non-Draco) vertex-buffer load onto the
 * attribute, packing the typed array according to the accessor layout when
 * a CPU copy was requested.
 */
function finalizeAttribute(
    gltf: any,
    accessor: any,
    attribute: any,
    vertexBufferLoader: any,
    loadBuffer: any,
    loadTypedArray: any,
) {
    if (loadBuffer) {
        attribute.buffer = vertexBufferLoader.buffer;
    }

    if (!loadTypedArray) {
        return;
    }

    attribute.typedArray = getPackedTypedArray(
        gltf,
        accessor,
        vertexBufferLoader.typedArray,
    );

    if (!loadBuffer) {
        // With no GPU buffer, values only exist in the tightly packed typed
        // array, so the accessor's byteOffset and byteStride must be ignored.
        attribute.byteOffset = 0;
        attribute.byteStride = undefined;
    }
}
/**
 * Extracts the set index from a glTF attribute semantic, e.g.
 * "TEXCOORD_0" -> 0, "_FEATURE_ID_1" -> 1. Returns undefined when the
 * semantic carries no trailing numeric suffix (e.g. "POSITION").
 */
function getSetIndex(gltfSemantic: any) {
    const setIndexRegex = /^\w+_(\d+)$/;
    const setIndexMatch = setIndexRegex.exec(gltfSemantic);
    if (setIndexMatch !== null) {
        // Explicit radix 10: never rely on parseInt's implicit base detection.
        return parseInt(setIndexMatch[1], 10);
    }
    return undefined;
}
/**
 * Builds a ModelComponents.Attribute from a glTF accessor, populating
 * layout, bounds, and quantization info (WEB3D_quantized_attributes and
 * KHR_mesh_quantization).
 */
function createAttribute(gltf: any, accessorId: any, name: any, semantic: any, setIndex: any) {
    const accessor = gltf.accessors[accessorId];
    const MathType = AttributeType.getMathType(accessor.type);
    const normalized = defaultValue(accessor.normalized, false);

    const attribute = new Attribute();
    attribute.name = name;
    attribute.semantic = semantic;
    attribute.setIndex = setIndex;
    attribute.constant = getDefault(MathType);
    attribute.componentDatatype = accessor.componentType;
    attribute.normalized = normalized;
    attribute.count = accessor.count;
    attribute.type = accessor.type;
    attribute.min = fromArray(MathType, accessor.min);
    attribute.max = fromArray(MathType, accessor.max);
    attribute.byteOffset = accessor.byteOffset;
    attribute.byteStride = getAccessorByteStride(gltf, accessor);

    if (hasExtension(accessor, "WEB3D_quantized_attributes")) {
        setQuantizationFromWeb3dQuantizedAttributes(
            accessor.extensions.WEB3D_quantized_attributes,
            attribute,
            MathType,
        );
    }

    // Semantics whose min/max may be quantized under KHR_mesh_quantization.
    const quantizableSemantics = [
        //@ts-ignore
        VertexAttributeSemanticGaussian.POSITION,
        //@ts-ignore
        VertexAttributeSemanticGaussian.NORMAL,
        //@ts-ignore
        VertexAttributeSemanticGaussian.TANGENT,
        //@ts-ignore
        VertexAttributeSemanticGaussian.TEXCOORD,
        //@ts-ignore
        VertexAttributeSemanticGaussian.FEATURE_ID,
        //@ts-ignore
        VertexAttributeSemanticGaussian.SCALE,
        //@ts-ignore
        VertexAttributeSemanticGaussian.ROTATION,
    ];
    const isQuantizable = quantizableSemantics.includes(attribute.semantic);

    // In the glTF 2.0 spec, min and max are not affected by the normalized
    // flag. However, for KHR_mesh_quantization, min and max must be
    // dequantized for normalized values, else the bounding sphere will be
    // computed incorrectly.
    const hasKhrMeshQuantization = gltf.extensionsRequired?.includes(
        "KHR_mesh_quantization",
    );
    if (hasKhrMeshQuantization && normalized && isQuantizable) {
        dequantizeMinMax(attribute, MathType);
    }

    return attribute;
}
// Per-component lower bounds used when clamping dequantized min/max values,
// keyed by the accessor type.
const minimumBoundsByType = {
    VEC2: new Cartesian2(-1.0, -1.0),
    VEC3: new Cartesian3(-1.0, -1.0, -1.0),
    VEC4: new Cartesian4(-1.0, -1.0, -1.0, -1.0),
};

/**
 * Returns the divisor that maps a quantized integer of the given component
 * datatype back to its normalized range; 1.0 for datatypes that are not
 * subject to normalization.
 */
function getQuantizationDivisor(componentDatatype: any) {
    if (componentDatatype === ComponentDatatype.BYTE) {
        return 127;
    }
    if (componentDatatype === ComponentDatatype.UNSIGNED_BYTE) {
        return 255;
    }
    if (componentDatatype === ComponentDatatype.SHORT) {
        return 32767;
    }
    if (componentDatatype === ComponentDatatype.UNSIGNED_SHORT) {
        return 65535;
    }
    return 1.0;
}
/**
 * Dequantizes an attribute's min and max bounds in place, per component:
 * dequantized = max(quantized / divisor, -1.0).
 */
function dequantizeMinMax(attribute: any, VectorType: any) {
    const divisor = getQuantizationDivisor(attribute.componentDatatype);
    //@ts-ignore
    const minimumBound = minimumBoundsByType[attribute.type];

    const min = attribute.min;
    if (defined(min)) {
        VectorType.divideByScalar(min, divisor, min);
        attribute.min = VectorType.maximumByComponent(min, minimumBound, min);
    }

    const max = attribute.max;
    if (defined(max)) {
        VectorType.divideByScalar(max, divisor, max);
        attribute.max = VectorType.maximumByComponent(max, minimumBound, max);
    }
}
/**
 * Populates an attribute's quantization info from the legacy
 * WEB3D_quantized_attributes extension. For an N-component type the
 * decodeMatrix is a square (N+1)x(N+1) matrix (apparently column-major, as
 * the indexing below reads per-component step sizes off the diagonal and
 * the volume offset out of the last column — confirm against the extension
 * spec if touching this).
 */
function setQuantizationFromWeb3dQuantizedAttributes(
    extension: any,
    attribute: any,
    MathType: any,
) {
    const decodeMatrix = extension.decodeMatrix;
    const decodedMin = fromArray(MathType, extension.decodedMin);
    const decodedMax = fromArray(MathType, extension.decodedMax);

    // Prefer the extension's dequantized bounds over the accessor's raw
    // (quantized) min/max when both are provided.
    if (defined(decodedMin) && defined(decodedMax)) {
        attribute.min = decodedMin;
        attribute.max = decodedMax;
    }

    const quantization = new ModelComponents.Quantization();
    quantization.componentDatatype = attribute.componentDatatype;
    quantization.type = attribute.type;

    if (decodeMatrix.length === 4) {
        // SCALAR: 2x2 decode matrix
        quantization.quantizedVolumeOffset = decodeMatrix[2];
        quantization.quantizedVolumeStepSize = decodeMatrix[0];
    } else if (decodeMatrix.length === 9) {
        // VEC2: 3x3 decode matrix
        quantization.quantizedVolumeOffset = new Cartesian2(
            decodeMatrix[6],
            decodeMatrix[7],
        );
        quantization.quantizedVolumeStepSize = new Cartesian2(
            decodeMatrix[0],
            decodeMatrix[4],
        );
    } else if (decodeMatrix.length === 16) {
        // VEC3: 4x4 decode matrix
        quantization.quantizedVolumeOffset = new Cartesian3(
            decodeMatrix[12],
            decodeMatrix[13],
            decodeMatrix[14],
        );
        quantization.quantizedVolumeStepSize = new Cartesian3(
            decodeMatrix[0],
            decodeMatrix[5],
            decodeMatrix[10],
        );
    } else if (decodeMatrix.length === 25) {
        // VEC4: 5x5 decode matrix
        quantization.quantizedVolumeOffset = new Cartesian4(
            decodeMatrix[20],
            decodeMatrix[21],
            decodeMatrix[22],
            decodeMatrix[23],
        );
        quantization.quantizedVolumeStepSize = new Cartesian4(
            decodeMatrix[0],
            decodeMatrix[6],
            decodeMatrix[12],
            decodeMatrix[18],
        );
    }
    // NOTE: unexpected decodeMatrix lengths silently leave offset/step unset.

    attribute.quantization = quantization;
}
/**
 * Produces the default constant value for a math type: 0.0 for scalar
 * Number, otherwise a zero-initialized instance of the vector/matrix type.
 */
function getDefault(MathType: any) {
    // Scalar accessors use a plain number; all Cesium math types default
    // their components to 0.0 when constructed with no arguments.
    return MathType === Number ? 0.0 : new MathType();
}
/**
 * Fetches (or creates) a cached vertex-buffer loader for the accessor's
 * buffer view, optionally routed through the Draco decode path.
 */
function getVertexBufferLoader(
    loader: any,
    accessorId: any,
    semantic: any,
    primitive: any,
    draco: any,
    loadBuffer: any,
    loadTypedArray: any,
    frameState: any,
) {
    const gltf = loader.gltfJson;
    const bufferViewId = gltf.accessors[accessorId].bufferView;

    return ResourceCache.getVertexBufferLoader({
        gltf,
        gltfResource: loader._gltfResource,
        baseResource: loader._baseResource,
        frameState,
        bufferViewId,
        primitive,
        draco,
        attributeSemantic: semantic,
        accessorId,
        asynchronous: loader._asynchronous,
        loadBuffer,
        loadTypedArray,
    });
}
//#endregion