/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Autogenerated file -- DO NOT EDIT!

% # Emit extern "C" forward declarations for every intrinsic that has a C++
% # implementation. Deduplicate by the implementing function, since several
% # intrinsics may share one impl and the symbols must be declared only once.
% Compiler::intrinsics.select(&:has_impl?).uniq{ |i| i.impl }.each do |intrinsic|
%     if intrinsic.private
% # Private intrinsics are compiled out of product builds (see matching #endif).
#ifndef PANDA_PRODUCT_BUILD
%     end
% # Use the hand-written fast path symbol when the intrinsic provides one,
% # otherwise fall back to the regular impl.
% impl = intrinsic.respond_to?(:fast_path) ? intrinsic.fast_path : intrinsic.impl
% if intrinsic.respond_to?(:fast_path)
% # NOTE(review): on AMD64 builds without the x86_64 compiler backend the
% # fast path is presumably unavailable, so declare the slow-path Bridge and
% # RuntimeCallChecker symbols of the regular impl instead — confirm against
% # the backend configuration.
#if defined(PANDA_TARGET_AMD64) && !defined(PANDA_COMPILER_TARGET_X86_64)
    extern "C" void <%= intrinsic.impl.rpartition('::').last %>Bridge();
    extern "C" void <%= intrinsic.impl.rpartition('::').last %>RuntimeCallChecker();
#else
% end
% if intrinsic.respond_to?(:fast_path)
% # Fast-path entrypoints are declared as plain symbols (no Bridge wrapper).
    extern "C" void <%= impl.rpartition('::').last %>();
% else
    extern "C" void <%= impl.rpartition('::').last %>Bridge();
% end
    extern "C" void <%= impl.rpartition('::').last %>RuntimeCallChecker();
%   if intrinsic.respond_to?(:fast_path)
#endif
%   end
%     if intrinsic.private
#endif  // PANDA_PRODUCT_BUILD
%     end
% end

// Entrypoint stub for object cloning; defined elsewhere in the runtime.
extern "C" void ObjectCloneStub();

// Maps a compiler intrinsic to its runtime entrypoint id.
// Returns IntrinsicId::COUNT for any intrinsic without a generated mapping.
// NOLINTNEXTLINE(readability-function-size)
inline RuntimeInterface::IntrinsicId GetIntrinsicEntryPointId(intrinsics::Intrinsic intrinsic) {
    switch (intrinsic)
    {
% # One case per known intrinsic, generated from the compiler's intrinsic table.
% Compiler::intrinsics.each do |intrinsic|
    case intrinsics::Intrinsic::<%= intrinsic.enum_name %>:
        return RuntimeInterface::IntrinsicId::<%= intrinsic.entrypoint_name %>;
% end
    default:
        return RuntimeInterface::IntrinsicId::COUNT;
    }
}

// Resolves an intrinsic id to the address of the function implementing it.
//
// runtime_call: when true, return the Bridge symbol (the variant used for
//   calls going through the runtime); when false, return the direct impl in
//   release builds, or the RuntimeCallChecker wrapper in debug (!NDEBUG)
//   builds. Intrinsics without an implementation resolve to 0, and private
//   intrinsics resolve to UnknownIntrinsic in product builds.
// NOLINTNEXTLINE(readability-function-size)
uintptr_t PandaRuntimeInterface::GetIntrinsicAddress(bool runtime_call, PandaRuntimeInterface::IntrinsicId id) const {
    switch (id) {
    // Library-call intrinsics resolve straight to the libc/libm functions.
    // The static_cast pins the exact overload before taking its address.
    case IntrinsicId::LIB_CALL_FMOD: {
        using fp = double (*)(double, double);
        return reinterpret_cast<uintptr_t>(static_cast<fp>(fmod));
    }
    case IntrinsicId::LIB_CALL_FMODF: {
        using fp = float (*)(float, float);
        return reinterpret_cast<uintptr_t>(static_cast<fp>(fmodf));
    }
    case IntrinsicId::LIB_CALL_MEM_COPY: {
        using fp = void *(*)(void *, const void *, size_t);
        return reinterpret_cast<uintptr_t>(static_cast<fp>(memcpy));
    }
#include "intrinsics_stub.inl"

% # Generated cases: one per intrinsic in the compiler's intrinsic table.
% Compiler::intrinsics.each do |intrinsic|
    case IntrinsicId::<%= intrinsic.entrypoint_name %>:
%   if !intrinsic.has_impl?
        return 0;
%     next
%   end
% # Fast-path intrinsics: the fast path IS the bridge symbol (no "Bridge"
% # suffix); regular intrinsics use "<impl>Bridge".
%   impl = intrinsic.respond_to?(:fast_path) ? intrinsic.fast_path : intrinsic.impl
%   bridge = intrinsic.respond_to?(:fast_path) ? intrinsic.fast_path : "#{intrinsic.impl.rpartition('::').last}Bridge"
%   if intrinsic.private
#ifndef PANDA_PRODUCT_BUILD
%   end
% if intrinsic.respond_to?(:fast_path)
% # NOTE(review): presumably the fast path is unusable on AMD64 without the
% # x86_64 backend, so this branch falls back to the regular impl's symbols —
% # mirrors the forward declarations emitted above; confirm with the backend.
#if defined(PANDA_TARGET_AMD64) && !defined(PANDA_COMPILER_TARGET_X86_64)
        return runtime_call ? reinterpret_cast<uintptr_t>(<%= "#{intrinsic.impl.rpartition('::').last}Bridge" %>) :
#ifdef NDEBUG
          reinterpret_cast<uintptr_t>(<%= intrinsic.impl %>);
#else
          // Debug builds route direct calls through the checker wrapper.
          reinterpret_cast<uintptr_t>(<%= intrinsic.impl.rpartition('::').last %>RuntimeCallChecker);
#endif
#else
% end
        return runtime_call ? reinterpret_cast<uintptr_t>(<%= bridge %>) :
#ifdef NDEBUG
          reinterpret_cast<uintptr_t>(<%= impl %>);
#else
          // Debug builds route direct calls through the checker wrapper.
          reinterpret_cast<uintptr_t>(<%= impl.rpartition('::').last %>RuntimeCallChecker);
#endif
% if intrinsic.respond_to?(:fast_path)
#endif
% end
%   if intrinsic.private
#else
        // Private intrinsics are unavailable in product builds.
        return reinterpret_cast<uintptr_t>(intrinsics::UnknownIntrinsic);
#endif  // PANDA_PRODUCT_BUILD
%   end
% end
    default:
        UNREACHABLE();
    }
}
