# Collect every .cpp under this directory tree into SOURCE_FILES.
# CONFIGURE_DEPENDS (CMake 3.12+) re-checks the glob at build time, so sources
# added or removed after the initial configure are picked up automatically;
# without it the build silently ignores new files until a manual re-configure.
# NOTE(review): an explicit source list is still the preferred long-term fix.
file(GLOB_RECURSE SOURCE_FILES CONFIGURE_DEPENDS "${CMAKE_CURRENT_LIST_DIR}/*.cpp")

if(NOT USE_W8A16)
    # W8A16 support is disabled: prune the contrib sources gated on it from
    # the build, one REMOVE_ITEM per file (paths relative to this directory).
    set(w8a16_gated_sources
        contrib/layer/flash_attention_layer.cpp
        contrib/layer/paged_attention_layer.cpp
        contrib/model/paged_attention_model.cpp
        contrib/operation/linear_parallel_w8a16.cpp
        contrib/operation/mlp_w8a16.cpp
    )
    foreach(rel_path IN LISTS w8a16_gated_sources)
        list(REMOVE_ITEM SOURCE_FILES "${CMAKE_CURRENT_LIST_DIR}/${rel_path}")
    endforeach()
endif()

# Build the models library as a shared object from the collected sources.
add_library(atb_speed_models SHARED
    ${SOURCE_FILES}
)

# atb and atb_speed_core are implementation details only, so they are linked
# PRIVATE and do not propagate to consumers of atb_speed_models.
target_link_libraries(atb_speed_models
    PRIVATE
        atb
        atb_speed_core
)

# Install the shared library into <prefix>/lib.
install(
    TARGETS atb_speed_models
    DESTINATION lib
)