/*
 * @Author: liuzelin
 * @Date: 2024-04-20 10:39:28
 * @LastEditors: liuzelin
 * @LastEditTime: 2024-05-20 21:23:33
 * @Description: libuv-based thread and thread-pool implementation (ThreadImpl, UVTimer, ThreadManagerImpl, ThreadPoolManagerImpl)
 */
#include "panda_thread_impl.hpp"

#include "panda_log.hpp"
#include "panda_socket_impl.hpp"
#include "panda_resolver_impl.hpp"
#include "panda_utils.hpp"
#include "panda_uv_helper.hpp"
#ifdef PANDA_WIN32
#include <Windows.h>
#include <processthreadsapi.h>
#else
#include <sched.h>
#endif  // PANDA_WIN32
#include <filesystem>
#include <thread>

namespace fs = std::filesystem;

using namespace PandaImpl;

namespace {
  // RAII scope guard: locks `mut` on construction, unlocks on destruction.
  // Project-local equivalent of std::lock_guard; the (misspelled) name is
  // kept because it is used throughout this file.
  template <typename Lock>
  class AutoLockWarper {
  public:
    explicit AutoLockWarper( Lock& mut ) : m_( mut ) { m_.lock(); }
    ~AutoLockWarper() { m_.unlock(); }

    // A copied guard would unlock the same lock twice — forbid copying.
    AutoLockWarper( const AutoLockWarper& ) = delete;
    AutoLockWarper& operator=( const AutoLockWarper& ) = delete;

  private:
    Lock& m_;  // non-owning reference; the lock must outlive the guard
  };

#ifdef PANDA_WIN32
  // Pins the calling thread to a single logical CPU (Windows build).
  void SetNativeThreadAffinity( int cpu_id ) {
    // NOTE(review): DWORD_PTR is 32 bits on x86 builds, so cpu_id >= 32 would
    // shift past the mask there — confirm cpu_id stays below the CPU count.
    DWORD_PTR mask = 1ULL << cpu_id;
    if ( SetThreadAffinityMask( GetCurrentThread(), mask ) == 0 ) {
      LOG( Error ) << "SetNativeThreadAffinity failed:" << GetLastError();
    }
  }

#else
  // Pins the calling thread to a single logical CPU (POSIX build).
  void SetNativeThreadAffinity( int cpu_id ) {
    cpu_set_t mask;
    CPU_ZERO( &mask );
    CPU_SET( cpu_id, &mask );

    // pid 0 means "the calling thread" for sched_setaffinity.
    if ( sched_setaffinity( 0, sizeof( mask ), &mask ) != 0 ) {
      LOG( Error ) << "SetNativeThreadAffinity failed:" << errno;
    }
  }
#endif  // PANDA_WIN32
  
  // Applies `name` as the OS-level name of the calling thread.
  // Fixes: takes the string by const reference (the original copied it on
  // every call) and replaces the unhygienic PANDA_ARRAY_SIZE macro — which
  // had no parentheses and leaked into the rest of this translation unit —
  // with a constexpr bound. The copy loop is now bounded by name.size()
  // instead of relying on reading the terminating '\0' via operator[].
  void SetNativeThreadName( const std::string& name ) {
#ifdef PANDA_WIN32
  // SetThreadDescription expects UTF-16; widen char-by-char, truncating to
  // 63 characters. NOTE(review): assumes the name is ASCII — confirm thread
  // names never contain multi-byte characters.
  constexpr size_t kWideCap = 64;
  wchar_t wide_thread_name[ kWideCap ];
  size_t i = 0;
  for ( ; i < kWideCap - 1 && i < name.size(); ++i ) {
    wide_thread_name[ i ] = static_cast< wchar_t >( name[ i ] );
  }
  wide_thread_name[ i ] = L'\0';  // guarantee null-termination
  SetThreadDescription( ::GetCurrentThread(), wide_thread_name );
#elif PANDA_APPLE
  // macOS can only name the calling thread.
  pthread_setname_np( name.c_str() );
#else
  // Linux limits names to 15 characters; longer names fail with ERANGE.
  pthread_setname_np( pthread_self(), name.c_str() );
#endif  // PANDA_WIN32
  }

  // Thread-local pointer to the Panda::Thread owning the calling OS thread;
  // set in ThreadImpl::PreRun() and cleared in PostRun().
  thread_local Panda::Thread* current = nullptr;
  // Per-thread switch for task cost accounting (consumed in ThreadImpl::Loop).
  thread_local bool calculate_task_cost_ = true;
  // Longest the loop timer sleeps between wake-ups, in milliseconds.
  uint64_t threadMaxSleepMs = 50;
  // Queue-delay threshold (ms) beyond which a task logs an "Exec Too Low" warning.
  uint64_t threadTaskExecTooLow = 500;
}  // namespace

// Binds a uv timer handle to the owning thread's loop and registers `this`
// as the handle's user data so UVHelper can route callbacks back here.
UVTimer::UVTimer( ThreadImpl* thread ) {
  this->thread_ = thread;
  this->handler = &this->timer_;
  this->isClosed = false;
  const int rc = uv_timer_init( ( uv_loop_t* )this->thread_->GetLoop(), &this->timer_ );
  PandaCheck( rc == 0 ) << " UVTimer Ret:" << rc
    << " Error:" << uv_err_name( rc );
  uv_handle_set_data( ( uv_handle_t* )&this->timer_, this );
}

void UVTimer::Start( uint64_t timeOutMs, OnTimer&& onTimer ) {
  if ( this->isClosed || this->isClosing ) return;
  int ret = uv_timer_start( &this->timer_, &UVHelper::UVTimerCb, timeOutMs, 0 );
  PandaCheck( ret == 0 ) << " UVTimer Start Ret:" << ret
    << " Error:" << uv_err_name( ret );
  if ( ret == 0 ) {
    this->onTimer_ = std::forward<OnTimer>( onTimer );
  }
}
void UVTimer::Stop() {
  int ret = uv_timer_stop( &this->timer_ );
  PandaCheck( ret == 0 ) << " Ret:" << ret << " Error:" << uv_err_name( ret );
}

// Stops (and joins) a still-running worker thread; the main-thread wrapper
// is never stopped from the destructor.
ThreadImpl::~ThreadImpl() {
  if ( !this->isMain_ && this->isRunning_ ) Stop();
}

bool ThreadImpl::IsCurrent() const { return this == current; }
// Shuts the thread down: stops the uv loop and joins the worker.
// Must be called from a DIFFERENT thread — join() on self would deadlock.
void ThreadImpl::Stop() {
  PandaCheck( IsCurrent() == false ) << " Thread Stop Can`t On This Thread";
  this->isRunning_ = false;
  uv_stop( ( uv_loop_t* )&this->uvLoop_ );
  if ( this->isMain_ == false ) {
    this->thread_->join();
    // BUG FIX: the original called release(), which abandoned ownership and
    // leaked the std::thread object on every Stop(). reset() destroys it
    // (safe after join()).
    this->thread_.reset();
  }
}

// Constructs a UDP socket owned by this thread; off-thread callers are
// marshalled onto the owning thread via Invoke().
Panda::UDPSocket* ThreadImpl::CreateUDPSocket() {
  if ( IsCurrent() == false ) {
    return this->Invoke<Panda::UDPSocket*>(
      PandaFromHere, [ this ]() { return new UDPSocketImpl( this ); } );
  }
  return new UDPSocketImpl( this );
}
// Constructs a TCP socket owned by this thread; off-thread callers are
// marshalled onto the owning thread via Invoke().
Panda::TCPSocket* ThreadImpl::CreateTCPSocket() {
  if ( IsCurrent() == false ) {
    return this->Invoke<Panda::TCPSocket*>(
      PandaFromHere, [ this ]() { return new TCPSocketImpl( this ); } );
  }
  return new TCPSocketImpl( this );
}

// Constructs a timer bound to this thread's uv loop; off-thread callers are
// marshalled onto the owning thread via Invoke().
Panda::Timer* ThreadImpl::CreateTimer() {
  if ( IsCurrent() == false ) {
    return this->Invoke<Panda::Timer*>( PandaFromHere,
      [ this ]() { return new UVTimer( this ); } );
  }
  return new UVTimer( this );
}

// Constructs a DNS resolver owned by this thread; off-thread callers are
// marshalled onto the owning thread via Invoke().
Panda::Resolver* PandaImpl::ThreadImpl::CreateResolver() {
  if ( IsCurrent() == false ) {
    return this->Invoke<Panda::Resolver*>( PandaFromHere,
      [ this ]() { return new ResolverImpl( this ); } );
  }
  return new ResolverImpl( this );
}

// Queues `func` on this thread and wakes its loop.
// Blocks the caller until the target thread has started.
void ThreadImpl::PostTask( Panda::TaskLocation location, Panda::PandaTask func ) {
  // BUG FIX: the original spun in a tight empty loop while waiting for the
  // thread to come up, pegging a core; yield so the starting thread gets CPU.
  // NOTE(review): isRunning_ does not look atomic here — confirm its type in
  // the header; a plain bool polled cross-thread is a data race.
  while ( isRunning_ == false ) {
    std::this_thread::yield();
  }
  {
    // Enqueue under the lock; the task runs later, outside it (see Loop()).
    AutoLockWarper lock( this->tasksMutex_ );
    this->tasks_.emplace( std::move( func ), location );
  }
  // Wake the uv loop so the task is picked up promptly.
  this->event_->Set();
}
// Drops every pending delay task registered under `key`. Always runs on the
// owning thread; off-thread callers are marshalled via Invoke().
void ThreadImpl::ClearDelayTask( void* key ) {
  auto eraseForKey = [ & ]() {
    AutoLockWarper lock( this->delayTasksMutex_ );
    auto found = this->delayTasks_.find( key );
    if ( found != this->delayTasks_.end() ) this->delayTasks_.erase( found );
  };
  if ( IsCurrent() ) return eraseForKey();
  return this->Invoke( PandaFromHere, std::move( eraseForKey ) );
}
// Schedules `func` to run after `delay` ms, grouped under `key` so it can be
// cancelled via ClearDelayTask(). Wakes the loop to recompute its deadline.
void ThreadImpl::PostDelayTask( Panda::TaskLocation location,
  Panda::PandaTask func, uint64_t delay,
  void* key ) {
  {
    AutoLockWarper lock( this->delayTasksMutex_ );
    // Find or create the per-key bucket.
    auto found = this->delayTasks_.find( key );
    if ( found == this->delayTasks_.end() ) {
      auto inserted = this->delayTasks_.emplace( key, DelayTasks() );
      PandaCheck( inserted.second ) << " Insert DelayTask Failed";
      found = inserted.first;
    }
    // Keyed by absolute deadline: now + delay, in milliseconds.
    found->second.emplace( uint64_t( delay + Panda::Time::NowMs() ),
      DelayTaskImpl( std::move( func ), location, delay ) );
  }
  this->event_->Set();
}

void ThreadImpl::PreRun() {
  current = this;
  int ret = uv_loop_init( ( uv_loop_t* )&uvLoop_ );
  PandaCheck( ret == 0 ) << " Ret:" << ret << " Error:" << uv_err_name( ret );
  uv_handle_set_data( ( uv_handle_t* )&this->uvLoop_, this );
  SetNativeThreadName( this->name_ );
  this->event_.reset( new UVAsync( this ) );
  this->timer_.reset( new UVTimer( this ) );
}

// Thread main: pump the uv loop until Stop(), then discard leftover work.
void ThreadImpl::Run() {
  // Worker threads do their setup here; the main-thread wrapper's loop is
  // initialized elsewhere.
  if ( this->isMain_ == false ) PreRun();
  isRunning_ = true;
  LOG( Debug ) << this->name_ << " Thread Start";
  // Arm the first tick; Loop() re-arms the timer itself, so this single
  // Start() drives the whole scheduling loop.
  this->timer_->Start( threadMaxSleepMs, [ this ]( Panda::Timer* t ) {
    PandaCheck( this->timer_.get() == t ) << " Unknow Timer Event";
    this->Loop();
    } );
  // uv_run returns 0 (no live handles) or non-zero after uv_stop().
  int ret = uv_run( &this->uvLoop_, UV_RUN_DEFAULT );
  PandaCheck( ret == 0 || ret == 1 )
    << " Ret:" << ret << " Error:" << uv_err_name( ret );
  // Drop tasks that never got to run; posters must tolerate cancellation.
  {
    AutoLockWarper lock( this->tasksMutex_ );
    while ( this->tasks_.empty() == false ) this->tasks_.pop();
  }
  {
    AutoLockWarper lock( this->delayTasksMutex_ );
    this->delayTasks_.clear();
  }
  LOG( Debug ) << this->name_ << " Thread Exit";
  PostRun();
}

void ThreadImpl::Loop() {
  uint64_t nowMs = Panda::Time::NowMs();
  while ( this->isRunning_ ) {
    Panda::PandaTask task;
    Panda::TaskLocation location;
    bool hasTask = false;
    {
      AutoLockWarper lock( this->tasksMutex_ );
      if ( this->tasks_.empty() == false ) {
        TaskImpl& front = this->tasks_.front();
        task = std::move( front.task );
        if ( nowMs - front.create_us/1000 > threadTaskExecTooLow ) {
          LOG( Warning ) << "Thread:" << this->name_.c_str() << " Task "
            << front.location.Function() << ":"
            << front.location.Line() << " Exec Too Low "
            << ( nowMs - front.create_us / 1000 ) << " Ms";
        }
        location = front.location;
        this->tasks_.pop();
        hasTask = this->tasks_.empty() == false;
      }
    }
    if ( task ) {
      uint64_t start_us = 0;
      if ( calculate_task_cost_ ) start_us = Panda::Time::NowUs();
      task();
      if ( start_us ) {
        dynamic_cast< ThreadManagerImpl* >( ThreadManagerImpl::Instance() )
          ->TaskAfterExec( this, location, Panda::Time::NowUs() - start_us );
      }
      if ( hasTask ) continue;
    }
    break;
  }

  while ( this->isRunning_ ) {
    Panda::PandaTask task;
    const char* task_func = nullptr;
    const char* task_file = nullptr;
    int task_line = 0;
    {
      AutoLockWarper lock( this->delayTasksMutex_ );
      for ( auto& tasks : this->delayTasks_ ) {
        for ( DelayTask it = tasks.second.begin(); it != tasks.second.end();
          ++it ) {
          if ( nowMs >= it->first ) {
            DelayTaskImpl& front = it->second;
            if ( nowMs - front.create_us / 1000 > front.delay + threadTaskExecTooLow ) {
              LOG( Warning ) << "Thread:" << this->name_.c_str() << " Task "
                << front.location.Function() << ":"
                << front.location.Line() << " Exec Too Low "
                << ( nowMs - front.create_us / 1000 - front.delay ) << " Ms";
            }
            task = std::move( front.task );
            task_func = front.location.Function();
            task_file = front.location.File();
            task_line = front.location.Line();
            tasks.second.erase( it );
            break;
          }
        }
      }
    }
    if ( task ) {
      try {
        task();
      }
      catch ( const std::exception& e ) {
        LOG( Fatal ) << "Run Task:" << task_file << ":" << task_line << " "
          << task_func << e.what();
      }

      continue;
    }
    break;
  }

  uint64_t waitUpUs = threadMaxSleepMs;
  {
    AutoLockWarper lock( this->delayTasksMutex_ );
    for ( auto& que : this->delayTasks_ ) {
      if ( !que.second.empty() ) {
        waitUpUs = std::min( waitUpUs, que.second.begin()->first - nowMs );
      }
    }
  }
  this->timer_->Start( waitUpUs, [ this ]( Panda::Timer* t ) {
    PandaCheck( this->timer_.get() == t ) << " Unknow Timer Event";
    this->Loop();
    } );
}

// Teardown on the loop thread after uv_run returns: mark each handle closed
// before destroying it, then clear the thread-local `current` pointer.
void ThreadImpl::PostRun() {
  this->timer_->isClosed = true;
  this->timer_.reset();
  this->event_->isClosed = true;
  this->event_.reset();
  current = nullptr;
}

// Turns off task-cost accounting for the CALLING thread only:
// calculate_task_cost_ is thread_local, so despite being an instance method
// this does not affect the thread this ThreadImpl represents unless invoked
// on that thread. NOTE(review): EnableCalculateTaskCost() calls this from
// the manager's thread before Start() — confirm that is the intended effect.
void ThreadImpl::DisableCalculateTaskCost() {
  calculate_task_cost_ = false;
}

// Meyers-singleton accessor for the process-wide thread manager.
Panda::ThreadManager* Panda::ThreadManager::Instance() {
  static ThreadManagerImpl singleton;
  return &singleton;
}
// Returns the Panda::Thread wrapper for the calling OS thread, lazily
// creating a main-thread wrapper on first use (never freed by design —
// it lives for the lifetime of the thread).
Panda::Thread* ThreadManagerImpl::Current() {
  if ( !current ) current = new ThreadImpl( true );
  return current;
}

// Starts or stops the dedicated thread that aggregates task-cost samples.
void ThreadManagerImpl::EnableCalculateTaskCost( bool enable ) {
  this->enable_calculate_task_cost = enable;
  if ( enable ) {
    this->calculate_task_cost_thread.reset( this->CreateThread() );
    this->calculate_task_cost_thread->SetThreadName( "PandaTaskCalculateThread" );
    {
      // Keep the accounting thread from measuring its own tasks.
      // NOTE(review): DisableCalculateTaskCost() flips a thread_local, so
      // this call affects the CALLING thread, not the new thread — confirm
      // this is the intended behavior.
      ThreadImpl* impl = dynamic_cast< ThreadImpl* >( this->calculate_task_cost_thread.get() );
      impl->DisableCalculateTaskCost();
    }
    this->calculate_task_cost_thread->Start();
  }
  else if ( this->calculate_task_cost_thread ) {
    // BUG FIX: guard against disabling when never enabled (the original
    // dereferenced a null unique_ptr) and use reset() instead of release(),
    // which leaked the thread object.
    this->calculate_task_cost_thread->Stop();
    this->calculate_task_cost_thread.reset();
  }
}

// Intended to dump accumulated task-cost statistics into `dir`.
// NOTE(review): the success branch is an unfinished stub — nothing is
// written yet; only the invalid-directory error path is implemented.
void PandaImpl::ThreadManagerImpl::ExportTaskCostStat( const char* dir ) {
  if ( fs::exists( dir ) && fs::is_directory( dir ) ) {
    // TODO: serialize task_cost_map into files under `dir`.
  }
  else {
    LOG( Error ) << "Export Task Cost Error:" << dir
      << " Not Exist Or Not Directory";
  }
}

// Records the cost of one executed task. Called from the executing thread;
// the sample goes through task_cost_que and is aggregated on the dedicated
// accounting thread, so task_cost_map is only ever touched there.
void ThreadManagerImpl::TaskAfterExec( Panda::Thread* thread,
  Panda::TaskLocation& location,
  int64_t use_us ) {
  if ( this->calculate_task_cost_thread ) {
    TaskCost cost{ location, thread, use_us };
    this->task_cost_que.enqueue( cost );
    // Drain the queue on the accounting thread. One posted drain may consume
    // several samples; later drains then find an empty queue, which is fine.
    this->calculate_task_cost_thread->PostTask( PandaFromHere, [ this ]() {
      TaskCost cost;
      while ( this->task_cost_que.dequeue( cost ) ) {
        auto it = task_cost_map.find( cost.task_location );
        if ( it == task_cost_map.end() ) {
          auto ret = task_cost_map.emplace( cost.task_location, TaskCostStat() );
          if ( ret.second ) {
            it = ret.first;
          }
          else {
            LOG( Error ) << "Add Task:" << cost.task_location.File() << ":" << cost.task_location.Line() << "(" << cost.task_location.Function()
              << ") Failed";
            continue;
          }
        }
        // Aggregate both globally and per posting thread.
        it->second.total_call_num++;
        it->second.total_use_us += cost.use_us;
        it->second.thread_cost_stat[ cost.thread ].total_call_num++;
        it->second.thread_cost_stat[ cost.thread ].total_use_us += cost.use_us;
      }
      } );
  }
}

// Meyers-singleton accessor for the process-wide thread-pool manager.
Panda::ThreadPoolManager* Panda::ThreadPoolManager::Instance() {
  static ThreadPoolManagerImpl singleton;
  return &singleton;
}

// Captures a task plus its posting site and records the enqueue timestamp.
// BUG FIX: the parameter is an rvalue reference, but the original member
// initializer `task( task )` copy-initialized it (an lvalue inside the
// ctor), defeating the point of taking &&. Move it instead.
TaskImpl::TaskImpl( Panda::PandaTask&& task, Panda::TaskLocation location )
  : task( std::move( task ) ), location( location ) {
  create_us = Panda::Time::NowUs();  // enqueue time, microseconds
}

// Delay-task payload: a TaskImpl plus the requested delay in milliseconds.
DelayTaskImpl::DelayTaskImpl( Panda::PandaTask&& task,
  Panda::TaskLocation location, uint64_t delay )
  : TaskImpl( std::move( task ), location ), delay( delay ) {}

// NOTE(review): this "copy" constructor MOVES other.task, leaving the source
// without its callable — presumably to satisfy container requirements
// cheaply. Confirm every copy site treats the source as expendable.
DelayTaskImpl::DelayTaskImpl( DelayTaskImpl& other ) :TaskImpl( std::move( other.task ), other.location ), delay( other.delay ) {}

DelayTaskImpl::DelayTaskImpl( DelayTaskImpl&& other ) :TaskImpl( std::move( other.task ), other.location ), delay( other.delay ) {}

// Spins up the worker pool (`thread_num` workers; 0 means "one per core")
// plus the dispatch thread that wakes sleeping workers.
void PandaImpl::ThreadPoolManagerImpl::Start( int thread_num ) {
  // BUG FIX: hardware_concurrency() may return 0 when the core count is not
  // computable; the original then evaluated `i % num_thread` with a zero
  // divisor (undefined behavior). Fall back to 1.
  int num_thread = static_cast< int >( std::thread::hardware_concurrency() );
  if ( num_thread <= 0 ) num_thread = 1;
  if ( thread_num == 0 )
    thread_num = num_thread;
  for ( int i = 0; i < thread_num; i++ ) {
    // Spread workers across cores round-robin (cpu index passed to handler).
    auto thread = new ThreadHandler( this, i % num_thread );
    this->threads.emplace_back( thread );
  }

  dispatch_thread.reset( new ThreadImpl( false ) );
  dispatch_thread->SetThreadName( "pool_dispatch" );
  dispatch_thread->Start();
}

// Queues `func` for the pool, then has the dispatch thread wake all sleeping
// workers so one of them can claim it.
void PandaImpl::ThreadPoolManagerImpl::PostTask( Panda::TaskLocation location,
  Panda::PandaTask func ) {
  tasks_.enqueue( std::move( func ), location );
  auto wakeWorkers = [ this ]() { dispatch_event.SetAll(); };
  dispatch_thread->PostTask( PandaFromHere, wakeWorkers );
}


// Blocks until a task can be dequeued or `handler` is told to stop; returns
// a default-constructed (empty) TaskImpl in the stop case.
PandaImpl::TaskImpl PandaImpl::ThreadPoolManagerImpl::PollTask( ThreadHandler* handler ) {
  TaskImpl polled;
  // Dequeue is attempted before the running check so a final pending task is
  // still delivered during shutdown (same order as the stop-flag test).
  while ( tasks_.dequeue( polled ) == false && handler->running ) {
    dispatch_event.Wait();
  }
  return polled;
}

// Accumulates per-location execution stats on the dispatch thread.
void PandaImpl::ThreadPoolManagerImpl::AfterExecTask( Panda::TaskLocation location, uint64_t task_start, uint64_t task_create ) {
  uint64_t now_us = Panda::Time::NowUs();
  // BUG FIX: the lambda runs asynchronously on the dispatch thread, after
  // this frame has returned — the original's [&] capture left it holding
  // dangling references to the parameters and `now_us`. Capture by value.
  this->dispatch_thread->PostTask( PandaFromHere,
    [ this, location, now_us, task_start, task_create ]() {
      TaskExecInfo& info = task_exec_info[ location ];
      info.total_call_num++;
      info.total_exec_delay += now_us - task_create;  // queue latency
      info.total_use_us += now_us - task_start;       // execution time
    } );
}

// Worker-thread entry: set identity, optionally pin to a CPU, then keep
// pulling and executing tasks until `running` is cleared.
void PandaImpl::ThreadPoolManagerImpl::ThreadHandler::ThreadFunc() {
  running = true;
  current = nullptr;  // pool workers are not Panda::Thread instances
  SetNativeThreadName( "pool_thread" );
  if ( cpu_index >= 0 ) {
    SetNativeThreadAffinity( cpu_index );
  }
  while ( running ) {
    auto polled = this->manager->PollTask( this );
    if ( !polled.task ) continue;  // empty result: woken for shutdown
    uint64_t begin_us = Panda::Time::NowUs();
    polled.task();
    this->manager->AfterExecTask( polled.location, begin_us, polled.create_us );
  }
}