module fiona_output_mod

  use fiona_common_mod
  use fiona_buffer_mod
  use fiona_dataset_mod
  use fiona_objects_mod

  implicit none

  interface fiona_add_att
    module procedure fiona_add_att_1
    module procedure fiona_add_att_2
  end interface fiona_add_att

  interface fiona_output
    module procedure fiona_output_s
    module procedure fiona_output_0d_i2
    module procedure fiona_output_0d_i4
    module procedure fiona_output_0d_r4
    module procedure fiona_output_0d_r8
    module procedure fiona_output_1d_i2
    module procedure fiona_output_1d_i4
    module procedure fiona_output_1d_r4
    module procedure fiona_output_1d_r8
    module procedure fiona_output_2d_i2
    module procedure fiona_output_2d_i4
    module procedure fiona_output_2d_r4
    module procedure fiona_output_2d_r8
    module procedure fiona_output_3d_i2
    module procedure fiona_output_3d_i4
    module procedure fiona_output_3d_r4
    module procedure fiona_output_3d_r8
    module procedure fiona_output_4d_i2
    module procedure fiona_output_4d_i4
    module procedure fiona_output_4d_r4
    module procedure fiona_output_4d_r8
    module procedure fiona_output_5d_i2
    module procedure fiona_output_5d_i4
    module procedure fiona_output_5d_r4
    module procedure fiona_output_5d_r8
    module procedure fiona_output_6d_i2
    module procedure fiona_output_6d_i4
    module procedure fiona_output_6d_r4
    module procedure fiona_output_6d_r8
    module procedure fiona_output_7d_i2
    module procedure fiona_output_7d_i4
    module procedure fiona_output_7d_r4
    module procedure fiona_output_7d_r8
  end interface fiona_output

  logical :: print_serial_output_warning = .false.

contains

  subroutine fiona_create_dataset(dataset_name, desc, file_prefix, file_path, start_time, time_units, time_step, mpi_comm, ngroups, async)

    ! Create a dataset object and register it in the global datasets table,
    ! or update an already-registered dataset of the same name in place.
    !
    ! file_prefix and file_path are alternative ways to name the output file;
    ! when exactly one of them is given it is recorded on the dataset.
    ! NOTE(review): when BOTH are given, neither branch below fires and the
    ! dataset keeps its previous file naming — confirm this is intentional.

    character(*), intent(in) :: dataset_name           ! Unique dataset key.
    character(*), intent(in), optional :: desc         ! Human-readable description.
    character(*), intent(in), optional :: file_prefix  ! Output file name prefix.
    character(*), intent(in), optional :: file_path    ! Full output file path.
    character(*), intent(in), optional :: start_time   ! Time origin string; only used together with time_units.
    character(*), intent(in), optional :: time_units   ! One of 'days', 'sol', 'hours', 'minutes', 'seconds'.
    integer, intent(in), optional :: time_step         ! Initial time step counter.
    integer, intent(in), optional :: mpi_comm          ! MPI communicator for parallel IO.
    integer, intent(in), optional :: ngroups           ! Number of IO groups.
    logical, intent(in), optional :: async             ! Whether to use asynchronous IO.

    character(256) file_prefix_, file_path_
    type(dataset_type), pointer :: dataset
    logical is_exist

    ! Reuse the registered dataset when present; otherwise build a fresh one
    ! (it is copied into the hash table at the end of this routine).
    if (datasets%hashed(dataset_name)) then
      dataset => get_dataset(dataset_name)
      is_exist = .true.
    else
      allocate(dataset)
      call dataset%init(dataset_name, desc, mpi_comm, ngroups, async=async)
      is_exist = .false.
    end if

    file_prefix_ = ''
    if (present(file_prefix)) file_prefix_ = file_prefix
    file_path_ = ''
    if (present(file_path)) file_path_ = file_path

    if (file_prefix_ /= '' .and. file_path_ == '') then
      dataset%file_prefix = trim(file_prefix_) // '.' // trim(dataset_name)
    else if (file_prefix_ == '' .and. file_path_ /= '') then
      dataset%file_path = file_path_
    end if

    ! Time metadata: an explicit start_time/time_units pair overrides the
    ! module-level defaults (time_units_in_seconds etc. from the common module).
    if (present(start_time) .and. present(time_units)) then
      select case (time_units)
      case ('days', 'sol')
        dataset%time_units_in_seconds = 86400.0
      case ('hours')
        dataset%time_units_in_seconds = 3600.0
      case ('minutes')
        dataset%time_units_in_seconds = 60.0
      case ('seconds')
        dataset%time_units_in_seconds = 1.0
      case default
        call log_error('Invalid time_units ' // trim(time_units) // '!')
      end select
      dataset%start_time_str = start_time
      dataset%time_units_str = time_units
    else
      dataset%time_units_in_seconds = time_units_in_seconds
      dataset%start_time_str = start_time_str
      dataset%time_units_str = time_units_str
    end if

    if (present(time_step)) dataset%time_step = time_step

    if (.not. is_exist) then
      ! insert copies the object into the table, so the local allocation
      ! can be released right away.
      call datasets%insert(dataset%name, dataset)
      deallocate(dataset)
    end if

  end subroutine fiona_create_dataset

  subroutine fiona_add_att_1(dataset_name, name, value)

    ! Attach a global (dataset-level) attribute to a registered dataset.
    ! The unlimited polymorphic value is stored as-is; supported types are
    ! resolved when the file header is written.

    character(*), intent(in) :: dataset_name
    character(*), intent(in) :: name
    class(*), intent(in) :: value

    type(dataset_type), pointer :: ds

    ds => get_dataset(dataset_name)
    call ds%atts%insert(name, value)

  end subroutine fiona_add_att_1

  subroutine fiona_add_att_2(dataset_name, var_name, name, value)

    ! Attach an attribute to one variable of a registered dataset.

    character(*), intent(in) :: dataset_name
    character(*), intent(in) :: var_name
    character(*), intent(in) :: name
    class(*), intent(in) :: value

    type(dataset_type), pointer :: ds
    type(var_type), pointer :: target_var

    ds => get_dataset(dataset_name)
    target_var => ds%get_var(var_name)
    call target_var%atts%insert(name, value)

  end subroutine fiona_add_att_2

  subroutine fiona_add_dim(dataset_name, name, long_name, units, size, add_var, decomp)

    ! Register a dimension on a dataset. A dimension named 'time'/'Time'
    ! without explicit units gets "units since start_time" derived from the
    ! dataset's time metadata (with defaults filled in when missing) and is
    ! remembered as the dataset's time dimension. Optionally a coordinate
    ! variable of the same name is also added.

    character(*), intent(in) :: dataset_name
    character(*), intent(in) :: name
    character(*), intent(in), optional :: long_name
    character(*), intent(in), optional :: units
    integer, intent(in), optional :: size
    logical, intent(in), optional :: add_var  ! Also create a coordinate variable?
    logical, intent(in), optional :: decomp

    type(dataset_type), pointer :: ds
    type(dim_type) new_dim
    logical is_time_dim, want_var

    ds => get_dataset(dataset_name)
    ! Registering the same dimension twice is a no-op.
    if (ds%dims%hashed(name)) return

    is_time_dim = name == 'time' .or. name == 'Time'

    ! Absent optionals are forwarded unchecked, which is legal for
    ! optional-to-optional argument association.
    call new_dim%init(name=name, long_name=long_name, units=units, size=size, decomp=decomp)

    if (.not. present(units) .and. is_time_dim) then
      ! Fall back to default time metadata when the dataset has none yet.
      if (ds%time_units_str == 'N/A') then
        ds%time_units_str = 'hours'
        ds%time_units_in_seconds = 3600.0d0
      end if
      if (ds%start_time_str == 'N/A') then
        ds%start_time_str = '1970-01-01'
      end if
      write(new_dim%units, '(A, " since ", A)') trim(ds%time_units_str), trim(ds%start_time_str)
    end if

    call ds%dims%insert(name, new_dim)

    want_var = .false.
    if (present(add_var)) want_var = add_var
    if (want_var) then
      call fiona_add_var(dataset_name, name, long_name=new_dim%long_name, units=new_dim%units, dim_names=[name], dtype='r8')
    end if

    if (is_time_dim) ds%time_dim => ds%get_dim(name)

  end subroutine fiona_add_dim

  subroutine fiona_add_var(dataset_name, name, long_name, units, dim_names, dtype, missing_value)

    ! Register a variable on a dataset and bind it to previously added
    ! dimensions. Registering the same variable twice is a no-op. A variable
    ! named 'time'/'Time' is remembered as the dataset's time variable.
    !
    ! Fix: removed the stray declaration "real real", which created an
    ! unused local variable named "real" that shadowed the intrinsic
    ! conversion function of the same name.

    character(*), intent(in) :: dataset_name
    character(*), intent(in) :: name
    character(*), intent(in) :: long_name
    character(*), intent(in) :: units
    character(*), intent(in) :: dim_names(:)       ! Names of dimensions, slowest-varying last.
    character(*), intent(in), optional :: dtype    ! e.g. 'r8'; see var_type%init.
    class(*), intent(in), optional :: missing_value

    type(dataset_type), pointer :: dataset
    type(var_type), pointer :: var
    type(hash_table_iterator_type) it
    type(dim_type), pointer :: dim
    integer i
    logical found

    dataset => get_dataset(dataset_name)

    if (dataset%vars%hashed(name)) return

    ! There are pointers (missing_value) in var object, so we need to get the
    ! object inserted into hash table and initialize it in place.
    allocate(var)
    call dataset%vars%insert(name, var)
    deallocate(var)
    var => dataset%get_var(name)

    call var%init(name=name, ndim=size(dim_names), long_name=long_name, units=units, dtype=dtype, missing_value=missing_value)

    ! Resolve each requested dimension name against the dataset's dimension
    ! table; a miss is a configuration error.
    do i = 1, size(dim_names)
      found = .false.
      it = hash_table_iterator(dataset%dims)
      do while (.not. it%ended())
        dim => dataset%get_dim(it%key)
        if (dim%name == trim(dim_names(i))) then
          if (dim%size == NF90_UNLIMITED) var%has_unlimited_dim = .true.
          var%dims(i)%ptr => dim
          found = .true.
          exit
        end if
        call it%next()
      end do
      if (.not. found) then
        call log_error('Unknown dimension ' // trim(dim_names(i)) // ' for variable ' // trim(name) // '!', __FILE__, __LINE__)
      end if
    end do

    if (name == 'Time' .or. name == 'time') dataset%time_var => dataset%get_var(name)

  end subroutine fiona_add_var

  subroutine fiona_start_output(dataset_name, time_in_seconds, new_file, tag)

    ! Prepare a dataset for one round of output: create or reopen its NetCDF
    ! file (parallel when built with MPI, with a serial fallback), write the
    ! global attributes, define any dimensions/variables not yet in the file,
    ! and advance/write the time coordinate.
    !
    ! Only the IO-group root touches the file; all other ranks return
    ! immediately.

    character(*), intent(in) :: dataset_name          ! Key of a dataset created by fiona_create_dataset.
    real(8), intent(in), optional :: time_in_seconds  ! Model time of this output; required when the dataset has a time variable.
    logical, intent(in), optional :: new_file         ! Create a fresh file (default .true.) instead of appending to the last one.
    character(*), intent(in), optional :: tag         ! Extra tag spliced into the output file name.

    logical new_file_opt
    character(256) file_path
    type(dataset_type), pointer :: dataset
    type(dim_type), pointer :: dim
    type(var_type), pointer :: var
    type(hash_table_iterator_type) it1, it2
    integer, allocatable :: dimids(:)
    integer i, ierr, old_mode

    new_file_opt = .true.; if (present(new_file)) new_file_opt = new_file

    dataset => get_dataset(dataset_name)

    if (.not. dataset%is_group_root) return

    ! Only group root needs to create or open file for writing.
    ! (The guard above already returned for non-roots, so this test is
    ! redundant but kept for clarity.)
    if (dataset%is_group_root) then
      ! Set file_path from file_path/file_prefix, optionally inserting tag
      ! before the '.nc' suffix.
      if (present(tag)) then
        if (dataset%file_path /= 'N/A') then
          file_path = trim(delete_string(dataset%file_path, '.nc')) // '.' // trim(tag) // '.nc'
        else
          write(file_path, "(A, '.', A, '.nc')") trim(dataset%file_prefix), trim(tag)
        end if
      else
        if (dataset%file_path /= 'N/A') then
          file_path = dataset%file_path
        else
          write(file_path, "(A, '.nc')") trim(dataset%file_prefix)
        end if
      end if
      ! Create or open file.
      if (new_file_opt) then
        ! Create file for output.
#ifdef HAS_MPI
        if (.not. dataset%is_parallel) then
          ! When user does not provide MPI communicator, we fall back to serial IO.
          ierr = NF90_CREATE(file_path, NF90_NETCDF4, dataset%id)
        else
          ierr = NF90_CREATE(file_path, ior(NF90_NETCDF4, NF90_MPIIO), dataset%id, &
                             comm=dataset%mpi_comm, info=MPI_INFO_NULL)
        end if
#else
        ierr = NF90_CREATE(file_path, NF90_NETCDF4, dataset%id); dataset%is_parallel = .false.
#endif
        if (ierr == -114) then ! NetCDF: Parallel operation on file opened for non-parallel access
          ! The NetCDF library was built without parallel support; retry serially.
          if (.not. print_serial_output_warning) then
            call log_warning('Fall back to serial IO due to the NetCDF library does not enable parallel!')
            print_serial_output_warning = .true.
          end if
          ierr = NF90_CREATE(file_path, NF90_NETCDF4, dataset%id); dataset%is_parallel = .false.
        end if
        call handle_error(ierr, 'Failed to create NetCDF file to output!', __FILE__, __LINE__)
      else
        ! Open file for output (append to the previously created file).
        if (dataset%last_file_path == '') dataset%last_file_path = file_path
        file_path = dataset%last_file_path
#ifdef HAS_MPI
        if (.not. dataset%is_parallel) then
          ierr = NF90_OPEN(file_path, ior(NF90_NETCDF4, NF90_WRITE), dataset%id); dataset%is_parallel = .false.
        else
          ierr = NF90_OPEN(file_path, ior(NF90_NETCDF4, ior(NF90_WRITE, NF90_MPIIO)), dataset%id, &
            comm=dataset%mpi_comm, info=MPI_INFO_NULL)
        end if
#else
        ierr = NF90_OPEN(file_path, ior(NF90_NETCDF4, NF90_WRITE), dataset%id); dataset%is_parallel = .false.
#endif
        if (ierr == -114) then ! NetCDF: Parallel operation on file opened for non-parallel access
          if (.not. print_serial_output_warning) then
            call log_warning('Fall back to serial IO due to the NetCDF library does not enable parallel!')
            print_serial_output_warning = .true.
          end if
          ierr = NF90_OPEN(file_path, ior(NF90_NETCDF4, NF90_WRITE), dataset%id); dataset%is_parallel = .false.
        end if
        call handle_error(ierr, 'Failed to open NetCDF file to output!', __FILE__, __LINE__)
        ! Reopened files start in data mode; re-enter define mode so the
        ! attribute/dimension/variable definitions below are legal.
        ierr = NF90_REDEF(dataset%id)
        call handle_error(ierr, 'Failed to enter definition mode!', __FILE__, __LINE__)
      end if
      ! Standard global attributes.
      ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, 'dataset', dataset%name)
      call handle_error(ierr, 'Failed to write dataset global attribute to ' // trim(file_path) // '!', __FILE__, __LINE__)
      ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, 'desc', dataset%desc)
      call handle_error(ierr, 'Failed to write desc global attribute to ' // trim(file_path) // '!', __FILE__, __LINE__)
      ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, 'author', dataset%author)
      call handle_error(ierr, 'Failed to write author global attribute to ' // trim(file_path) // '!', __FILE__, __LINE__)

      ! User-supplied global attributes, dispatched on the stored dynamic
      ! type; logicals are converted to strings since NetCDF has no logical
      ! attribute type. Unsupported types are silently skipped.
      it1 = hash_table_iterator(dataset%atts)
      do while (.not. it1%ended())
        select type (value => it1%value)
        type is (integer(2))
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, value)
        type is (integer(4))
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, value)
        type is (real(4))
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, value)
        type is (real(8))
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, value)
        type is (character(*))
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, value)
        type is (logical)
          ierr = NF90_PUT_ATT(dataset%id, NF90_GLOBAL, it1%key, to_str(value))
        end select
        call it1%next()
      end do

      ! Define dimensions that are not yet in the file (reopened files
      ! already have them, so the inquiry guards against redefinition).
      it1 = hash_table_iterator(dataset%dims)
      do while (.not. it1%ended())
        dim => dataset%get_dim(it1%key)
        ierr = NF90_INQ_DIMID(dataset%id, dim%name, dim%id)
        if (ierr /= NF90_NOERR) then
          ierr = NF90_DEF_DIM(dataset%id, dim%name, dim%size, dim%id)
          call handle_error(ierr, 'Failed to define dimension ' // trim(dim%name) // '!', __FILE__, __LINE__)
        end if
        call it1%next()
      end do

      ! Define variables that are not yet in the file, along with their
      ! standard attributes (long_name, units, _FillValue) and any
      ! user-supplied character attributes.
      it1 = hash_table_iterator(dataset%vars)
      do while (.not. it1%ended())
        var => dataset%get_var(it1%key)
        ierr = NF90_INQ_VARID(dataset%id, var%name, var%id)
        if (ierr /= NF90_NOERR) then
          allocate(dimids(size(var%dims)))
          do i = 1, size(var%dims)
            dimids(i) = var%dims(i)%ptr%id
          end do
          ierr = NF90_DEF_VAR(dataset%id, var%name, var%dtype, dimids(1:size(var%dims)), var%id)
          call handle_error(ierr, 'Failed to define variable ' // trim(var%name) // '!', __FILE__, __LINE__)
          deallocate(dimids)
          ierr = NF90_PUT_ATT(dataset%id, var%id, 'long_name', trim(var%long_name))
          ierr = NF90_PUT_ATT(dataset%id, var%id, 'units', trim(var%units))
          ! Exactly one of these missing-value pointers is associated (if any),
          ! depending on the variable's declared dtype.
          if (associated(var%i4_missing_value)) then
            ierr = NF90_PUT_ATT(dataset%id, var%id, '_FillValue', var%i4_missing_value)
          else if (associated(var%i8_missing_value)) then
            ierr = NF90_PUT_ATT(dataset%id, var%id, '_FillValue', var%i8_missing_value)
          else if (associated(var%r4_missing_value)) then
            ierr = NF90_PUT_ATT(dataset%id, var%id, '_FillValue', var%r4_missing_value)
          else if (associated(var%r8_missing_value)) then
            ierr = NF90_PUT_ATT(dataset%id, var%id, '_FillValue', var%r8_missing_value)
          end if
          call handle_error(ierr, 'Failed to put attribute _FillValue for variable ' // trim(var%name) // '!', __FILE__, __LINE__)
          it2 = hash_table_iterator(var%atts)
          do while (.not. it2%ended())
            select type (value => it2%value)
            type is (character(*))
              ierr = NF90_PUT_ATT(dataset%id, var%id, it2%key, value)
            end select
            call it2%next()
          end do
        end if
        call it1%next()
      end do

      ierr = NF90_ENDDEF(dataset%id)
      call handle_error(ierr, 'Failed to end definition!', __FILE__, __LINE__)

      if (new_file_opt) then
        dataset%time_step = 0 ! Reset to zero!
        dataset%last_file_path = file_path
      end if

      ! Write time dimension variable.
      if (associated(dataset%time_var)) then
        if (.not. present(time_in_seconds)) then
          call log_error('Time in seconds is needed!', __FILE__, __LINE__)
        end if
        ! Read time information from existing file.
        ! NOTE(review): this passes the time *variable* id where
        ! NF90_INQUIRE_DIMENSION expects a dimension id; the two ids may
        ! coincide by construction here, but confirm whether
        ! dataset%time_dim%id was intended.
        if (dataset%time_step == 0) then
          ierr = NF90_INQUIRE_DIMENSION(dataset%id, dataset%time_var%id, len=dataset%time_step)
        end if
        ! Only advance and write time when it actually changed since the
        ! last output.
        if (time_in_seconds /= dataset%time_in_seconds) then
          ! Unlimited time dimension: append a new record; fixed-size: always record 1.
          dataset%time_step = merge(dataset%time_step + 1, 1, dataset%time_dim%size == NF90_UNLIMITED)
          dataset%time_in_seconds = time_in_seconds
          ! Update time units because restart may change it.
          write(dataset%time_var%units, '(A, " since ", A)') trim(dataset%time_units_str), trim(dataset%start_time_str)
#ifdef HAS_MPI
          if (dataset%mpi_comm /= MPI_COMM_NULL .and. dataset%is_parallel) then
            ierr = NF90_VAR_PAR_ACCESS(dataset%id, dataset%time_var%id, NF90_COLLECTIVE)
            call handle_error(ierr, 'Failed to set parallel access for variable time!', __FILE__, __LINE__)
          end if
#endif
          ierr = NF90_PUT_ATT(dataset%id, dataset%time_var%id, 'units', trim(dataset%time_var%units))
          call handle_error(ierr, 'Failed to add attribute to variable time!', __FILE__, __LINE__)
          ierr = NF90_PUT_VAR(dataset%id, dataset%time_var%id, [time_in_seconds / dataset%time_units_in_seconds], [dataset%time_step], [1])
          call handle_error(ierr, 'Failed to write variable time!', __FILE__, __LINE__)
        end if
      end if

      ! Disable prefilling: every cell is expected to be written explicitly.
      ierr = NF90_SET_FILL(dataset%id, NF90_NOFILL, old_mode)
      call handle_error(ierr, 'Failed to set fill mode!', __FILE__, __LINE__)
    end if ! dataset%is_group_root

  end subroutine fiona_start_output

  subroutine fiona_output_s(dataset_name, var_name, value, start, count, fatal)

    ! Write a character-valued variable over the given start/count window.
    ! Only the IO-group root writes. A failed write is fatal only when
    ! requested; otherwise it is logged as a warning and execution continues.

    character(*), intent(in) :: dataset_name
    character(*), intent(in) :: var_name
    character(*), intent(in) :: value
    integer, intent(in) :: start(2)
    integer, intent(in) :: count(2)
    logical, intent(in), optional :: fatal  ! Abort on write failure? Default .false.

    type(dataset_type), pointer :: ds
    type(var_type), pointer :: out_var
    logical abort_on_error
    integer status

    abort_on_error = .false.
    if (present(fatal)) abort_on_error = fatal

    ds => get_dataset(dataset_name)
    out_var => ds%get_var(var_name)

    if (.not. ds%is_group_root) return

    status = NF90_PUT_VAR(ds%id, out_var%id, value, start=start, count=count)
    if (abort_on_error) then
      call handle_error(status, "Failed to write variable """ // trim(out_var%name) // """ to dataset """ // &
                        trim(ds%file_path) // """! " // trim(NF90_STRERROR(status)), __FILE__, __LINE__)
    else if (status /= NF90_NOERR) then
      call log_warning("Failed to write variable """ // trim(out_var%name) // """ to dataset """ // &
                       trim(ds%file_path) // """! " // trim(NF90_STRERROR(status)), __FILE__, __LINE__)
    end if

  end subroutine fiona_output_s

  subroutine fiona_end_output(dataset_name, keep_dataset)

    ! Flush and close a dataset's NetCDF file (group root only) and, unless
    ! keep_dataset is .true., drop the dataset from the global registry.

    character(*), intent(in) :: dataset_name
    logical, intent(in), optional :: keep_dataset  ! Keep the registry entry? Default .false.

    type(dataset_type), pointer :: ds
    logical keep_it
    integer status

    keep_it = .false.
    if (present(keep_dataset)) keep_it = keep_dataset

    ds => get_dataset(dataset_name)

    if (ds%is_group_root) then
      ! Make sure buffered records reach disk before closing.
      status = NF90_SYNC(ds%id)
      if (status /= NF90_NOERR) then
        call log_error('Failed to sync file ' // trim(ds%file_path) // '!', __FILE__, __LINE__)
      end if
      call ds%close()
    end if

    if (.not. keep_it) call datasets%remove(dataset_name)

  end subroutine fiona_end_output

#include "fiona_output.F90"

end module fiona_output_mod
