! model_diffusion_m.f90
module model_diffusion_m
    !! Implementation of diffusion model
    use MPI
    use comm_handler_m, only : comm_handler_t
    use static_data_m, only : equi, mesh_cano, mesh_stag, equi_on_cano, equi_on_stag, &
        hsolver_cano => field_solver_cano, &
        hsolver_stag => field_solver_stag, &
        map, &
        penalisation_cano => parbnd_immersed_cano, &
        penalisation_stag => parbnd_immersed_stag, &
        polars_cano, polars_stag

    use parallelisation_setup_m, only : is_rank_info_writer
    use precision_grillix_m, only : GP, MPI_GP
    use variable_m, only : variable_t
    use multistep_m, only : karniadakis_t
    use init_diffusion_m, only : build_initial_state
    use timestep_diffusion_m, only : timestep_diffusion
    use checkpoint_monitor_m, only : checkpoint_monitor_t
    use snapshot_m, only : snapshot_t, char_arr_t
    use mms_diffusion_m, only : mms_sol_diffusion, mms_diagnostics_diffusion
    
    ! From PARALLAX   
    use perf_m, only : perf_print, perf_reset
    use error_handling_grillix_m, only: handle_error, error_info_t
    use status_codes_grillix_m, only : GRILLIX_ERR_OTHER, &
                                       GRILLIX_WRN_TSTEP_NO_SUCCESS
    use screen_io_m, only :  get_stdout
    use descriptors_m, only : DISTRICT_CORE, DISTRICT_WALL, DISTRICT_DOME, &
                              DISTRICT_OUT
                              
    use params_diffusion_m, only : &
        read_paths, write_paths, read_all_params_diffusion, write_all_params_diffusion, &
        mms_on, &
        tstep_order, dtau, tau_fin, tau_snaps, tstep_dbgout, &
        bnd_descr_core, bnd_val_core, bnd_descr_wall, bnd_val_wall, &
        bnd_descr_dome, bnd_val_dome, bnd_descr_out, bnd_val_out, &
        init_type, init_path
    
    implicit none

    public :: model_diffusion
    private :: timestep_debug_output

contains

    subroutine model_diffusion(comm_handler)
        !! Driver for the diffusion model: reads parameters, builds the initial
        !! state, runs the time-stepping loop, and writes snapshots at the
        !! checkpoints set by the checkpoint monitor. When mms_on is set, the
        !! manufactured solution is evaluated and MMS diagnostics are written
        !! alongside each snapshot.
        type(comm_handler_t), intent(in) :: comm_handler
        !! Communicators        
        type(variable_t) :: var, mms_sol
        ! Variable evolved in time (defined on canonical mesh) and the
        ! manufactured (MMS) reference solution (only used when mms_on)
        type(karniadakis_t) :: tstepkrk
        ! Time integrator (Karniadakis)
        type(checkpoint_monitor_t) :: checkpoint_monitor
        ! Main checkpoint monitor
        integer, dimension(mesh_cano%get_n_points_boundary()) :: bndperp_types
        ! Boundary descriptors
        real(GP), dimension(mesh_cano%get_n_points_boundary()) :: bndperp_vals
        ! Boundary values
        integer :: kb
        ! Loop index running over boundary points
        integer :: l
        ! Mesh index

        real(GP) :: x, y, phi
        ! Mesh-point coordinates and toroidal angle used to evaluate the MMS solution
        integer :: district
        ! District descriptor of the current boundary point
        integer :: tstep_count, isnaps, tinfo_min
        ! Timestep counter, snapshot counter, global minimum of timestep status
        real(GP) :: tau
        ! Current simulation time
        integer, dimension(2) :: tinfo
        ! Integer status info returned by the timestep (negative means failure)
        real(GP), dimension(3) :: rinfo
        ! Real-valued status info returned by the timestep (e.g. residuals)
              
        integer :: nvars, ierr
        type(char_arr_t), allocatable, dimension(:) :: name_vars 
        type(snapshot_t) :: snaps
        
        if (is_rank_info_writer) then
            write(get_stdout(),*)''
            write(get_stdout(),*)'-------------------------------------------------------'
            write(get_stdout(),*)'START RUNNING DIFFUSION MODEL'
            write(get_stdout(),*)'-------------------------------------------------------'
            write(get_stdout(),*)''
        endif

        ! Read parameters ------------------------------------------------------
        call read_paths('params_diffusion.in')
        if (is_rank_info_writer) then
            call write_paths()
        endif
        ! Barrier so path output is complete before all ranks read parameters
        call MPI_BARRIER(comm_handler%get_comm_cart(), ierr)
        call read_all_params_diffusion()
        if (is_rank_info_writer) then
            call write_all_params_diffusion()
        endif

        ! Set snapsfile --------------------------------------------------------
        ! With MMS the manufactured solution is stored as a second variable
        if (mms_on) then
            nvars = 2
            allocate(name_vars(nvars))
            name_vars(1)%str = 'var'
            name_vars(2)%str = 'mms_sol'
        else
            nvars = 1
            allocate(name_vars(nvars))
            name_vars(1)%str = 'var'
        endif
        call snaps%init(comm_handler, mesh_cano, mesh_stag, 'snapsdir_diffusion', nvars, name_vars)
        
        ! Setup initial state (also sets isnaps and tau, e.g. on restart)
        call build_initial_state(comm_handler, equi, mesh_cano, map, &
                                 init_type, init_path, &
                                 snaps, isnaps, tau, var)

        if (mms_on) then
            call mms_sol%init(comm_handler, ndim=mesh_cano%get_n_points(), &
                              staggered=.false.,init_vals=var%vals)
        endif

        ! Initialise checkpoint monitor
        call checkpoint_monitor%init(major_interval=tau_snaps, &
                                     major_tally_offset=isnaps)
    
        ! Create or open netcdf snapsfile --------------------------------------
        ! On a continued run the initial state is already in the snapsfile
        if (init_type /= 'CONTINUE') then
            ! Write out initial state
            if (mms_on) then               
                call snaps%write_snaps(comm_handler, isnaps, tau, [var, mms_sol])
            else
                call snaps%write_snaps(comm_handler, isnaps, tau, [var])
            endif
        endif

        ! Initialise time-integrator -------------------------------------------
        call tstepkrk%init(mesh_cano%get_n_points(), tstep_order, dtau)

        ! Set array with perpendicular boundary condition ----------------------
        ! Each boundary point gets a descriptor/value pair according to the
        ! district (core, wall, dome, outer) it belongs to
        !$omp parallel default(none) &
        !$omp shared(mesh_cano, bndperp_types, bndperp_vals, &
        !$omp         bnd_descr_core, bnd_val_core, bnd_descr_wall, bnd_val_wall, &
        !$omp         bnd_descr_dome, bnd_val_dome, bnd_descr_out, bnd_val_out) &
        !$omp private(kb, l, district) 
        !$omp do
        do kb = 1, mesh_cano%get_n_points_boundary()
            l = mesh_cano%boundary_indices(kb)
            district = mesh_cano%get_district(l)  
            select case(district) 
                case(DISTRICT_CORE)
                    bndperp_types(kb) = bnd_descr_core
                    bndperp_vals(kb) = bnd_val_core
                case(DISTRICT_WALL)
                    bndperp_types(kb) = bnd_descr_wall
                    bndperp_vals(kb) = bnd_val_wall
                case(DISTRICT_DOME)
                    bndperp_types(kb) = bnd_descr_dome
                    bndperp_vals(kb) = bnd_val_dome
                case(DISTRICT_OUT)
                    bndperp_types(kb) = bnd_descr_out
                    bndperp_vals(kb) = bnd_val_out
                case default
                    call handle_error('District not valid', &
                                      GRILLIX_ERR_OTHER, __LINE__, __FILE__, &
                                      error_info_t('District: ',[district]))
            end select     
        end do
        !$omp end do
        !$omp end parallel   
            
        ! Time-stepping loop ---------------------------------------------------
        if (is_rank_info_writer) then
            write(get_stdout(),*)
            write(get_stdout(),*)'start timestepping .....'
            write(get_stdout(),*)
        endif
        tstep_count = 0
        do while (tau <= tau_fin + dtau)
            tstep_count = tstep_count + 1

            call timestep_diffusion(comm_handler, equi, &
                                    mesh_cano, mesh_stag, &
                                    equi_on_cano, equi_on_stag, &
                                    hsolver_cano, hsolver_stag, &
                                    map, &
                                    penalisation_cano, penalisation_stag, &
                                    polars_cano, polars_stag, &
                                    tstepkrk, &
                                    bndperp_types, bndperp_vals, &
                                    tau, var, tinfo, rinfo)

            ! Check for the successful execution of the time step on ALL ranks:
            ! a negative tinfo on any rank aborts the run
            tinfo_min = minval(tinfo)
            call MPI_allreduce(MPI_IN_PLACE, tinfo_min, 1, MPI_Integer, MPI_MIN, &
                               comm_handler%get_comm_cart(), ierr)
            if ( tinfo_min < 0 ) then
                call handle_error('Timestep returned without success', &
                                      GRILLIX_WRN_TSTEP_NO_SUCCESS, &
                                      __LINE__, __FILE__, &
                                      error_info_t('tinfo: ',[tinfo_min]))
                return
            endif

            if (tstep_dbgout) then
                call timestep_debug_output(comm_handler, tstep_count, tau, tinfo, rinfo)
            endif

            ! Advance checkpoint monitor
            call checkpoint_monitor%drive(tau, dtau)
            if (checkpoint_monitor%on_major_checkpoint()) then
                
                ! Update isnaps
                isnaps = checkpoint_monitor%get_major_tally()
                ! Write snapshot to snapsfile
                if (is_rank_info_writer) then
                    write(get_stdout(),*)'Writing snapshot at -----------------------------------'
                    write(get_stdout(),733)tau, tstep_count, isnaps
733                 FORMAT(3X,'tau          = ',ES14.6E3 /, &
                           3X,'tstep_count  = ',I8 /, &
                           3X,'isnaps       = ',I8)  
                    write(get_stdout(),*)'-------------------------------------------------------'  
                endif
                
                if (mms_on) then
                    ! Compute MMS solution at the current time for comparison
                    phi = map%get_phi_cano()
                    !$omp parallel default(none) &
                    !$omp shared(equi, mesh_cano, mms_sol, phi, tau) &
                    !$omp private(l, x, y)
                    !$omp do
                    do l = 1, mesh_cano%get_n_points()
                        x = mesh_cano%get_x(l)
                        y = mesh_cano%get_y(l)
                        mms_sol%vals(l) = mms_sol_diffusion(equi, x, y, phi, &
                                                            tau)
                    enddo
                    !$omp end do
                    !$omp end parallel
                endif                
               
                if (mms_on) then               
                    call snaps%write_snaps(comm_handler, isnaps, &
                                           tau, [var, mms_sol])
                else
                    call snaps%write_snaps(comm_handler, isnaps, tau, [var])
                endif
                
                ! Write diagnostics of MMS analysis
                if (mms_on) then
                    call mms_diagnostics_diffusion(comm_handler, equi, &
                                                   mesh_cano, map, tau, var)
                endif

                ! Write performance statistics
                call perf_print() 
                call perf_reset()   
                
            endif
            
        enddo

        if (is_rank_info_writer) then
            write(get_stdout(),*)''
            write(get_stdout(),*)'-------------------------------------------------------'
            write(get_stdout(),*)'DIFFUSION MODEL RUN SUCCESSFULLY'  
            write(get_stdout(),*)'-------------------------------------------------------'
            write(get_stdout(),*)''
        endif
     
    end subroutine

    subroutine timestep_debug_output(comm_handler, tstep_count, tau, tinfo, rinfo)
        !! Writes debug output from timestep (e.g. convergence results of solvers) to file.
        !!
        !! tinfo/rinfo of all planes are gathered on rank 0 of the plane
        !! communicator and appended to 'tstep_debug.out' by the info-writer rank.
        type(comm_handler_t), intent(in) :: comm_handler
        !! Communicators
        integer, intent(in) :: tstep_count
        !! Number of timestep
        real(GP), intent(in) :: tau
        !! Time       
        integer, intent(in), dimension(2) :: tinfo
        !! Info status (integers)
        real(GP), intent(in), dimension(3) :: rinfo
        !! Info status (float)

        integer :: iplane, nplanes, ierr, dbgunit
        ! Plane loop index, number of planes, MPI status, debug file unit
        integer, allocatable, dimension(:,:) :: tinfo_gather
        ! tinfo of every plane, one column per plane (valid on gather root only)
        real(GP), allocatable, dimension(:,:) :: rinfo_gather
        ! rinfo of every plane, one column per plane (valid on gather root only)

        ! Gather tinfo and rinfo on plane 0 (counts are per-process)
        nplanes = comm_handler%get_nplanes()

        allocate(tinfo_gather(2, nplanes))
        allocate(rinfo_gather(3, nplanes))
        call MPI_Gather(tinfo, 2, MPI_Integer, tinfo_gather, 2, MPI_Integer, 0, &
                        comm_handler%get_comm_planes(), ierr)
        call MPI_Gather(rinfo, 3, MPI_GP, rinfo_gather, 3, MPI_GP, 0, &
                        comm_handler%get_comm_planes(), ierr)
       
        ! Write debug output; only the info-writer rank opens/appends the file
        ! so ranks do not contend for 'tstep_debug.out'.
        ! NOTE(review): assumes the info-writer rank coincides with the gather
        ! root (rank 0 of the plane communicator), since the gathered arrays
        ! are only defined there -- confirm against parallelisation setup.
        if (is_rank_info_writer) then
            open(newunit=dbgunit, file='tstep_debug.out', status='unknown', position='append')
            write(dbgunit,*)repeat('-',100)
            write(dbgunit,"('tstep_count = ', I10,3X,'tau = ', ES15.7E3,1X)")tstep_count, tau

            write(dbgunit,*)'Statistics of 2D solver'
            write(dbgunit,"(3X,A6,'|',(1X,A14,1X,'|'))")'iplane', 'tinfo/rinfo'
            do iplane = 1, nplanes
                write(dbgunit,"(3X,I6,'|',(1X,I4,1X,ES9.2E2,1X,'|'))")iplane-1, &
                    tinfo_gather(1,iplane), rinfo_gather(1,iplane)
            enddo

            write(dbgunit,*)'Statistics of 3D solver'
            write(dbgunit,"(A5,1X,2(A9,1X))")'niter', 'pseudores', 'true res'

            write(dbgunit,"(I5,2(1X,ES9.2E2))") tinfo_gather(2,1), rinfo_gather(2,1), rinfo_gather(3,1)

            write(dbgunit,*)repeat('-',100)
            close(dbgunit)
        endif
        
    end subroutine

end module