! polar_operators_m.f90
! (FORD documentation-page boilerplate removed; module source follows)

module polar_operators_m
    !! Implementation of polar operators, i.e.
    !! - map from Cartesian to Polar mesh
    !! - surface average
    !! - flux surface average
    use MPI
    use precision_grillix_m, only : GP, MPI_GP
    use polars_m, only : polars_t
    use error_handling_grillix_m, only: handle_error, error_info_t
    use status_codes_grillix_m, only : GRILLIX_ERR_OTHER
    use equilibrium_storage_m, only : equilibrium_storage_t
    use parallel_map_m, only : parallel_map_t
    
    ! from PARALLAX
    use screen_io_m, only :  get_stdout
    use descriptors_m, only : DISTRICT_CORE, DISTRICT_CLOSED
    use csrmat_m, only : csrmat_t, csr_times_vec
    use equilibrium_m, only : equilibrium_t
    use mesh_cart_m, only : mesh_cart_t    
    use coords_polar_m, only : cart_to_polar, polar_to_cart
    use interpolation_m, only : interpol_coeffs

    implicit none

    public :: map_cart_to_polar
    public :: map_polar_to_cart
    public :: surface_average
    public :: flux_surface_average
    private :: zonal_average
    public :: drift_surface_integral

contains

    subroutine map_cart_to_polar(mesh, polars, u, upol)
        !! Maps quantity from Cartesian mesh to polar grid by applying the
        !! precomputed sparse interpolation matrix polars%cart_to_polar_csr.
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
        !! Values of field on Cartesian mesh
        real(GP), dimension(polars%grid%get_ntheta(), polars%grid%get_nrho() ), intent(out) :: upol
        !! Values of field on polar grid

        ! NOTE(review): csr_times_vec is invoked from inside a parallel region,
        ! so it presumably contains orphaned OMP worksharing constructs that
        ! distribute the matrix rows over the team -- confirm in csrmat_m.
        !$OMP PARALLEL      
        call csr_times_vec(polars%cart_to_polar_csr, u, upol)
        !$OMP END PARALLEL

    end subroutine

    subroutine map_polar_to_cart(equi, mesh, polars, upol, u, intorder)
        !! Maps quantity from polar grid to Cartesian mesh by local
        !! polynomial interpolation of order intorder on the uniform
        !! (theta, rho) polar grid. Only Cartesian points in the closed
        !! field line district are mapped; all other points get u = 0.
        class(equilibrium_t), intent(inout) :: equi
        !! Equilibrium
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), dimension(polars%grid%get_ntheta(), polars%grid%get_nrho() ), intent(in) :: upol
        !! Values of field on polar grid
        real(GP), dimension(mesh%get_n_points()), intent(out) :: u
        !! Values of field on Cartesian mesh
        integer, intent(in), optional :: intorder
        !! Interpolation order (default 3), must be an odd positive number

        integer :: l, district, jp_sw, ip_sw, jk, ik, jp, ip, intorder_a
        real(GP) :: x, y, phi, rho, theta, rho_rel, theta_rel, uc
        
        real(GP), dimension(:,:), allocatable :: coeffs
        
        phi = mesh%get_phi()
        
        ! Resolve optional interpolation order (default: cubic, order 3)
        if (present(intorder)) then
            intorder_a = intorder
        else
            intorder_a = 3
        endif
        
        ! An odd order gives a centered (intorder+1)x(intorder+1) stencil;
        ! the error branch can only be reached when intorder was supplied,
        ! since the default of 3 always passes this check.
        if ( (mod(intorder_a,2) == 0) .or. (intorder_a < 1) ) then
            call handle_error('Intorder must be odd positive number', &
                              GRILLIX_ERR_OTHER, __LINE__, __FILE__, &
                              error_info_t('Intorder: ',[intorder]))
        endif 
        
        
        ! Workspace for the 2D tensor-product interpolation weights;
        ! listed PRIVATE below so each thread gets its own allocated copy.
        allocate(coeffs(intorder_a+1,intorder_a+1))

        !$OMP PARALLEL PRIVATE (l, x, y, district, rho, theta, jp_sw, ip_sw, theta_rel, rho_rel, coeffs, ik, jk, ip, jp, uc)
        !$OMP DO
        do l = 1, mesh%get_n_points()
            x = mesh%get_x(l)
            y = mesh%get_y(l)
            district = mesh%get_district(l)
            if (district == DISTRICT_CLOSED) then
                ! Flux coordinates of this Cartesian point
                call cart_to_polar(equi, x, y, phi, rho, theta)

                ! Get indices of southwest point of interpolation stencil on polar grid
                jp_sw = floor( theta / polars%grid%get_dtheta() ) + 1 - (intorder_a-1)/2
                ip_sw = floor( (rho - polars%grid%get_rhopol_min()) / polars%grid%get_drho() ) + 1 - (intorder_a-1)/2

                ! Get interpolation coefficients from the position of the
                ! target point relative to the stencil origin (in grid units)
                theta_rel = (theta - polars%grid%get_theta(jp_sw)) / polars%grid%get_dtheta()
                rho_rel   = (rho   - polars%grid%get_rho(ip_sw)) / polars%grid%get_drho()
                call interpol_coeffs(intorder_a, theta_rel, rho_rel, coeffs)
                
                u(l) = 0.0_GP
                
                ! Accumulate weighted stencil values
                do jk = 1, intorder_a + 1
                    ! Indices of poloidal interpolation points (respect periodicity)
                    jp = jp_sw + (jk-1)
                    if (jp > polars%grid%get_ntheta()) then
                        jp = jp - polars%grid%get_ntheta()
                    endif
                    if (jp < 1) then
                        jp = jp + polars%grid%get_ntheta()
                    endif
            
                    do ik = 1, intorder_a + 1
                        ! Indices of radial interpolation points
                        ip = ip_sw + (ik-1)
                        
                        ! No radial periodicity: stencil points outside the
                        ! polar grid contribute zero
                        if ( (ip >= 1).and. (ip <= polars%grid%get_nrho()) ) then
                            uc  = upol(jp, ip)
                        else
                            uc = 0.0_GP
                        endif
                        u(l) = u(l) + coeffs(jk, ik)*uc
                    enddo
                enddo
            
            else
                ! Point outside the closed field line region: no polar data
                u(l) = 0.0_GP
            endif            
        enddo
        !$OMP END DO
        !$OMP END PARALLEL
 
    end subroutine

    subroutine surface_average(comm, mesh, polars, u, u_srf_av)
        !! Computes the surface average of a quantity. Thin wrapper around
        !! zonal_average with the plain surface-average mode selected.
        integer, intent(in) :: comm
        !! MPI-communicator
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
        !! Values of field on Cartesian mesh
        real(GP), dimension(polars%grid%get_nrho() ), intent(out) :: u_srf_av
        !! Surface average of quantity

        ! Mode selector understood by zonal_average: 1 = surface average
        integer, parameter :: zmode_surface = 1

        call zonal_average(comm, zmode_surface, mesh, polars, u, u_srf_av)

    end subroutine surface_average

    subroutine flux_surface_average(comm, mesh, polars, u, u_fluxsrf_av)
        !! Computes the flux surface average of a quantity. Thin wrapper
        !! around zonal_average with the flux-surface-average mode selected.
        integer, intent(in) :: comm
        !! MPI-communicator
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
        !! Values of field on Cartesian mesh
        real(GP), dimension(polars%grid%get_nrho() ), intent(out) :: u_fluxsrf_av
        !! Surface average of quantity

        ! Mode selector understood by zonal_average: 2 = flux surface average
        integer, parameter :: zmode_flux_surface = 2

        call zonal_average(comm, zmode_flux_surface, mesh, polars, u, u_fluxsrf_av)

    end subroutine flux_surface_average

    subroutine zonal_average(comm, zmode, mesh, polars, u, u_zon_av)
        !! Computes zonal average of quantity: applies the precomputed sparse
        !! averaging matrix for the selected mode, scales by 1/nprocs, and
        !! sums the per-rank contributions with an in-place MPI allreduce so
        !! the result is the average over all ranks in comm.
        integer, intent(in) :: comm
        !! MPI-communicator
        integer, intent(in) :: zmode
        !! zmode = 1 : surface average
        !! zmode = 2 : flux surface average
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
        !! Values of field on Cartesian mesh
        real(GP), dimension(polars%grid%get_nrho() ), intent(out) :: u_zon_av
        !! Surface average of quantity

        integer :: ip, nrho, nprocs, ierr
        real(GP) :: fac

        ! Validate the mode before entering the parallel region: handle_error
        ! aborts, and aborting from inside an OMP parallel construct (as the
        ! previous implementation did, under a critical section) is unsafe
        ! and was redundantly executed by every thread.
        if ( (zmode /= 1) .and. (zmode /= 2) ) then
            call handle_error('Zmode not valid', &
                              GRILLIX_ERR_OTHER, __LINE__, __FILE__, &
                              error_info_t('Zmode: ',[zmode]))
        endif

        nrho = polars%grid%get_nrho()
        call MPI_comm_size(comm, nprocs, ierr)
        ! Divide by the number of ranks so the MPI_SUM below yields the
        ! average over ranks rather than their sum.
        fac = 1.0_GP/(1.0_GP*nprocs)

        !$OMP PARALLEL
        ! csr_times_vec presumably contains orphaned OMP worksharing, hence
        ! it is called by the whole team from inside the parallel region.
        if (zmode == 1)  then
            call csr_times_vec(polars%surface_average_csr, u, u_zon_av)
        else
            call csr_times_vec(polars%flux_surface_average_csr, u, u_zon_av)
        endif
        !$OMP DO SIMD
        do ip = 1, nrho
            u_zon_av(ip) = u_zon_av(ip)*fac
        enddo
        !$OMP END DO SIMD
        !$OMP END PARALLEL

        ! Combine the per-rank partial averages across the communicator
        call MPI_allreduce(MPI_IN_PLACE, u_zon_av, nrho, MPI_GP, MPI_SUM, comm, ierr)

    end subroutine

    subroutine drift_surface_integral(comm, equi, mesh, polars, dphi, u, drift_flux, v)
        !! Computes surface integral of flux in drift form, i.e.
        !! \int dS v/B^2 B\times\nabla u 
        !! via integration along flux surface, i.e.
        !! = \int v/B du/d\theta  J dphi d\theta
        !! The theta derivative is taken as a first-order difference between
        !! neighbouring polar grid points (periodic in theta), with the
        !! Jacobian, toroidal field and v evaluated at the segment midpoint
        !! by arithmetic averaging of the two endpoints.
        integer, intent(in) :: comm
        !! MPI-communicator
        class(equilibrium_t) :: equi
        !! Equilibrium
        type(mesh_cart_t), intent(in) :: mesh
        !! Mesh within poloidal plane
        type(polars_t), intent(in) :: polars
        !! Polar grid and matrices
        real(GP), intent(in) :: dphi
        !! Toroidal grid distance
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
        !! Field that gradient acts upon
        real(GP), dimension(polars%grid%get_nrho()), intent(out) :: drift_flux
        !! Drift flux
        real(GP), dimension(mesh%get_n_points()), intent(in), optional :: v
        !! Secondary field, default values = 1

        integer :: ip, jp, jp_fwd, ierr, nrho
        real(GP) :: phi, xp, yp, xp_fwd, yp_fwd, jac_av, btor_av, v_av, rs
        ! NOTE(review): automatic arrays sized ntheta*nrho live on the stack;
        ! for very large polar grids this could overflow -- confirm sizes.
        real(GP), dimension(polars%grid%get_ntheta(), polars%grid%get_nrho()) :: upol, vpol

        phi = mesh%get_phi()

        ! Map fields to polar grid (vpol is only defined when v is present)
        call map_cart_to_polar(mesh, polars, u, upol)
        if (present(v)) then
            call map_cart_to_polar(mesh, polars, v, vpol)
        endif
    
        ! OMP pattern: all threads execute the outer ip loop redundantly.
        ! Per surface, one thread zeroes the shared accumulator rs (single,
        ! with implicit barrier), the team reduces over theta (do reduction),
        ! and one thread stores the result (single, with implicit barrier).
        ! NOTE(review): assumes equi%jacobian and equi%btor are thread-safe.
        !$omp parallel default(none) &
        !$omp private(ip, jp, jp_fwd, xp, yp, xp_fwd, yp_fwd, jac_av, btor_av, v_av) &
        !$omp shared(equi, phi, dphi, polars, upol, v, vpol, drift_flux,rs)
        do ip = 1, polars%grid%get_nrho()
            !$omp single
            rs = 0.0_GP
            !$omp end single
            !$omp do reduction(+: rs)
            do jp = 1, polars%grid%get_ntheta()
                ! Forward neighbour in theta, wrapping periodically
                if (jp == polars%grid%get_ntheta()) then
                    jp_fwd = 1
                else
                    jp_fwd = jp + 1
                endif

                ! Cartesian coordinates of the segment endpoints
                xp = polars%xpol(jp, ip)
                yp = polars%ypol(jp, ip)
                xp_fwd = polars%xpol(jp_fwd, ip)
                yp_fwd = polars%ypol(jp_fwd, ip)

                ! Midpoint values of Jacobian, toroidal field and v
                jac_av = 0.5_GP * (equi%jacobian(xp_fwd, yp_fwd, phi) +  equi%jacobian(xp, yp, phi))
                btor_av = 0.5_GP * (equi%btor(xp_fwd, yp_fwd, phi) +  equi%btor(xp, yp, phi))
                if (present(v)) then
                    v_av = 0.5_GP * (vpol(jp_fwd,ip) + vpol(jp,ip))
                else
                    v_av = 1.0_GP
                endif

                ! Contribution v/B * J * du (du/dtheta * dtheta collapses
                ! to the difference of upol at adjacent theta points)
                rs = rs + v_av / btor_av * jac_av * (upol(jp_fwd, ip) - upol(jp, ip)) * dphi
            enddo
            !$omp end do
            !$omp single
            drift_flux(ip) = rs
            !$omp end single
        enddo
        !$omp end parallel
        
        ! Sum contributions from all ranks (each holds one poloidal plane)
        nrho = polars%grid%get_nrho()
        call MPI_allreduce(MPI_IN_PLACE, drift_flux, nrho, MPI_GP, MPI_SUM, comm, ierr)

    end subroutine

end module