module perp_bnd_flux_m
    !! Module for computing perpendicular fluxes through domain

    use MPI
    use NETCDF
    use precision_grillix_m, only : GP, GP_EPS, MPI_GP
    use screen_io_m, only : get_stdout
    use parallelisation_setup_m, only : is_rank_info_writer
    use comm_handler_m, only : comm_handler_t
    use error_handling_grillix_m, only: handle_error_netcdf
    use csrmat_m, only : csrmat_t
    use equilibrium_m, only : equilibrium_t
    use equilibrium_storage_m, only : equilibrium_storage_t
    use mesh_cart_m, only : mesh_cart_t
    use penalisation_m, only : penalisation_t
    use polygon_in_mesh_m, only : polygon_in_mesh_t
    use descriptors_m, only : DISTRICT_CORE, DISTRICT_WALL, DISTRICT_DOME, DISTRICT_OUT

    implicit none

    public :: mirrorset_polyperpbnd_points
    public :: compute_diffusive_flux
    public :: compute_drift_flux
    public :: get_diffusive_flux_total
    public :: get_diffusive_flux_local
    public :: get_drift_flux_total
    public :: get_drift_flux_local

    type, public :: perp_bnd_flux_t
        !! Wrapper type for managing perpendicular fluxes
        type(polygon_in_mesh_t), public :: core
            !! Polygon for core boundary
        type(csrmat_t), public :: conn_core
            !! Connectivity matrix for core polygon
        type(polygon_in_mesh_t), public :: outer
            !! Polygon for outer boundary
            !! This consists of the wall and dome excluding the part within the penalisation
            !! and the target
        type(csrmat_t), public :: conn_outer
            !! Connectivity matrix for outer polygon
        type(polygon_in_mesh_t), public :: target
            !! Polygon for target boundary
        type(csrmat_t), public :: conn_target
            !! Connectivity matrix for target polygon
        integer, dimension(:), allocatable, public :: full_to_inner
            !! Index map of full mesh indices to corresponding inner mesh index (if it exists)
            !! Full indices without associated inner index have value 0
    contains
        procedure, public :: init => init_perp_bnd_flux
        procedure, public :: write_netcdf => write_netcdf_perp_bnd_flux
        procedure, public :: read_netcdf => read_netcdf_perp_bnd_flux
        final :: destructor
    end type

    interface

        module subroutine init_perp_bnd_flux(self, equi, mesh, penalisation, dbgout)
            !! Creates perpendicular boundary polygons and connectivity matrices
            class(perp_bnd_flux_t), intent(inout) :: self
                !! Instance of class
            class(equilibrium_t), intent(in) :: equi
                !! Equilibrium
            type(mesh_cart_t), intent(in) :: mesh
                !! Mesh
            type(penalisation_t), intent(in) :: penalisation
                !! Penalisation
            integer, intent(in), optional :: dbgout
                !! Debug output level
        end subroutine

        module subroutine write_netcdf_perp_bnd_flux(self, fgid)
            !! Writes boundary polygons to netcdf file
            class(perp_bnd_flux_t), intent(in) :: self
                !! Instance of class
            integer, intent(in) :: fgid
                !! File or group id
        end subroutine

        module subroutine read_netcdf_perp_bnd_flux(self, fgid)
            !! Reads boundary polygon information from netcdf file
            class(perp_bnd_flux_t), intent(inout) :: self
                !! Instance of class
            integer, intent(in) :: fgid
                !! File or group id
        end subroutine

        module subroutine destructor(self)
            !! Destructor
            type(perp_bnd_flux_t), intent(inout) :: self
                !! Instance of type
        end subroutine

    end interface

contains

    subroutine mirrorset_polyperpbnd_points(equi, mesh, polygon, conn, u, co, iseg)
        !! Sets points of field u on boundary-polygon to the value
        !! of its connected inner points, according to connectivity matrix conn.
        !! The mirrored value is a Jacobian- (and optionally coefficient-) weighted
        !! average over all connected points of each polygon point.
        class(equilibrium_t), intent(in) :: equi
            !! Equilibrium
        type(mesh_cart_t), intent(in) :: mesh
            !! Mesh
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        type(csrmat_t), intent(in) :: conn
            !! Connectivity matrix
        real(GP), dimension(mesh%get_n_points()), intent(inout) :: u
            !! Field
        real(GP), dimension(mesh%get_n_points()), optional, intent(in) :: co
            !! Coefficient
        integer, intent(in), optional :: iseg
            !! Segment for which mirroring is applied.
            !! By default it is applied to all segments.

        integer :: is, iseg_start, iseg_end, ks, ks_glob, l, iz, col
        real(GP) :: x, y, phi, jac_x, xc, yc, jac_bs, co_bs, wgh

        if (present(iseg)) then
            iseg_start = iseg
            iseg_end = iseg
        else
            iseg_start = 1
            iseg_end = polygon%get_nsegs()
        endif

        phi = mesh%get_phi()

        ! NOTE(review): u is written at polygon points l while read at connected
        ! points col within the parallel loop; this assumes the connectivity maps
        ! polygon points only onto inner (non-polygon) points -- confirm in init.
        !$omp parallel default(none) &
        !$omp private(is, ks, ks_glob, l, x, y, jac_x, iz, col, xc, yc, jac_bs, co_bs, wgh) &
        !$omp shared(equi, mesh, phi, polygon, conn, u, co, iseg_start, iseg_end)
        ! Fixed: loop previously ran over all segments, ignoring the optional
        ! iseg restriction computed above
        do is = iseg_start, iseg_end
            !$omp do
            do ks = 1, polygon%get_nps(is)
                ! Internal global polygon index (row of the CSR matrix)
                ks_glob = polygon%ki_seg(is) - 1 + ks
                l = polygon%get_ind_on_mesh(ks, is)
                x = mesh%get_x(l)
                y = mesh%get_y(l)
                jac_x = equi%jacobian(x, y, phi)
                ! CSR matrix multiplication structure
                u(l) = 0.0_GP
                wgh = 0.0_GP
                do iz = conn%i(ks_glob), conn%i(ks_glob+1) - 1
                    col = conn%j(iz)
                    ! Get coefficient and Jacobian on boundary surface
                    ! (average between polygon point l and connected point col;
                    ! previously both coordinates were taken at l, which made
                    ! jac_bs degenerate to jac_x)
                    xc = mesh%get_x(col)
                    yc = mesh%get_y(col)
                    jac_bs = 0.5_GP * (jac_x + equi%jacobian(xc, yc, phi))
                    if (present(co)) then
                        co_bs = 0.5_GP * (co(l) + co(col))
                    else
                        co_bs = 1.0_GP
                    endif
                    u(l) = u(l) + co_bs * u(col) * jac_bs
                    wgh = wgh + co_bs * jac_bs
                enddo
                ! Normalise weighted average; assumes every polygon point has at
                ! least one connection (non-empty CSR row), otherwise wgh = 0
                u(l) = u(l) / wgh
            enddo
            !$omp end do
        enddo
        !$omp end parallel
    end subroutine

    subroutine compute_diffusive_flux(equi, mesh, dphi, polygon, conn, u, diff_flux, co)
        !! Computes the diffusive flux through a perpendicular boundary polygon, i.e.
        !! \int_S v \nabla u dS, where nabla is gradient within plane
        !! Sign convention is that flux out of inner domain is positive
        class(equilibrium_t), intent(in) :: equi
            !! Equilibrium
        type(mesh_cart_t), intent(in) :: mesh
            !! Mesh
        real(GP), intent(in) :: dphi
            !! Toroidal extent of surface
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        type(csrmat_t), intent(in) :: conn
            !! Connectivity matrix
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
            !! Field
        real(GP), dimension(polygon%get_np_total()), intent(out) :: diff_flux
            !! Diffusive flux through boundary polygon points (its associated area)
        real(GP), dimension(mesh%get_n_points()), optional, intent(in) :: co
            !! Coefficient

        integer :: is, ks, ks_glob, l, iz, col
        real(GP) :: x, y, phi, jac_x, xc, yc, jac_bs, co_bs

        phi = mesh%get_phi()

        !$omp parallel default(none) &
        !$omp private(is, ks, ks_glob, l, x, y, jac_x, iz, col, xc, yc, jac_bs, co_bs) &
        !$omp shared(equi, mesh, phi, polygon, conn, dphi, u, diff_flux, co)
        do is = 1, polygon%get_nsegs()
            !$omp do
            do ks = 1, polygon%get_nps(is)
                ! Internal global polygon index (row of the CSR matrix)
                ks_glob = polygon%ki_seg(is) - 1 + ks
                l = polygon%get_ind_on_mesh(ks, is)
                x = mesh%get_x(l)
                y = mesh%get_y(l)
                jac_x = equi%jacobian(x, y, phi)
                ! CSR matrix multiplication structure
                diff_flux(ks_glob) = 0.0_GP
                do iz = conn%i(ks_glob), conn%i(ks_glob+1) - 1
                    col = conn%j(iz)
                    ! Get coefficient and Jacobian on boundary surface
                    ! (average between polygon point l and connected point col;
                    ! previously both coordinates were taken at l, which made
                    ! jac_bs degenerate to jac_x)
                    xc = mesh%get_x(col)
                    yc = mesh%get_y(col)
                    jac_bs = 0.5_GP * (jac_x + equi%jacobian(xc, yc, phi))
                    if (present(co)) then
                        co_bs = 0.5_GP * (co(l) + co(col))
                    else
                        co_bs = 1.0_GP
                    endif
                    ! Finite difference (u(col) - u(l)) approximates the normal
                    ! gradient times the surface element between the two points
                    diff_flux(ks_glob) = diff_flux(ks_glob) &
                        + co_bs * (u(col) - u(l)) * jac_bs * dphi
                enddo
            enddo
            !$omp end do
        enddo
        !$omp end parallel
    end subroutine

    function get_diffusive_flux_total(comm_handler, polygon, diff_flux, sum_over_planes, iseg) result(res)
        !! Computes total flux through polygon segment from diff_flux array,
        !! which has to be computed previously via compute_diffusive_flux.
        type(comm_handler_t), intent(in) :: comm_handler
            !! Communicators
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        real(GP), dimension(polygon%get_np_total()), intent(in) :: diff_flux
            !! Diffusive flux through boundary in between polygon points and its connected points
            !! obtained from compute_diffusive_flux
        logical, intent(in) :: sum_over_planes
            !! If true summation over planes is performed.
            !! This assumes that polygons have same structure over planes
        integer, intent(in), optional :: iseg
            !! Segment for which flux is computed.
            !! If not present then flux summed over all segments is returned
        real(GP) :: res

        integer :: iseg_start, iseg_end, is, ks, ks_glob, ierr

        if (present(iseg)) then
            iseg_start = iseg
            iseg_end = iseg
        else
            iseg_start = 1
            iseg_end = polygon%get_nsegs()
        endif

        ! Initialise outside the parallel region: a shared-variable write by all
        ! threads inside the region races with the reduction combine of an
        ! early-finishing thread and can lose contributions
        res = 0.0_GP

        !$omp parallel default(none) private(is, ks, ks_glob) &
        !$omp shared(polygon, iseg_start, iseg_end, diff_flux, res)
        do is = iseg_start, iseg_end
            !$omp do reduction(+ : res)
            do ks = 1, polygon%get_nps(is)
                ! Internal global polygon index
                ks_glob = polygon%ki_seg(is) - 1 + ks
                res = res + diff_flux(ks_glob)
            enddo
            !$omp end do
        enddo
        !$omp end parallel

        if (sum_over_planes) then
            call MPI_Allreduce(MPI_IN_PLACE, res, 1, MPI_GP, MPI_SUM, &
                comm_handler%get_comm_planes(), ierr)
        endif
    end function

    function get_diffusive_flux_local(polygon, diff_flux, ks, iseg) result(res)
        !! Computes local flux through polygon point (area associated with polygon point)
        !! diff_flux array has to be computed previously via compute_diffusive_flux
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        real(GP), dimension(polygon%get_np_total()), intent(in) :: diff_flux
            !! Diffusive flux through boundary in between polygon points and its connected points
            !! obtained from compute_diffusive_flux
        integer, intent(in) :: ks
            !! Index on polygon segment
            !! If index does not exist on segment, zero flux is returned
        integer, intent(in), optional :: iseg
            !! Segment for which flux is computed, default = 1
        real(GP) :: res

        integer :: is, np_seg, ks_glob

        if (present(iseg)) then
            is = iseg
        else
            is = 1
        endif

        np_seg = polygon%get_nps(is)

        ! Handle out of range index
        if ( (ks < 1) .or. (ks > np_seg) ) then
            res = 0.0_GP
            return
        endif

        ks_glob = polygon%ki_seg(is) - 1 + ks
        res = diff_flux(ks_glob)
    end function

    subroutine compute_drift_flux(equi, equi_storage, mesh, dphi, polygon, u, drift_flux, v)
        !! Computes fluxes due to drift through a toroidally extended polygon
        !! \int_S v * (B/B^2 \times \nabla u) dS
        !! The integrand can be expressed via a derivative along the polygon
        !! \int v * 1/Btor du/dl J dl dphi
        !! where l measures the length along the polygon
        class(equilibrium_t), intent(in) :: equi
            !! Equilibrium
        type(equilibrium_storage_t), intent(in) :: equi_storage
            !! Equilibrium on mesh
        type(mesh_cart_t), intent(in) :: mesh
            !! Mesh
        real(GP), intent(in) :: dphi
            !! Toroidal extent of surface
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        real(GP), dimension(mesh%get_n_points()), intent(in) :: u
            !! Field
        real(GP), dimension(polygon%get_np_total()), intent(out) :: drift_flux
            !! Drift flux through boundary in between polygon points
            !! This quantity is difficult to interpret, use the getter routines
            !! ... to compute the local or integrated fluxes
        real(GP), dimension(mesh%get_n_points()), intent(in), optional :: v
            !! Optional field, default values 1

        integer :: nexclude_last, iseg, ks, ks_glob, l, lfwd
        real(GP) :: phi, x, y, xfwd, yfwd, jac_av, btor_av, v_av

        phi = mesh%get_phi()

        ! For non-periodic segments, the number of surface segments is one smaller
        ! than the number of segment points
        if (polygon%is_periodic()) then
            nexclude_last = 0
        else
            nexclude_last = 1
        endif

        ! NOTE(review): OpenMP directives are deliberately disabled (!!$omp) here;
        ! kept as-is -- confirm whether parallelisation of this loop is safe/desired
        !!$omp parallel default(none) &
        !!$omp private(iseg, ks, ks_glob, l, lfwd, x, y, xfwd, yfwd, &
        !!$omp jac_av, btor_av, v_av) &
        !!$omp shared(equi, equi_storage, mesh, dphi, polygon, phi, nexclude_last, &
        !!$omp u, v, drift_flux)
        do iseg = 1, polygon%get_nsegs()
            !!$omp do
            do ks = 1, polygon%get_nps(iseg) - nexclude_last
                ! Internal global polygon index
                ks_glob = polygon%ki_seg(iseg) - 1 + ks
                ! Subsequent mesh indices of polygon
                l = polygon%get_ind_on_mesh(ks, iseg)
                lfwd = polygon%get_ind_on_mesh(ks+1, iseg)
                x = mesh%get_x(l)
                y = mesh%get_y(l)
                xfwd = mesh%get_x(lfwd)
                yfwd = mesh%get_y(lfwd)
                ! Quantities mapped between subsequent polygon points
                jac_av = 0.5_GP * (equi%jacobian(x, y, phi) + equi%jacobian(xfwd, yfwd, phi))
                btor_av = 0.5_GP * (equi_storage%btor(l) + equi_storage%btor(lfwd))
                if (present(v)) then
                    v_av = 0.5_GP * (v(l) + v(lfwd))
                else
                    v_av = 1.0_GP
                endif
                drift_flux(ks_glob) = v_av / btor_av * (u(lfwd) - u(l)) * jac_av * dphi
            enddo
            !!$omp end do
        enddo
        !!$omp end parallel
    end subroutine

    function get_drift_flux_total(comm_handler, polygon, drift_flux, sum_over_planes, iseg) result(res)
        !! Computes total flux through polygon segment from drift_flux array,
        !! which has to be computed previously via compute_drift_flux.
        type(comm_handler_t), intent(in) :: comm_handler
            !! Communicators
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        real(GP), dimension(polygon%get_np_total()), intent(in) :: drift_flux
            !! Drift flux through boundary in between polygon points,
            !! obtained via compute_drift_flux routine
        logical, intent(in) :: sum_over_planes
            !! If true summation over planes is performed.
            !! This assumes that polygons have same structure over planes
        integer, intent(in), optional :: iseg
            !! Segment for which flux is computed.
            !! If not present then flux summed over all segments is returned
        real(GP) :: res

        integer :: iseg_start, iseg_end, nexclude_last, is, ks, ks_glob, ierr

        if (present(iseg)) then
            iseg_start = iseg
            iseg_end = iseg
        else
            iseg_start = 1
            iseg_end = polygon%get_nsegs()
        endif

        ! For non-periodic segments, the number of surface segments is one smaller
        ! than the number of segment points
        if (polygon%is_periodic()) then
            nexclude_last = 0
        else
            nexclude_last = 1
        endif

        ! Initialised before the (currently disabled) parallel region so that the
        ! reduction pattern stays race-free if OpenMP is re-enabled
        res = 0.0_GP

        !!$omp parallel default(none) private(is, ks, ks_glob) &
        !!$omp shared(polygon, iseg_start, iseg_end, nexclude_last, drift_flux, res)
        do is = iseg_start, iseg_end
            !!$omp do reduction(+ : res)
            do ks = 1, polygon%get_nps(is) - nexclude_last
                ! Internal global polygon index
                ks_glob = polygon%ki_seg(is) - 1 + ks
                res = res + drift_flux(ks_glob)
            enddo
            !!$omp end do
        enddo
        !!$omp end parallel

        if (sum_over_planes) then
            call MPI_Allreduce(MPI_IN_PLACE, res, 1, MPI_GP, MPI_SUM, &
                comm_handler%get_comm_planes(), ierr)
        endif
    end function

    function get_drift_flux_local(polygon, drift_flux, ks, iseg) result(res)
        !! Computes local flux through polygon point (area associated with polygon point)
        !! drift_flux array has to be computed previously via compute_drift_flux
        type(polygon_in_mesh_t), intent(in) :: polygon
            !! Polygon
        real(GP), dimension(polygon%get_np_total()), intent(in) :: drift_flux
            !! Drift flux through boundary in between polygon points,
            !! obtained via compute_drift_flux routine
        integer, intent(in) :: ks
            !! Index on polygon segment
            !! If index does not exist on segment, zero flux is returned
        integer, intent(in), optional :: iseg
            !! Segment for which flux is computed, default = 1
        real(GP) :: res

        integer :: is, np_seg, ks_bwd, ks_glob, ks_bwd_glob
        real(GP) :: flx_fwd, flx_bwd

        if (present(iseg)) then
            is = iseg
        else
            is = 1
        endif

        np_seg = polygon%get_nps(is)

        ! Handle out of range index
        if ( (ks < 1) .or. (ks > np_seg) ) then
            res = 0.0_GP
            return
        endif

        ! Find index backward to polygon point (wraps around for periodic segments)
        ks_bwd = ks - 1
        if ( (ks_bwd == 0) .and. polygon%is_periodic() ) then
            ks_bwd = np_seg
        endif

        ! Internal global polygon indices
        ks_glob = polygon%ki_seg(is) - 1 + ks
        ks_bwd_glob = polygon%ki_seg(is) - 1 + ks_bwd

        ! Map fluxes from in-between points to points: a point receives half of
        ! each adjacent in-between flux; endpoints of non-periodic segments have
        ! only one adjacent flux
        if (.not. polygon%is_periodic() .and. (ks == 1) ) then
            flx_bwd = 0.0_GP
            flx_fwd = drift_flux(ks_glob)
        elseif (.not. polygon%is_periodic() .and. (ks == np_seg) ) then
            flx_bwd = drift_flux(ks_bwd_glob)
            flx_fwd = 0.0_GP
        else
            flx_fwd = drift_flux(ks_glob)
            flx_bwd = drift_flux(ks_bwd_glob)
        endif

        res = 0.5_GP * (flx_fwd + flx_bwd)
    end function

end module