module init_braginskii_m
    !! Sets initial state for BRAGINSKII model

    use MPI
    use netcdf
    use comm_handler_m, only : comm_handler_t
    use parallelisation_setup_m, only : is_rank_info_writer
    use precision_grillix_m, only : GP
    use error_handling_grillix_m, only : handle_error_netcdf, handle_error, &
                                         error_info_t
    use status_codes_grillix_m, only : GRILLIX_ERR_NAMELIST, GRILLIX_ERR_OTHER, &
                                       GRILLIX_ERR_SOLVER3D, GRILLIX_ERR_SOLVER2D, &
                                       GRILLIX_WRN_INITIALISATION
    use parallel_map_m, only : parallel_map_t
    use penalisation_m, only : penalisation_t
    use equilibrium_storage_m, only : equilibrium_storage_t
    use polars_m, only : polars_t
    use variable_m, only : variable_t
    use boundaries_braginskii_m, only : boundaries_braginskii_t
    use multistep_m, only : karniadakis_t
    use snapshot_m, only : snapshot_t
    use polarisation_braginskii_m, only : compute_vorticity
    use ohm_electromagnetic_braginskii_m, only : compute_jpar
    use mms_braginskii_m, only : mms_sol_braginskii_ne, &
                                 mms_sol_braginskii_pot, &
                                 mms_sol_braginskii_vort, &
                                 mms_sol_braginskii_upar, &
                                 mms_sol_braginskii_jpar, &
                                 mms_sol_braginskii_apar, &
                                 mms_sol_braginskii_psipar, &
                                 mms_sol_braginskii_te, &
                                 mms_sol_braginskii_ti
    use inplane_operators_m, only : inplane_operators_t

    ! From PARALLAX
    use constants_m, only : PI, TWO_PI
    use screen_io_m, only : get_stdout
    use equilibrium_m, only : equilibrium_t
    use mesh_cart_m, only : mesh_cart_t
    use helmholtz_solver_m, only : helmholtz_solver_t

    ! Parameters
    use params_feature_selection_m, only : &
        mms_potvort_solve_on, mms_aparjpar_solve_on
    use params_brag_model_m, only : &
        boussinesq_on

    implicit none

    public :: build_braginskii_initial_state
    public :: initialise_timesteppers, initialise_timesteppers_with_mms
    public :: init_braginskii_halos

    interface
        ! Implemented in a submodule
        module subroutine init_state_profile(comm_handler, &
                                             equi, equi_on_cano, equi_on_stag, &
                                             mesh_cano, mesh_stag, &
                                             hsolver_cano, hsolver_stag, &
                                             map, &
                                             penalisation_cano, penalisation_stag, &
                                             polars_cano, polars_stag, &
                                             opsinplane_cano, opsinplane_stag, &
                                             filename, &
                                             boundaries, &
                                             ne, pot, vort, upar, jpar, apar, te, ti)
            !! Initialises variables according to equilibrium profile
            type(comm_handler_t), intent(in) :: comm_handler
                !! Communicators
            class(equilibrium_t), intent(inout) :: equi
                !! Equilibrium
            class(equilibrium_storage_t), intent(in) :: equi_on_cano
                !! Equilibrium quantities on canonical mesh
            class(equilibrium_storage_t), intent(in) :: equi_on_stag
                !! Equilibrium quantities on staggered mesh
            type(mesh_cart_t), intent(inout) :: mesh_cano
                !! Mesh (canonical)
            type(mesh_cart_t), intent(inout) :: mesh_stag
                !! Mesh (staggered)
            class(helmholtz_solver_t), intent(inout) :: hsolver_cano
                !! Elliptic (2D) solver on canonical mesh
            class(helmholtz_solver_t), intent(inout) :: hsolver_stag
                !! Elliptic (2D) solver on staggered mesh
            type(parallel_map_t), intent(in) :: map
                !! Parallel map
            type(penalisation_t), intent(in) :: penalisation_cano
                !! Penalisation (canonical)
            type(penalisation_t), intent(in) :: penalisation_stag
                !! Penalisation (staggered)
            type(polars_t), intent(in) :: polars_cano
                !! Polar grid and operators (canonical)
            type(polars_t), intent(in) :: polars_stag
                !! Polar grid and operators (staggered)
            type(inplane_operators_t), intent(in) :: opsinplane_cano
                !! In-plane operators (canonical)
            type(inplane_operators_t), intent(in) :: opsinplane_stag
                !! In-plane operators (staggered)
            character(len=*), intent(in) :: filename
                !! Filename, where parameters are possibly read from
            type(boundaries_braginskii_t), intent(inout) :: boundaries
                !! Boundary information for the BRAGINSKII model
            type(variable_t), intent(inout) :: ne
                !! Electron density
            type(variable_t), intent(inout) :: pot
                !! Electrostatic potential
            type(variable_t), intent(inout) :: vort
                !! Generalised vorticity
            type(variable_t), intent(inout) :: upar
                !! Parallel ion velocity
            type(variable_t), intent(inout) :: jpar
                !! Parallel current
            type(variable_t), intent(inout) :: apar
                !! Parallel electromagnetic potential
            type(variable_t), intent(inout) :: te
                !! Electron temperature
            type(variable_t), intent(inout) :: ti
                !! Ion temperature
        end subroutine
    end interface

contains

    subroutine build_braginskii_initial_state(comm_handler, &
                                              equi, equi_on_cano, equi_on_stag, &
                                              mesh_cano, mesh_stag, &
                                              hsolver_cano, hsolver_stag, &
                                              map, &
                                              penalisation_cano, penalisation_stag, &
                                              polars_cano, polars_stag, &
                                              opsinplane_cano, opsinplane_stag, &
                                              init_select, filename, &
                                              snaps, &
                                              boundaries, &
                                              isnaps, tau, &
                                              ne, pot, vort, &
                                              upar, jpar, apar, apar_fluct, &
                                              te, ti)
        !! Builds initial state.
        !!
        !! Depending on `init_select` the state is restored from the last snapshot
        !! ('CONTINUE'), seeded with random perturbations ('RANDOM'), seeded with
        !! Gaussian blobs ('BLOB'), set to the MMS manufactured solution ('MMS'),
        !! or set from an equilibrium profile ('PROFILE').
        type(comm_handler_t), intent(in) :: comm_handler
            !! Communicators
        class(equilibrium_t), intent(inout) :: equi
            !! Equilibrium
        class(equilibrium_storage_t), intent(in) :: equi_on_cano
            !! Equilibrium quantities on canonical mesh
        class(equilibrium_storage_t), intent(in) :: equi_on_stag
            !! Equilibrium quantities on staggered mesh
        type(mesh_cart_t), intent(inout) :: mesh_cano
            !! Mesh (canonical)
        type(mesh_cart_t), intent(inout) :: mesh_stag
            !! Mesh (staggered)
        class(helmholtz_solver_t), intent(inout) :: hsolver_cano
            !! Elliptic (2D) solver on canonical mesh
        class(helmholtz_solver_t), intent(inout) :: hsolver_stag
            !! Elliptic (2D) solver on staggered mesh
        type(parallel_map_t), intent(in) :: map
            !! Parallel map
        type(penalisation_t), intent(in) :: penalisation_cano
            !! Penalisation (canonical)
        type(penalisation_t), intent(in) :: penalisation_stag
            !! Penalisation (staggered)
        type(polars_t), intent(in) :: polars_cano
            !! Polar grid and operators (canonical)
        type(polars_t), intent(in) :: polars_stag
            !! Polar grid and operators (staggered)
        type(inplane_operators_t), intent(in) :: opsinplane_cano
            !! In-plane operators (canonical)
        type(inplane_operators_t), intent(in) :: opsinplane_stag
            !! In-plane operators (staggered)
        character(len=*), intent(in) :: init_select
            !! Type of initial state
        character(len=*), intent(in) :: filename
            !! Filename, where parameters are possibly read from
        type(snapshot_t), intent(in) :: snaps
            !! Snapshot, where initial state is possibly read from
        type(boundaries_braginskii_t), intent(inout) :: boundaries
            !! Boundary information for the BRAGINSKII model
        integer, intent(out) :: isnaps
            !! Snapshot number
        real(GP), intent(out) :: tau
            !! Time of initial state
        type(variable_t), intent(inout) :: ne
            !! Electron density
        type(variable_t), intent(inout) :: pot
            !! Electrostatic potential
        type(variable_t), intent(inout) :: vort
            !! Generalised vorticity
        type(variable_t), intent(inout) :: upar
            !! Parallel ion velocity
        type(variable_t), intent(inout) :: jpar
            !! Parallel current
        type(variable_t), intent(inout) :: apar
            !! Parallel electromagnetic potential
        type(variable_t), intent(inout) :: apar_fluct
            !! Fluctuation of apar used for flutter operators
        type(variable_t), intent(inout) :: te
            !! Electron temperature
        type(variable_t), intent(inout) :: ti
            !! Ion temperature

        integer :: io_error
        character(len=256) :: io_errmsg
        integer :: funit
            ! Unit for reading the parameter namelists (obtained via newunit=)
        integer :: kb, l
        real(GP) :: x, y, phi_cano, phi_stag
        real(GP) :: bckgrnd_ne, bckgrnd_te, bckgrnd_ti
        ! Switches for different blobs
        logical :: ne_blob_on, te_blob_on, ti_blob_on
        ! Variables related with density blob
        real(GP) :: xb_ne, yb_ne, phib_ne, wbx_ne, wby_ne, wbphi_ne, ampb_ne
        ! Variables related with electron temperature blob
        real(GP) :: xb_te, yb_te, phib_te, wbx_te, wby_te, wbphi_te, ampb_te
        ! Variables related with ion temperature blob
        real(GP) :: xb_ti, yb_ti, phib_ti, wbx_ti, wby_ti, wbphi_ti, ampb_ti
        ! Variables related with random numbers
        real(GP) :: rand_ne_amp, rand_te_amp, rand_ti_amp
        integer :: nseed
        integer, allocatable, dimension(:) :: seed
        real(GP), dimension(mesh_cano%get_n_points()) :: co
            ! Polarisation coefficient
        type(variable_t), allocatable, dimension(:) :: tmp_var

        namelist / init_blob / ne_blob_on, xb_ne, yb_ne, phib_ne, wbx_ne, wby_ne, wbphi_ne, ampb_ne, &
                               te_blob_on, xb_te, yb_te, phib_te, wbx_te, wby_te, wbphi_te, ampb_te, &
                               ti_blob_on, xb_ti, yb_ti, phib_ti, wbx_ti, wby_ti, wbphi_ti, ampb_ti, &
                               bckgrnd_ne, bckgrnd_te, bckgrnd_ti
        namelist / init_random / rand_ne_amp, bckgrnd_ne, rand_te_amp, bckgrnd_te, rand_ti_amp, bckgrnd_ti

        phi_cano = map%get_phi_cano()
        phi_stag = map%get_phi_stag()

        select case(init_select)

        case('CONTINUE')
            ! Restart from the last snapshot ---------------------------------------------------
            allocate(tmp_var(8))
            call snaps%read_snaps_last(comm_handler, isnaps, tau, tmp_var)
            ne   = tmp_var(1)
            te   = tmp_var(2)
            ti   = tmp_var(3)
            pot  = tmp_var(4)
            vort = tmp_var(5)
            upar = tmp_var(6)
            jpar = tmp_var(7)
            apar = tmp_var(8)
            deallocate(tmp_var)

            if (is_rank_info_writer) then
                write(get_stdout(),*)'Continuing simulation at ------------------------------'
                write(get_stdout(),201)isnaps, tau
201             FORMAT(1X,'isnaps = ',I8 /, &
                       1X,'tau = ',ES14.6E3)
                write(get_stdout(),*)'-------------------------------------------------------'
            endif

        case('RANDOM')
            ! Random numbers as initial state -------------------------------------------------
            isnaps = 1
            tau = 0.0_GP

            ! Read parameters of random -----------------------------------
            open(newunit = funit, file = filename, status = 'old', action = 'read',&
                 iostat = io_error, iomsg = io_errmsg)
            if (io_error /= 0) then
                call handle_error(io_errmsg, GRILLIX_ERR_NAMELIST, __LINE__, __FILE__)
            endif

            ! default values
            bckgrnd_ne  = 1.0_GP
            rand_ne_amp = 1.0E-2_GP
            bckgrnd_te  = 1.0_GP
            rand_te_amp = 1.0E-2_GP
            bckgrnd_ti  = 1.0_GP
            rand_ti_amp = 1.0E-2_GP

            read(funit, nml = init_random, iostat = io_error, iomsg = io_errmsg)
            if (io_error /= 0) then
                call handle_error(io_errmsg, GRILLIX_ERR_NAMELIST, __LINE__, __FILE__)
            endif

            if (is_rank_info_writer) then
                write(get_stdout(),*)'Creating random as initial state --------------------'
                write(get_stdout(),2021)rand_ne_amp, bckgrnd_ne, rand_te_amp, bckgrnd_te, rand_ti_amp, bckgrnd_ti
2021            FORMAT(1X,'rand_ne_amp = ',ES14.6E3 /, &
                       1X,'bckgrnd_ne = ',ES14.6E3 /, &
                       1X,'rand_te_amp = ',ES14.6E3 /, &
                       1X,'bckgrnd_te = ',ES14.6E3 /, &
                       1X,'rand_ti_amp = ',ES14.6E3 /, &
                       1X,'bckgrnd_ti = ',ES14.6E3 )
                write(get_stdout(),*)'-------------------------------------------------------'
            endif

            ! Close the namelist file (was leaked before: only the BLOB branch closed it)
            close(funit)

            ! Initialise variables
            call ne%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call pot%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call vort%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call upar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call jpar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call apar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call te%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call ti%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)

            ! create seed for random number generator
            ! (seeded per plane so different planes get different perturbations)
            call random_seed(size = nseed)
            allocate(seed(nseed))
            seed(1:nseed) = map%get_iplane()
            call random_seed(put = seed)

            ! Set values
            call random_number(ne%vals)
            call random_number(te%vals)
            call random_number(ti%vals)

            !$omp parallel default(none) private(l) &
            !$omp shared(mesh_cano, ne, te, ti, &
            !$omp bckgrnd_ne, bckgrnd_te, bckgrnd_ti, &
            !$omp rand_ne_amp, rand_te_amp, rand_ti_amp)
            !$omp do
            do l = 1, mesh_cano%get_n_points()
                ne%vals(l) = bckgrnd_ne + rand_ne_amp * sin(TWO_PI*ne%vals(l))
                te%vals(l) = bckgrnd_te + rand_te_amp * sin(TWO_PI*te%vals(l))
                ti%vals(l) = bckgrnd_ti + rand_ti_amp * sin(TWO_PI*ti%vals(l))
            enddo
            !$omp end do
            !$omp end parallel

        case('BLOB')
            ! blob in density as initial state ------------------------------------------------
            isnaps = 1
            tau = 0.0_GP

            ! Read parameters of blob -------------------------------------
            open(newunit = funit, file = filename, status = 'old', action = 'read',&
                 iostat = io_error, iomsg = io_errmsg)
            if (io_error /= 0) then
                call handle_error(io_errmsg, GRILLIX_ERR_NAMELIST, __LINE__, __FILE__)
            endif

            ! default values
            ne_blob_on = .false.
            xb_ne      = 0.0_GP
            yb_ne      = 0.0_GP
            phib_ne    = 0.0_GP
            wbx_ne     = 0.0_GP
            wby_ne     = 0.0_GP
            wbphi_ne   = 0.0_GP
            ampb_ne    = 0.0_GP
            te_blob_on = .false.
            xb_te      = 0.0_GP
            yb_te      = 0.0_GP
            phib_te    = 0.0_GP
            wbx_te     = 0.0_GP
            wby_te     = 0.0_GP
            wbphi_te   = 0.0_GP
            ampb_te    = 0.0_GP
            ti_blob_on = .false.
            xb_ti      = 0.0_GP
            yb_ti      = 0.0_GP
            phib_ti    = 0.0_GP
            wbx_ti     = 0.0_GP
            wby_ti     = 0.0_GP
            wbphi_ti   = 0.0_GP
            ampb_ti    = 0.0_GP
            bckgrnd_ne = 1.0_GP
            bckgrnd_te = 1.0_GP
            bckgrnd_ti = 1.0_GP

            read(funit, nml = init_blob, iostat = io_error, iomsg = io_errmsg)
            if (io_error /= 0) then
                call handle_error(io_errmsg, GRILLIX_ERR_NAMELIST, __LINE__, __FILE__)
            endif

            if (is_rank_info_writer) then
                write(get_stdout(),*)'Creating blob as initial state ------------------------'
                write(get_stdout(),202)ne_blob_on, xb_ne, yb_ne, phib_ne, wbx_ne, wby_ne, wbphi_ne, ampb_ne, &
                                       te_blob_on, xb_te, yb_te, phib_te, wbx_te, wby_te, wbphi_te, ampb_te, &
                                       ti_blob_on, xb_ti, yb_ti, phib_ti, wbx_ti, wby_ti, wbphi_ti, ampb_ti, &
                                       bckgrnd_ne, bckgrnd_te, bckgrnd_ti
202             FORMAT(1X,'ne_blob_on = ',L1 /, &
                       1X,'xb_ne = ',ES14.6E3 /, &
                       1X,'yb_ne = ',ES14.6E3 /, &
                       1X,'phib_ne = ',ES14.6E3 /, &
                       1X,'wbx_ne = ',ES14.6E3 /, &
                       1X,'wby_ne = ',ES14.6E3 /, &
                       1X,'wbphi_ne = ',ES14.6E3 /, &
                       1X,'ampb_ne = ',ES14.6E3 /, &
                       1X,'te_blob_on = ',L1 /, &
                       1X,'xb_te = ',ES14.6E3 /, &
                       1X,'yb_te = ',ES14.6E3 /, &
                       1X,'phib_te = ',ES14.6E3 /, &
                       1X,'wbx_te = ',ES14.6E3 /, &
                       1X,'wby_te = ',ES14.6E3 /, &
                       1X,'wbphi_te = ',ES14.6E3 /, &
                       1X,'ampb_te = ',ES14.6E3 /, &
                       1X,'ti_blob_on = ',L1 /, &
                       1X,'xb_ti = ',ES14.6E3 /, &
                       1X,'yb_ti = ',ES14.6E3 /, &
                       1X,'phib_ti = ',ES14.6E3 /, &
                       1X,'wbx_ti = ',ES14.6E3 /, &
                       1X,'wby_ti = ',ES14.6E3 /, &
                       1X,'wbphi_ti = ',ES14.6E3 /, &
                       1X,'ampb_ti = ',ES14.6E3 /, &
                       1X,'bckgrnd_ne = ',ES14.6E3 /, &
                       1X,'bckgrnd_te = ',ES14.6E3 /, &
                       1X,'bckgrnd_ti = ',ES14.6E3 )
                write(get_stdout(),*)'-------------------------------------------------------'
            endif
            close(funit)

            ! Initialise variables ----------------------------------------
            call ne%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call pot%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call vort%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call upar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call jpar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call apar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call te%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call ti%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)

            ! Set blob ----------------------------------------------------
            ! A minimal blob width is enforced to avoid division by (near-)zero widths
            ! inside set_blob_toroidal.
            if (ne_blob_on) then
                if (wbx_ne >= 1.0E-9_GP .and. wby_ne >= 1.0E-9_GP) then
                    call ne%set_blob_toroidal(mesh_cano, xb_ne, yb_ne, phib_ne, wbx_ne, wby_ne, wbphi_ne, ampb_ne)
                else
                    write(*,*)'error(build_braginskii_initial_state): width of density blob is too small (min 1E-9)'
                    error stop
                endif
            endif
            if (te_blob_on) then
                if (wbx_te >= 1.0E-9_GP .and. wby_te >= 1.0E-9_GP) then
                    call te%set_blob_toroidal(mesh_cano, xb_te, yb_te, phib_te, wbx_te, wby_te, wbphi_te, ampb_te)
                else
                    write(*,*)'error(build_braginskii_initial_state): width of electron temperature blob is too small (min 1E-9)'
                    error stop
                endif
            endif
            if (ti_blob_on) then
                if (wbx_ti >= 1.0E-9_GP .and. wby_ti >= 1.0E-9_GP) then
                    call ti%set_blob_toroidal(mesh_cano, xb_ti, yb_ti, phib_ti, wbx_ti, wby_ti, wbphi_ti, ampb_ti)
                else
                    write(*,*)'error(build_braginskii_initial_state): width of ion temperature blob is too small (min 1E-9)'
                    error stop
                endif
            endif

            !$omp parallel default(none) private(l) &
            !$omp shared(mesh_cano, ne, te, ti, bckgrnd_ne, bckgrnd_te, bckgrnd_ti)
            !$omp do
            do l = 1, mesh_cano%get_n_points()
                ne%vals(l) = ne%vals(l) + bckgrnd_ne
                te%vals(l) = te%vals(l) + bckgrnd_te
                ti%vals(l) = ti%vals(l) + bckgrnd_ti
            enddo
            !$omp end do
            !$omp end parallel

        case('MMS')
            ! Set initial state to MMS solution -----------------------------------------------
            isnaps = 1
            tau = 0.0_GP

            call ne%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call pot%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call vort%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call upar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call apar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call jpar%init(comm_handler, mesh_stag%get_n_points(), staggered = .true.)
            call te%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)
            call ti%init(comm_handler, mesh_cano%get_n_points(), staggered = .false.)

            ! Canonical-mesh fields, plus the polarisation coefficient `co`
            ! (Boussinesq: 1/B^2; otherwise n/B^2, both times the Jacobian)
            !$omp parallel default(none) private(l, x, y) &
            !$omp shared(mesh_cano, phi_cano, equi, equi_on_cano, tau, ne, pot, vort, te, ti, co, boussinesq_on)
            !$omp do
            do l = 1, mesh_cano%get_n_points()
                x = mesh_cano%get_x(l)
                y = mesh_cano%get_y(l)
                ne%vals(l)   = mms_sol_braginskii_ne(equi, x, y, phi_cano, tau)
                pot%vals(l)  = mms_sol_braginskii_pot(equi, x, y, phi_cano, tau)
                te%vals(l)   = mms_sol_braginskii_te(equi, x, y, phi_cano, tau)
                ti%vals(l)   = mms_sol_braginskii_ti(equi, x, y, phi_cano, tau)
                vort%vals(l) = mms_sol_braginskii_vort(equi, x, y, phi_cano, tau)
                if (boussinesq_on) then
                    co(l) = 1.0_GP / equi_on_cano%btor(l)**2 * equi%jacobian(x, y, phi_cano)
                else
                    co(l) = ne%vals(l) / equi_on_cano%btor(l)**2 * equi%jacobian(x, y, phi_cano)
                endif
            enddo
            !$omp end do
            !$omp end parallel

            ! Staggered-mesh fields
            !$omp parallel default(none) private(l, x, y) &
            !$omp shared(mesh_stag, phi_stag, equi, tau, upar, apar, jpar)
            !$omp do
            do l = 1, mesh_stag%get_n_points()
                x = mesh_stag%get_x(l)
                y = mesh_stag%get_y(l)
                upar%vals(l) = mms_sol_braginskii_upar(equi, x, y, phi_stag, tau)
                apar%vals(l) = mms_sol_braginskii_apar(equi, x, y, phi_stag, tau)
                jpar%vals(l) = mms_sol_braginskii_jpar(equi, x, y, phi_stag, tau)
            enddo
            !$omp end do
            !$omp end parallel

            ! Compute vorticity numerically
            if (mms_potvort_solve_on) then
                ! Boundary values for vorticity
                !$omp parallel default(none) private(kb, l, x, y) &
                !$omp shared(mesh_cano, phi_cano, equi, tau, boundaries)
                !$omp do
                do kb = 1, mesh_cano%get_n_points_boundary()
                    l = mesh_cano%boundary_indices(kb)
                    x = mesh_cano%get_x(l)
                    y = mesh_cano%get_y(l)
                    boundaries%vort%vals(kb) = mms_sol_braginskii_vort(equi, x, y, phi_cano, tau)
                enddo
                !$omp end do
                !$omp end parallel

                call compute_vorticity(equi, mesh_cano, boundaries, &
                                       co = co, &
                                       potv = pot%vals, &
                                       nev = ne%vals, &
                                       tiv = ti%vals, &
                                       vortv = vort%vals)
            endif

            ! Compute jpar numerically
            if (mms_aparjpar_solve_on) then
                ! Boundary values for parallel current
                !$omp parallel default(none) private(kb, l, x, y) &
                !$omp shared(mesh_stag, phi_stag, equi, tau, boundaries)
                !$omp do
                do kb = 1, mesh_stag%get_n_points_boundary()
                    l = mesh_stag%boundary_indices(kb)
                    x = mesh_stag%get_x(l)
                    y = mesh_stag%get_y(l)
                    boundaries%jpar%vals(kb) = mms_sol_braginskii_jpar(equi, x, y, phi_stag, tau)
                enddo
                !$omp end do
                !$omp end parallel

                call compute_jpar(equi, mesh_stag, boundaries, &
                                  aparv = apar%vals, &
                                  jparv = jpar%vals)
            endif

        case('PROFILE')
            ! Set an equilibrium profile as initial state -------------------------------------
            isnaps = 1
            tau = 0.0_GP
            call init_state_profile(comm_handler, &
                                    equi, equi_on_cano, equi_on_stag, &
                                    mesh_cano, mesh_stag, &
                                    hsolver_cano, hsolver_stag, &
                                    map, &
                                    penalisation_cano, penalisation_stag, &
                                    polars_cano, polars_stag, &
                                    opsinplane_cano, opsinplane_stag, &
                                    filename, &
                                    boundaries, &
                                    ne, pot, vort, upar, jpar, apar, te, ti)

        case default
            call handle_error('Init_select not valid', &
                              GRILLIX_ERR_OTHER, __LINE__, __FILE__, &
                              error_info_t(init_select))
        end select

        call apar_fluct%init(comm_handler, mesh_stag%get_n_points(), staggered=.true., init_vals=apar%vals)
        ! Initialize apar_fluct as apar. If remove_magnetic_shift = true,
        ! apar_fluct will be subsequently updated by remove_magnetic_shift_t%init.

    end subroutine

    subroutine initialise_timesteppers(mesh_cano, mesh_stag, tstep_order, dtau, &
                                       tstep_continuity, tstep_vort, tstep_parmom, tstep_ohm, tstep_etemp, tstep_itemp)
        !! Initialises the timesteppers. This automatically will use a lower-order method for the first few timesteps
        !! to fill the storage
        type(mesh_cart_t), intent(in) :: mesh_cano
            !! Mesh (canonical)
        type(mesh_cart_t), intent(in) :: mesh_stag
            !! Mesh (staggered)
        integer, intent(in) :: tstep_order
            !! Order of time-stepping scheme
        real(GP), intent(in) :: dtau
            !! Time-step size (base, not bdf)
        type(karniadakis_t), intent(inout) :: tstep_continuity
            !! Time integrator for continuity equation
        type(karniadakis_t), intent(inout) :: tstep_vort
            !! Time-step integrator for vorticity equation
        type(karniadakis_t), intent(inout) :: tstep_parmom
            !! Time-step integrator for parallel momentum equation
        type(karniadakis_t), intent(inout) :: tstep_ohm
            !! Time-step integrator for Ohm's law
        type(karniadakis_t), intent(inout) :: tstep_etemp
            !! Time-step integrator for electron temperature equation
        type(karniadakis_t), intent(inout) :: tstep_itemp
            !! Time-step integrator for ion temperature equation

        integer :: np_cano, np_stag

        np_cano = mesh_cano%get_n_points()
        np_stag = mesh_stag%get_n_points()

        ! Continuity, vorticity and both temperatures live on the canonical mesh;
        ! parallel momentum and Ohm's law live on the staggered mesh.
        call tstep_continuity%init(np_cano, tstep_order, dtau)
        call tstep_vort%init(np_cano, tstep_order, dtau)
        call tstep_parmom%init(np_stag, tstep_order, dtau)
        call tstep_ohm%init(np_stag, tstep_order, dtau)
        call tstep_etemp%init(np_cano, tstep_order, dtau)
        call tstep_itemp%init(np_cano, tstep_order, dtau)

    end subroutine

    subroutine initialise_timesteppers_with_mms(equi, mesh_cano, mesh_stag, &
                                                tstep_order, dtau, &
                                                tstep_continuity, tstep_vort, tstep_parmom, &
                                                tstep_ohm, tstep_etemp, tstep_itemp)
        !! Initialise the timesteppers, and fills their storage with the MMS solution and derivative at previous
        !! "timesteps" (which have t < 0)
        class(equilibrium_t), intent(inout) :: equi
            !! Equilibrium
        type(mesh_cart_t), intent(in) :: mesh_cano
            !! Mesh (canonical)
        type(mesh_cart_t), intent(in) :: mesh_stag
            !! Mesh (staggered)
        integer, intent(in) :: tstep_order
            !! Order of time-stepping scheme
        real(GP), intent(in) :: dtau
            !! Time-step size (base, not bdf)
        type(karniadakis_t), intent(inout) :: tstep_continuity
            !! Time integrator for continuity equation
        type(karniadakis_t), intent(inout) :: tstep_vort
            !! Time-step integrator for vorticity equation
        type(karniadakis_t), intent(inout) :: tstep_parmom
            !! Time-step integrator for parallel momentum equation
        type(karniadakis_t), intent(inout) :: tstep_ohm
            !! Time-step integrator for Ohm's law
        type(karniadakis_t), intent(inout) :: tstep_etemp
            !! Time-step integrator for electron temperature equation
        type(karniadakis_t), intent(inout) :: tstep_itemp
            !! Time-step integrator for ion temperature equation

        real(GP), dimension(mesh_cano%get_n_points(), tstep_order) :: &
            logne_first, dlogne_first, logte_first, dlogte_first, logti_first, dlogti_first
        real(GP), dimension(mesh_stag%get_n_points(), tstep_order) :: &
            upar_first, dupar_first, psipar_first, dpsipar_first
            !! Work arrays, to store the MMS solution and its derivative at t < 0
        real(GP) :: tau, phi_cano, phi_stag, x, y, dtau_eps
        integer :: iorder, ki, kb, kg, l, np_cano, np_stag

        np_cano = mesh_cano%get_n_points()
        np_stag = mesh_stag%get_n_points()
        phi_cano = mesh_cano%get_phi()
        phi_stag = mesh_stag%get_phi()

        ! Calculate the MMS solution and its (central finite-difference) time derivative
        ! at t = -i * dtau for i = 1, 2, ..., tstep_order
        dtau_eps = dtau * 1.0E-4_GP

        !$omp parallel default(none) private(ki, kb, kg, l, x, y, tau, iorder) &
        !$omp shared(equi, mesh_cano, mesh_stag, phi_cano, phi_stag, &
        !$omp tstep_order, dtau, dtau_eps, &
        !$omp logne_first, logte_first, logti_first, &
        !$omp dlogne_first, dlogte_first, dlogti_first, &
        !$omp upar_first, psipar_first, &
        !$omp dupar_first, dpsipar_first)
        do iorder = 1, tstep_order
            tau = -1.0_GP * dtau * iorder

            ! Loops over canonical quantities
            !$omp do
            do ki = 1, mesh_cano%get_n_points_inner()
                l = mesh_cano%inner_indices(ki)
                x = mesh_cano%get_x(l)
                y = mesh_cano%get_y(l)
                ! Densities/temperatures are advanced in log form, hence d/dt log f = (df/dt)/f
                logne_first(l, iorder) = log(mms_sol_braginskii_ne(equi, x, y, phi_cano, tau))
                logte_first(l, iorder) = log(mms_sol_braginskii_te(equi, x, y, phi_cano, tau))
                logti_first(l, iorder) = log(mms_sol_braginskii_ti(equi, x, y, phi_cano, tau))
                dlogne_first(l, iorder) = ( mms_sol_braginskii_ne(equi, x, y, phi_cano, tau+dtau_eps) - &
                                            mms_sol_braginskii_ne(equi, x, y, phi_cano, tau-dtau_eps) ) / &
                                          (2.0_GP * dtau_eps) / mms_sol_braginskii_ne(equi, x, y, phi_cano, tau)
                dlogte_first(l, iorder) = ( mms_sol_braginskii_te(equi, x, y, phi_cano, tau+dtau_eps) - &
                                            mms_sol_braginskii_te(equi, x, y, phi_cano, tau-dtau_eps) ) / &
                                          (2.0_GP*dtau_eps) / mms_sol_braginskii_te(equi, x, y, phi_cano, tau)
                dlogti_first(l, iorder) = ( mms_sol_braginskii_ti(equi, x, y, phi_cano, tau+dtau_eps) - &
                                            mms_sol_braginskii_ti(equi, x, y, phi_cano, tau-dtau_eps) ) / &
                                          (2.0_GP*dtau_eps) / mms_sol_braginskii_ti(equi, x, y, phi_cano, tau)
            enddo
            !$omp end do

            !$omp do
            do kb = 1, mesh_cano%get_n_points_boundary()
                ! Boundary points are advanced as dummys
                l = mesh_cano%boundary_indices(kb)
                logne_first(l, iorder)  = 0.0_GP
                logte_first(l, iorder)  = 0.0_GP
                logti_first(l, iorder)  = 0.0_GP
                dlogne_first(l, iorder) = 0.0_GP
                dlogte_first(l, iorder) = 0.0_GP
                dlogti_first(l, iorder) = 0.0_GP
            enddo
            !$omp end do

            !$omp do
            do kg = 1, mesh_cano%get_n_points_ghost()
                ! Ghost points are advanced as dummys
                l = mesh_cano%ghost_indices(kg)
                logne_first(l, iorder)  = 0.0_GP
                logte_first(l, iorder)  = 0.0_GP
                logti_first(l, iorder)  = 0.0_GP
                dlogne_first(l, iorder) = 0.0_GP
                dlogte_first(l, iorder) = 0.0_GP
                dlogti_first(l, iorder) = 0.0_GP
            enddo
            !$omp end do

            ! Loops over staggered quantities
            !$omp do
            do ki = 1, mesh_stag%get_n_points_inner()
                l = mesh_stag%inner_indices(ki)
                x = mesh_stag%get_x(l)
                y = mesh_stag%get_y(l)
                upar_first(l, iorder)   = mms_sol_braginskii_upar(equi, x, y, phi_stag, tau)
                psipar_first(l, iorder) = mms_sol_braginskii_psipar(equi, x, y, phi_stag, tau)
                dupar_first(l, iorder)  = ( mms_sol_braginskii_upar(equi, x, y, phi_stag, tau+dtau_eps) - &
                                            mms_sol_braginskii_upar(equi, x, y, phi_stag, tau-dtau_eps) ) / &
                                          (2.0_GP*dtau_eps)
                dpsipar_first(l, iorder) = ( mms_sol_braginskii_psipar(equi, x, y, phi_stag, tau+dtau_eps) - &
                                             mms_sol_braginskii_psipar(equi, x, y, phi_stag, tau-dtau_eps) ) / &
                                           (2.0_GP*dtau_eps)
            enddo
            !$omp end do

            !$omp do
            do kb = 1, mesh_stag%get_n_points_boundary()
                ! Boundary points are advanced as dummys
                l = mesh_stag%boundary_indices(kb)
                upar_first(l, iorder)   = 0.0_GP
                psipar_first(l, iorder) = 0.0_GP
                dupar_first(l, iorder)  = 0.0_GP
                dpsipar_first(l, iorder)= 0.0_GP
            enddo
            !$omp end do

            !$omp do
            do kg = 1, mesh_stag%get_n_points_ghost()
                ! Ghost points are advanced as dummys
                l = mesh_stag%ghost_indices(kg)
                upar_first(l, iorder)   = 0.0_GP
                psipar_first(l, iorder) = 0.0_GP
                dupar_first(l, iorder)  = 0.0_GP
                dpsipar_first(l, iorder)= 0.0_GP
            enddo
            !$omp end do
        enddo
        !$omp end parallel

        ! Initialise timesteppers with the initial solution
        call tstep_continuity%init(np_cano, tstep_order, dtau, logne_first, dlogne_first )
        call tstep_etemp%init(np_cano, tstep_order, dtau, logte_first, dlogte_first )
        call tstep_itemp%init(np_cano, tstep_order, dtau, logti_first, dlogti_first )

        ! call tstep_parmom%init(np_stag, tstep_order, dtau, upar_first, dupar_first )
        ! With viscosity treated implicitly, the above initialisation breaks MMS convergence.
        ! But with explicit viscosity, it can be used to check for 3rd order time convergence.
        call tstep_parmom%init(np_stag, tstep_order, dtau)
        call tstep_vort%init(np_cano, tstep_order, dtau )

        ! call tstep_ohm%init(np_stag, tstep_order, dtau, psipar_first, dpsipar_first)
        ! With resistivity treated implicitly, the above initialisation breaks MMS convergence.
        ! But without resistivity, it can be used to check for 3rd order time convergence.
        call tstep_ohm%init(np_stag, tstep_order, dtau)

    end subroutine

    subroutine init_braginskii_halos(comm_handler, ne, pot, vort, upar, jpar, apar, apar_fluct, te, ti)
        !! Allocates halo points of Braginskii fields
        type(comm_handler_t), intent(in) :: comm_handler
            !! Communicators
        type(variable_t), intent(inout) :: ne
            !! Electron density
        type(variable_t), intent(inout) :: pot
            !! Electrostatic potential
        type(variable_t), intent(inout) :: vort
            !! Generalised vorticity
        type(variable_t), intent(inout) :: upar
            !! Parallel ion velocity
        type(variable_t), intent(inout) :: jpar
            !! Parallel current
        type(variable_t), intent(inout) :: apar
            !! Parallel electromagnetic potential
        type(variable_t), intent(inout) :: apar_fluct
            !! Fluctuation of apar used for flutter operators
        type(variable_t), intent(inout) :: te
            !! Electron temperature
        type(variable_t), intent(inout) :: ti
            !! Ion temperature

        ! Variables on which second order parallel operators are applied directly
        call pot%create_halos(comm_handler, 1, 1)
        call ne%create_halos(comm_handler, 1, 1)
        call ti%create_halos(comm_handler, 1, 1)

        ! Variables which can be Neumann penalized
        call upar%create_halos(comm_handler, 1, 1)
        call vort%create_halos(comm_handler, 1, 1)

        ! Variables on which Q,M: G -> G* is applied
        call te%create_halos(comm_handler, 1, 0)

        ! Variables on which Q*,M*: G* -> G is applied
        call jpar%create_halos(comm_handler, 0, 1)
        call apar_fluct%create_halos(comm_handler, 0, 1)
        call apar%create_halos(comm_handler, 0, 1)

    end subroutine

end module