Line data Source code
1 : !--------------------------------------------------------------------------------------------------!
2 : ! CP2K: A general program to perform molecular dynamics simulations !
3 : ! Copyright 2000-2026 CP2K developers group <https://cp2k.org> !
4 : ! !
5 : ! SPDX-License-Identifier: GPL-2.0-or-later !
6 : !--------------------------------------------------------------------------------------------------!
7 :
8 : MODULE qs_tddfpt2_subgroups
9 : USE admm_types, ONLY: admm_type,&
10 : get_admm_env
11 : USE atomic_kind_types, ONLY: atomic_kind_type
12 : USE basis_set_types, ONLY: get_gto_basis_set,&
13 : gto_basis_set_type
14 : USE cell_types, ONLY: cell_type
15 : USE cp_blacs_env, ONLY: cp_blacs_env_create,&
16 : cp_blacs_env_release,&
17 : cp_blacs_env_type
18 : USE cp_control_types, ONLY: dft_control_type,&
19 : qs_control_type,&
20 : tddfpt2_control_type
21 : USE cp_dbcsr_api, ONLY: dbcsr_create,&
22 : dbcsr_distribution_release,&
23 : dbcsr_distribution_type,&
24 : dbcsr_get_info,&
25 : dbcsr_release,&
26 : dbcsr_type
27 : USE cp_dbcsr_cp2k_link, ONLY: cp_dbcsr_alloc_block_from_nbl
28 : USE cp_dbcsr_operations, ONLY: cp_dbcsr_dist2d_to_dist
29 : USE cp_fm_struct, ONLY: cp_fm_struct_create,&
30 : cp_fm_struct_release,&
31 : cp_fm_struct_type
32 : USE cp_fm_types, ONLY: cp_fm_copy_general,&
33 : cp_fm_create,&
34 : cp_fm_get_info,&
35 : cp_fm_release,&
36 : cp_fm_type
37 : USE distribution_1d_types, ONLY: distribution_1d_type
38 : USE distribution_2d_types, ONLY: distribution_2d_release,&
39 : distribution_2d_type
40 : USE distribution_methods, ONLY: distribute_molecules_2d
41 : USE hartree_local_methods, ONLY: init_coulomb_local
42 : USE hartree_local_types, ONLY: hartree_local_create,&
43 : hartree_local_release,&
44 : hartree_local_type
45 : USE input_constants, ONLY: tddfpt_kernel_full,&
46 : tddfpt_kernel_none,&
47 : tddfpt_kernel_stda
48 : USE input_section_types, ONLY: section_vals_type,&
49 : section_vals_val_get
50 : USE kinds, ONLY: default_string_length,&
51 : dp
52 : USE message_passing, ONLY: mp_para_env_release,&
53 : mp_para_env_type
54 : USE molecule_kind_types, ONLY: molecule_kind_type
55 : USE molecule_types, ONLY: molecule_type
56 : USE particle_types, ONLY: particle_type
57 : USE pw_env_methods, ONLY: pw_env_create,&
58 : pw_env_rebuild
59 : USE pw_env_types, ONLY: pw_env_release,&
60 : pw_env_retain,&
61 : pw_env_type
62 : USE pw_types, ONLY: pw_r3d_rs_type
63 : USE qs_environment_types, ONLY: get_qs_env,&
64 : qs_environment_type
65 : USE qs_kind_types, ONLY: get_qs_kind,&
66 : qs_kind_type
67 : USE qs_ks_types, ONLY: qs_ks_env_type
68 : USE qs_local_rho_types, ONLY: local_rho_set_create,&
69 : local_rho_set_release,&
70 : local_rho_type
71 : USE qs_neighbor_list_types, ONLY: neighbor_list_set_p_type,&
72 : release_neighbor_list_sets
73 : USE qs_neighbor_lists, ONLY: atom2d_build,&
74 : atom2d_cleanup,&
75 : build_neighbor_lists,&
76 : local_atoms_type,&
77 : pair_radius_setup
78 : USE qs_rho0_ggrid, ONLY: rho0_s_grid_create
79 : USE qs_rho0_methods, ONLY: init_rho0
80 : USE qs_rho_atom_methods, ONLY: allocate_rho_atom_internals
81 : USE task_list_methods, ONLY: generate_qs_task_list
82 : USE task_list_types, ONLY: allocate_task_list,&
83 : deallocate_task_list,&
84 : task_list_type
85 : #include "./base/base_uses.f90"
86 :
87 : IMPLICIT NONE
88 :
89 : PRIVATE
90 :
91 : CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
92 : LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .TRUE.
93 :
94 : PUBLIC :: tddfpt_subgroup_env_type
95 : PUBLIC :: tddfpt_sub_env_init, tddfpt_sub_env_release
96 : PUBLIC :: tddfpt_dbcsr_create_by_dist, tddfpt_fm_replicate_across_subgroups
97 :
! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE tddfpt_subgroup_env_type
      !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.
      !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
      !> can still be accessed; in this case they simply point to the corresponding global variables
      LOGICAL :: is_split = .FALSE.
      !> number of parallel groups
      INTEGER :: ngroups = -1
      !> group_distribution(0:ngroups-1) : a process with rank 'i' belongs to the parallel group
      !> with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER :: blacs_env => NULL()
      !> group-specific MPI parallel environment
      TYPE(mp_para_env_type), POINTER :: para_env => NULL()
      !> (active) occupied MOs stored in a matrix form [nao x nmo_occ(spin)] distributed across processes
      !> in the parallel group
      TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
      !> active ground-state MOs [nao x nmo_active(spin)], distributed the same way as mos_occ;
      !> when is_split is .FALSE. these are shallow copies of the global matrices
      TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_active
      !> group-specific copy of the ADMM A matrix 'admm_type%A'
      TYPE(cp_fm_type), POINTER :: admm_A => NULL()
      !
      !> indicates that a set of multi-grids has been allocated; if it is .FALSE. all the components
      !> below point to the corresponding global variables and can be accessed
      LOGICAL :: is_mgrid = .FALSE.
      !> group-specific DBCSR distribution
      TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist => NULL()
      !> group-specific two-dimensional distribution of pairs of particles
      TYPE(distribution_2d_type), POINTER :: dist_2d => NULL()
      !> group-specific plane wave environment
      TYPE(pw_env_type), POINTER :: pw_env => NULL()
      !> integration weights
      TYPE(pw_r3d_rs_type), POINTER :: xcint_weights => Null()
      !> lists of neighbours in auxiliary and primary basis sets
      TYPE(neighbor_list_set_p_type), &
         DIMENSION(:), POINTER :: sab_aux_fit => NULL(), sab_orb => NULL()
      !> task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit => NULL(), task_list_orb => NULL()
      !> soft task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit_soft => NULL(), task_list_orb_soft => NULL()
      !> GAPW local atomic grids
      TYPE(hartree_local_type), POINTER :: hartree_local => NULL()
      !> GAPW local (hard/soft) atomic densities; allocated for gapw and gapw_xc runs only
      TYPE(local_rho_type), POINTER :: local_rho_set => NULL()
      !> GAPW local atomic densities in the ADMM auxiliary basis; allocated for ADMM+GAPW runs only
      TYPE(local_rho_type), POINTER :: local_rho_set_admm => NULL()
   END TYPE tddfpt_subgroup_env_type
147 :
! **************************************************************************************************
!> \brief Structure to save global multi-grid related parameters.
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   TYPE mgrid_saved_parameters
      !> create commensurate grids
      LOGICAL :: commensurate_mgrids = .FALSE.
      !> create real-space grids
      LOGICAL :: realspace_mgrids = .FALSE.
      !> do not perform load balancing
      LOGICAL :: skip_load_balance = .FALSE.
      !> cutoff value at the finest grid level
      REAL(KIND=dp) :: cutoff = 0.0_dp
      !> inverse scale factor
      REAL(KIND=dp) :: progression_factor = 0.0_dp
      !> relative cutoff
      REAL(KIND=dp) :: relative_cutoff = 0.0_dp
      !> list of explicitly given cutoff values; this pointer takes over (not copies) the
      !> global qs_control%e_cutoff array while the TDDFPT-specific grids are in use
      REAL(KIND=dp), DIMENSION(:), POINTER :: e_cutoff => NULL()
   END TYPE mgrid_saved_parameters
170 :
171 : CONTAINS
172 :
! **************************************************************************************************
!> \brief Split MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env     parallel group environment (initialised on exit)
!> \param qs_env      Quickstep environment
!> \param mos_occ     ground state molecular orbitals in primary atomic basis set
!> \param mos_active  active ground state molecular orbitals in primary atomic basis set
!> \param kernel      Type of kernel (full/sTDA/none) that will be used
!> \par History
!>   * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
!>             [Sergey Chulkov]
!> \note When no split is performed, all (sub)group components simply alias (retain) the
!>       corresponding global objects; tddfpt_sub_env_release() relies on this distinction.
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, mos_active, kernel)
      TYPE(tddfpt_subgroup_env_type), INTENT(out) :: sub_env
      TYPE(qs_environment_type), POINTER :: qs_env
      TYPE(cp_fm_type), DIMENSION(:), INTENT(in) :: mos_occ, mos_active
      INTEGER, INTENT(in) :: kernel

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_init'

      INTEGER :: handle, ispin, nao, nao_aux, natom, &
                 nmo_active, nmo_occ, nspins
      TYPE(admm_type), POINTER :: admm_env
      TYPE(atomic_kind_type), DIMENSION(:), POINTER :: atomic_kind_set
      TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER :: fm_struct
      TYPE(dft_control_type), POINTER :: dft_control
      TYPE(mgrid_saved_parameters) :: mgrid_saved
      TYPE(mp_para_env_type), POINTER :: para_env_global
      TYPE(pw_env_type), POINTER :: pw_env_global
      TYPE(pw_r3d_rs_type), POINTER :: weights
      TYPE(qs_control_type), POINTER :: qs_control
      TYPE(qs_kind_type), DIMENSION(:), POINTER :: qs_kind_set
      TYPE(tddfpt2_control_type), POINTER :: tddfpt_control

      CALL timeset(routineN, handle)

      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! ++ split mpi communicator if
      !    a) the requested number of processors per group > 0
      !       (means that the split has been requested explicitly), and
      !    b) the number of subgroups is >= 2
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. tddfpt_control%nprocs*2 <= para_env_global%num_pe

      ALLOCATE (sub_env%mos_occ(nspins))
      ALLOCATE (sub_env%mos_active(nspins))
      NULLIFY (sub_env%admm_A)

      ! externally supplied XC integration weights cannot be redistributed across subgroups
      CALL get_qs_env(qs_env, xcint_weights=weights)
      sub_env%xcint_weights => weights
      IF (sub_env%is_split .AND. ASSOCIATED(weights)) THEN
         CPABORT('subgroups and integration weights not compatible')
      END IF

      IF (sub_env%is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, subgroup_min_size=tddfpt_control%nprocs)

         ! ++ create a new parallel environment based on the given sub-communicator)
         NULLIFY (sub_env%blacs_env)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable, however the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         ! replicate the occupied MOs onto every subgroup, redistributed over the
         ! subgroup-local BLACS context
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_occ(ispin), nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin), &
                                                      fm_dest_sub=sub_env%mos_occ(ispin), sub_env=sub_env)
         END DO

         ! same for the active MOs (their column dimension may differ from mos_occ)
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_active(ispin), nrow_global=nao, ncol_global=nmo_active)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_active, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_active(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_active(ispin), &
                                                      fm_dest_sub=sub_env%mos_active(ispin), sub_env=sub_env)
         END DO

         ! group-local copy of the ADMM projection matrix A
         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            ALLOCATE (sub_env%admm_A)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         ! no split: alias the global parallel environments (retain for correct reference counting)
         CALL para_env_global%retain()
         sub_env%para_env => para_env_global

         CALL blacs_env_global%retain()
         sub_env%blacs_env => blacs_env_global

         ! shallow copies of the global MO matrices (no new storage is allocated)
         sub_env%mos_occ(:) = mos_occ(:)
         sub_env%mos_active(:) = mos_active(:)

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      IF (kernel == tddfpt_kernel_full) THEN
         ! ++ allocate a new plane wave environment
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%task_list_orb_soft, sub_env%task_list_aux_fit_soft)

         IF (sub_env%is_mgrid) THEN
            ! temporarily override the global grid parameters with the TDDFPT/MGRID ones;
            ! they are restored via restore_qs_mgrid() once the grids have been built
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            IF (ASSOCIATED(weights)) THEN
               CPABORT('Redefining MGRID and integration weights not compatible')
            END IF

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)

            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)

            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb_soft, sab=sub_env%sab_orb, basis_type="ORB_SOFT", &
                                          distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.TRUE.)
            END IF

            IF (dft_control%do_admm) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.FALSE.)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit_soft, sab=sub_env%sab_aux_fit, &
                                             basis_type="AUX_FIT_SOFT", distribution_2d=sub_env%dist_2d, &
                                             pw_env=sub_env%pw_env, qs_env=qs_env, &
                                             skip_load_balance=qs_control%skip_load_balance_distributed, &
                                             reorder_grid_ranks=.FALSE.)
               END IF
            END IF

            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            ! no dedicated grids requested: alias the global plane-wave environment,
            ! distributions, neighbour lists and task lists
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)
            IF (dft_control%do_admm) THEN
               CALL get_admm_env(admm_env, sab_aux_fit=sub_env%sab_aux_fit, &
                                 task_list_aux_fit=sub_env%task_list_aux_fit)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  sub_env%task_list_aux_fit_soft => admm_env%admm_gapw_env%task_list
               END IF
            END IF
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, task_list_soft=sub_env%task_list_orb_soft)
            END IF
         END IF

         ! GAPW initializations
         IF (dft_control%qs_control%gapw) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            natom=natom, &
                            qs_kind_set=qs_kind_set)

            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)

            ! zcore=0.0_dp: only the response density is expanded, no core charges here
            CALL init_rho0(sub_env%local_rho_set, qs_env, dft_control%qs_control%gapw_control, &
                           zcore=0.0_dp)
            CALL rho0_s_grid_create(sub_env%pw_env, sub_env%local_rho_set%rho0_mpole)
            CALL hartree_local_create(sub_env%hartree_local)
            CALL init_coulomb_local(sub_env%hartree_local, natom)
         ELSEIF (dft_control%qs_control%gapw_xc) THEN
            ! gapw_xc needs the local atomic densities only (no local Hartree terms)
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            qs_kind_set=qs_kind_set)
            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)
         END IF

         ! ADMM/GAPW
         IF (dft_control%do_admm) THEN
            IF (dft_control%qs_control%gapw .OR. dft_control%qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set)
               CALL local_rho_set_create(sub_env%local_rho_set_admm)
               CALL allocate_rho_atom_internals(sub_env%local_rho_set_admm%rho_atom_set, atomic_kind_set, &
                                                admm_env%admm_gapw_env%admm_kind_set, &
                                                dft_control, sub_env%para_env)
            END IF
         END IF

      ELSE IF (kernel == tddfpt_kernel_stda) THEN
         ! sTDA kernel needs no plane-wave machinery, only the DBCSR distribution
         ! and the orbital neighbour list
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE IF (kernel == tddfpt_kernel_none) THEN
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE
         CPABORT("Unknown kernel type")
      END IF

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init
434 :
! **************************************************************************************************
!> \brief Release parallel group environment
!> \param sub_env parallel group environment (modified on exit)
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
!> \note Objects that merely alias global data (is_split/is_mgrid == .FALSE.) are only
!>       release()-d or left untouched, never deallocated here; objects owned by the
!>       subgroup are deallocated explicitly.
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout) :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_release'

      INTEGER :: handle, i

      CALL timeset(routineN, handle)

      ! task lists, neighbour lists and distributions are owned by the subgroup
      ! only when a dedicated set of multi-grids was built
      IF (sub_env%is_mgrid) THEN
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_aux_fit_soft)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit_soft)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         IF (ASSOCIATED(sub_env%task_list_orb_soft)) &
            CALL deallocate_task_list(sub_env%task_list_orb_soft)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! GAPW
      IF (ASSOCIATED(sub_env%local_rho_set)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set)
      END IF
      IF (ASSOCIATED(sub_env%hartree_local)) THEN
         CALL hartree_local_release(sub_env%hartree_local)
      END IF
      IF (ASSOCIATED(sub_env%local_rho_set_admm)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set_admm)
      END IF

      ! if TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      ! reference-counted: decrements the counter, frees only when it was the last reference
      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .FALSE.

      ! admm_A is owned by the subgroup only in split mode; otherwise it aliases admm_env%A
      IF (sub_env%is_split .AND. ASSOCIATED(sub_env%admm_A)) THEN
         CALL cp_fm_release(sub_env%admm_A)
         DEALLOCATE (sub_env%admm_A)
         NULLIFY (sub_env%admm_A)
      END IF

      ! in non-split mode the mos_* entries are shallow copies of the global MOs,
      ! so the underlying matrices must not be released here
      IF (sub_env%is_split) THEN
         DO i = SIZE(sub_env%mos_occ), 1, -1
            CALL cp_fm_release(sub_env%mos_occ(i))
         END DO
         DO i = SIZE(sub_env%mos_active), 1, -1
            CALL cp_fm_release(sub_env%mos_active(i))
         END DO
      END IF
      DEALLOCATE (sub_env%mos_occ)
      DEALLOCATE (sub_env%mos_active)

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .FALSE.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release
523 :
! **************************************************************************************************
!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
!>        TDDFPT/MGRID subsection. The original parameters are stored into the 'mgrid_saved'
!>        variable.
!> \param qs_control     Quickstep control parameters (modified on exit)
!> \param tddfpt_control TDDFPT control parameters
!> \param mgrid_saved    structure to hold global MGRID-related parameters (initialised on exit)
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
! **************************************************************************************************
   SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
      TYPE(qs_control_type), POINTER :: qs_control
      TYPE(tddfpt2_control_type), POINTER :: tddfpt_control
      TYPE(mgrid_saved_parameters), INTENT(out) :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'init_tddfpt_mgrid'

      INTEGER :: handle, ig, ng

      CALL timeset(routineN, handle)

      ! keep a copy of the global plane-wave grid setup so that restore_qs_mgrid()
      ! can bring it back once the TDDFPT-specific grids have been created
      mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
      mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
      mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
      mgrid_saved%cutoff = qs_control%cutoff
      mgrid_saved%progression_factor = qs_control%progression_factor
      mgrid_saved%relative_cutoff = qs_control%relative_cutoff
      mgrid_saved%e_cutoff => qs_control%e_cutoff

      ! install the TDDFPT/MGRID parameters as the defaults for all newly allocated grids
      mgrid_saved%e_cutoff => qs_control%e_cutoff
      qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
      qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
      qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
      qs_control%cutoff = tddfpt_control%mgrid_cutoff
      qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
      qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff

      ng = tddfpt_control%mgrid_ngrids
      ALLOCATE (qs_control%e_cutoff(ng))

      IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
         ! explicit cutoff list: following read_mgrid_section() there is a magic scale factor there (0.5_dp)
         qs_control%e_cutoff(1:ng) = 0.5_dp*tddfpt_control%mgrid_e_cutoff(1:ng)

         ! round 'qs_control%cutoff' upward to the nearest sub-grid's cutoff value;
         ! 'e_cutoff' is sorted in descending order, so scan from the coarsest grid up
         ig = ng
         DO WHILE (ig > 0)
            IF (qs_control%cutoff <= qs_control%e_cutoff(ig)) THEN
               qs_control%cutoff = qs_control%e_cutoff(ig)
               EXIT
            END IF
            ig = ig - 1
         END DO
         ! requested cutoff exceeds the largest listed value -> clamp to the largest one
         IF (ig <= 0) &
            qs_control%cutoff = qs_control%e_cutoff(1)
      ELSE
         ! derive the sub-grid cutoffs from the finest one via the progression factor
         qs_control%e_cutoff(1) = qs_control%cutoff
         DO ig = 2, ng
            qs_control%e_cutoff(ig) = qs_control%e_cutoff(ig - 1)/qs_control%progression_factor
         END DO
      END IF

      CALL timestop(handle)
   END SUBROUTINE init_tddfpt_mgrid
592 :
! **************************************************************************************************
!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
!> \param qs_control  Quickstep control parameters (modified on exit)
!> \param mgrid_saved structure that holds global MGRID-related parameters
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
      TYPE(qs_control_type), POINTER :: qs_control
      TYPE(mgrid_saved_parameters), INTENT(in) :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'restore_qs_mgrid'

      INTEGER :: handle

      CALL timeset(routineN, handle)

      ! drop the TDDFPT-specific cutoff list before re-attaching the saved global one
      IF (ASSOCIATED(qs_control%e_cutoff)) DEALLOCATE (qs_control%e_cutoff)
      qs_control%e_cutoff => mgrid_saved%e_cutoff

      ! put all saved scalar grid parameters back in place
      qs_control%cutoff = mgrid_saved%cutoff
      qs_control%relative_cutoff = mgrid_saved%relative_cutoff
      qs_control%progression_factor = mgrid_saved%progression_factor
      qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
      qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
      qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance

      CALL timestop(handle)
   END SUBROUTINE restore_qs_mgrid
623 :
! **************************************************************************************************
!> \brief Distribute atoms across the two-dimensional grid of processors.
!> \param distribution_2d new two-dimensional distribution of pairs of particles
!>                        (allocated and initialised on exit)
!> \param dbcsr_dist      new DBCSR distribution (allocated and initialised on exit)
!> \param blacs_env       BLACS parallel environment
!> \param qs_env          Quickstep environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER :: blacs_env
      TYPE(qs_environment_type), POINTER :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_distribution_2d'

      INTEGER :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER :: kinds
      TYPE(cell_type), POINTER :: unit_cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER :: mol_kinds
      TYPE(molecule_type), DIMENSION(:), POINTER :: molecules
      TYPE(particle_type), DIMENSION(:), POINTER :: particles
      TYPE(qs_kind_type), DIMENSION(:), POINTER :: qs_kinds
      TYPE(section_vals_type), POINTER :: force_env_section

      CALL timeset(routineN, handle)

      ! gather everything distribute_molecules_2d() needs from the Quickstep environment
      CALL get_qs_env(qs_env, atomic_kind_set=kinds, cell=unit_cell, input=force_env_section, &
                      molecule_kind_set=mol_kinds, molecule_set=molecules, &
                      particle_set=particles, qs_kind_set=qs_kinds)

      ! distribute_molecules_2d() allocates and fills a fresh distribution_2d
      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=unit_cell, &
                                   atomic_kind_set=kinds, &
                                   particle_set=particles, &
                                   qs_kind_set=qs_kinds, &
                                   molecule_kind_set=mol_kinds, &
                                   molecule_set=molecules, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=force_env_section)

      ! derive the matching DBCSR block distribution from the 2d particle distribution
      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
674 :
675 : ! **************************************************************************************************
676 : !> \brief Build task and neighbour lists for the given plane wave environment and basis set.
677 : !> \param task_list new task list (allocated and initialised on exit)
678 : !> \param sab new list of neighbours (allocated and initialised on exit)
679 : !> \param basis_type type of the basis set
680 : !> \param distribution_2d two-dimensional distribution of pairs of particles
681 : !> \param pw_env plane wave environment
682 : !> \param qs_env Quickstep environment
683 : !> \param skip_load_balance do not perform load balancing
684 : !> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
685 : !> as well as grids
686 : !> \par History
687 : !> * 09.2016 created [Sergey Chulkov]
688 : !> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
689 : ! **************************************************************************************************
690 32 : SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
691 : skip_load_balance, reorder_grid_ranks)
692 : TYPE(task_list_type), POINTER :: task_list
693 : TYPE(neighbor_list_set_p_type), DIMENSION(:), &
694 : POINTER :: sab
695 : CHARACTER(len=*), INTENT(in) :: basis_type
696 : TYPE(distribution_2d_type), POINTER :: distribution_2d
697 : TYPE(pw_env_type), POINTER :: pw_env
698 : TYPE(qs_environment_type), POINTER :: qs_env
699 : LOGICAL, INTENT(in) :: skip_load_balance, reorder_grid_ranks
700 :
701 : CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_tasklist'
702 :
703 : INTEGER :: handle, ikind, nkinds
704 32 : LOGICAL, ALLOCATABLE, DIMENSION(:) :: orb_present
705 : REAL(kind=dp) :: subcells
706 32 : REAL(kind=dp), ALLOCATABLE, DIMENSION(:) :: orb_radius
707 32 : REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :) :: pair_radius
708 32 : TYPE(atomic_kind_type), DIMENSION(:), POINTER :: atomic_kind_set
709 : TYPE(cell_type), POINTER :: cell
710 : TYPE(distribution_1d_type), POINTER :: local_particles
711 : TYPE(gto_basis_set_type), POINTER :: orb_basis_set
712 32 : TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:) :: atom2d
713 32 : TYPE(molecule_type), DIMENSION(:), POINTER :: molecule_set
714 32 : TYPE(particle_type), DIMENSION(:), POINTER :: particle_set
715 32 : TYPE(qs_kind_type), DIMENSION(:), POINTER :: qs_kind_set
716 : TYPE(qs_ks_env_type), POINTER :: ks_env
717 : TYPE(section_vals_type), POINTER :: input
718 :
719 32 : CALL timeset(routineN, handle)
720 :
721 : CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
722 : ks_env=ks_env, local_particles=local_particles, molecule_set=molecule_set, &
723 32 : particle_set=particle_set, qs_kind_set=qs_kind_set)
724 :
725 32 : nkinds = SIZE(atomic_kind_set)
726 :
727 32 : IF (.NOT. (ASSOCIATED(sab))) THEN
728 108 : ALLOCATE (atom2d(nkinds))
729 : CALL atom2d_build(atom2d, local_particles, distribution_2d, atomic_kind_set, &
730 20 : molecule_set, molecule_only=.FALSE., particle_set=particle_set)
731 :
732 60 : ALLOCATE (orb_present(nkinds))
733 60 : ALLOCATE (orb_radius(nkinds))
734 80 : ALLOCATE (pair_radius(nkinds, nkinds))
735 :
736 68 : DO ikind = 1, nkinds
737 48 : CALL get_qs_kind(qs_kind_set(ikind), basis_set=orb_basis_set, basis_type=basis_type)
738 68 : IF (ASSOCIATED(orb_basis_set)) THEN
739 48 : orb_present(ikind) = .TRUE.
740 48 : CALL get_gto_basis_set(gto_basis_set=orb_basis_set, kind_radius=orb_radius(ikind))
741 : ELSE
742 0 : orb_present(ikind) = .FALSE.
743 0 : orb_radius(ikind) = 0.0_dp
744 : END IF
745 : END DO
746 :
747 20 : CALL pair_radius_setup(orb_present, orb_present, orb_radius, orb_radius, pair_radius)
748 :
749 20 : NULLIFY (sab)
750 20 : CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
751 : CALL build_neighbor_lists(sab, particle_set, atom2d, cell, pair_radius, &
752 20 : mic=.FALSE., subcells=subcells, molecular=.FALSE., nlname="sab_orb")
753 :
754 20 : CALL atom2d_cleanup(atom2d)
755 40 : DEALLOCATE (atom2d, orb_present, orb_radius, pair_radius)
756 : END IF
757 :
758 32 : CALL allocate_task_list(task_list)
759 : CALL generate_qs_task_list(ks_env, task_list, basis_type=basis_type, &
760 : reorder_rs_grid_ranks=reorder_grid_ranks, &
761 : skip_load_balance_distributed=skip_load_balance, &
762 32 : pw_env_external=pw_env, sab_orb_external=sab)
763 :
764 32 : CALL timestop(handle)
765 64 : END SUBROUTINE tddfpt_build_tasklist
766 :
767 : ! **************************************************************************************************
768 : !> \brief Create a DBCSR matrix based on a template matrix, distribution object, and the list of
769 : !> neighbours.
770 : !> \param matrix matrix to create
771 : !> \param template template matrix
772 : !> \param dbcsr_dist DBCSR distribution
773 : !> \param sab list of neighbours
774 : !> \par History
775 : !> * 09.2016 created [Sergey Chulkov]
776 : !> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
777 : ! **************************************************************************************************
778 2894 : SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
779 : TYPE(dbcsr_type), POINTER :: matrix, template
780 : TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist
781 : TYPE(neighbor_list_set_p_type), DIMENSION(:), &
782 : POINTER :: sab
783 :
784 : CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_dbcsr_create_by_dist'
785 :
786 : CHARACTER :: matrix_type
787 : CHARACTER(len=default_string_length) :: matrix_name
788 : INTEGER :: handle
789 2894 : INTEGER, DIMENSION(:), POINTER :: col_blk_sizes, row_blk_sizes
790 :
791 2894 : CALL timeset(routineN, handle)
792 :
793 2894 : CPASSERT(ASSOCIATED(template))
794 : CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
795 2894 : name=matrix_name, matrix_type=matrix_type)
796 :
797 2894 : IF (ASSOCIATED(matrix)) THEN
798 2722 : CALL dbcsr_release(matrix)
799 : ELSE
800 172 : ALLOCATE (matrix)
801 : END IF
802 :
803 2894 : CALL dbcsr_create(matrix, matrix_name, dbcsr_dist, matrix_type, row_blk_sizes, col_blk_sizes)
804 2894 : CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)
805 :
806 2894 : CALL timestop(handle)
807 :
808 2894 : END SUBROUTINE tddfpt_dbcsr_create_by_dist
809 :
810 : ! **************************************************************************************************
811 : !> \brief Replicate a globally distributed matrix across all sub-groups. At the end
812 : !> every sub-group will hold a local copy of the original globally distributed matrix.
813 : !>
814 : !> |--------------------|
815 : !> fm_src | 0 1 2 3 |
816 : !> |--------------------|
817 : !> / MPI ranks \
818 : !> |/_ _\|
819 : !> |--------------------| |--------------------|
820 : !> fm_dest_subgroup0 | 0 1 | | 2 3 | fm_dest_subgroup1
821 : !> |--------------------| |--------------------|
822 : !> subgroup 0 subgroup 1
823 : !>
824 : !> \param fm_src globally distributed matrix to replicate
825 : !> \param fm_dest_sub subgroup-specific copy of the replicated matrix
826 : !> \param sub_env subgroup environment
827 : !> \par History
828 : !> * 09.2016 created [Sergey Chulkov]
829 : !> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
830 : ! **************************************************************************************************
831 6 : SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
832 : TYPE(cp_fm_type), INTENT(IN) :: fm_src, fm_dest_sub
833 : TYPE(tddfpt_subgroup_env_type), INTENT(in) :: sub_env
834 :
835 : CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_fm_replicate_across_subgroups'
836 :
837 : INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
838 : nrows_global_dest, nrows_global_src
839 : TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
840 : TYPE(cp_fm_type) :: fm_null
841 : TYPE(mp_para_env_type), POINTER :: para_env_global
842 :
843 24 : IF (sub_env%is_split) THEN
844 6 : CALL timeset(routineN, handle)
845 :
846 : CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
847 6 : context=blacs_env_global, para_env=para_env_global)
848 6 : CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)
849 :
850 : IF (debug_this_module) THEN
851 6 : CPASSERT(nrows_global_src == nrows_global_dest)
852 6 : CPASSERT(ncols_global_src == ncols_global_dest)
853 : END IF
854 :
855 6 : igroup_local = sub_env%group_distribution(para_env_global%mepos)
856 6 : ngroups = sub_env%ngroups
857 :
858 18 : DO igroup = 0, ngroups - 1
859 18 : IF (igroup == igroup_local) THEN
860 6 : CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
861 : ELSE
862 6 : CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
863 : END IF
864 : END DO
865 :
866 6 : CALL timestop(handle)
867 : END IF
868 6 : END SUBROUTINE tddfpt_fm_replicate_across_subgroups
869 0 : END MODULE qs_tddfpt2_subgroups
870 :
|