1 : !--------------------------------------------------------------------------------------------------!
2 : ! CP2K: A general program to perform molecular dynamics simulations !
3 : ! Copyright 2000-2026 CP2K developers group <https://cp2k.org> !
4 : ! !
5 : ! SPDX-License-Identifier: GPL-2.0-or-later !
6 : !--------------------------------------------------------------------------------------------------!
7 :
8 : ! **************************************************************************************************
9 : !> \brief Generate Gaussian cube files
10 : ! **************************************************************************************************
11 : MODULE realspace_grid_openpmd
12 :
13 : #ifdef __OPENPMD
14 : USE cp_files, ONLY: close_file, &
15 : open_file
16 : USE cp_log_handling, ONLY: cp_logger_get_default_io_unit
17 : USE cp_output_handling_openpmd, ONLY: cp_openpmd_get_value_unit_nr, &
18 : cp_openpmd_per_call_value_type
19 : USE kinds, ONLY: default_string_length, &
20 : dp
21 : USE message_passing, ONLY: &
22 : file_amode_rdonly, file_offset, mp_comm_type, mp_file_descriptor_type, mp_file_type, &
23 : mp_file_type_free, mp_file_type_hindexed_make_chv, mp_file_type_set_view_chv, &
24 : mpi_character_size
25 : #if defined(__parallel)
26 : #if defined(__MPI_F08)
27 : USE mpi_f08, ONLY: mpi_allreduce, mpi_integer, mpi_bxor, mpi_allgather
28 : #else
29 : USE mpi, ONLY: mpi_allreduce, mpi_integer, mpi_bxor, mpi_allgather
30 : #endif
31 : #endif
32 :
33 : USE openpmd_api, ONLY: &
34 : openpmd_attributable_type, &
35 : openpmd_dynamic_memory_view_type_1d, &
36 : openpmd_dynamic_memory_view_type_3d, &
37 : openpmd_mesh_type, &
38 : openpmd_particle_species_type, &
39 : openpmd_record_component_type, &
40 : openpmd_record_type, openpmd_type_double, openpmd_type_int
41 : USE pw_grid_types, ONLY: PW_MODE_LOCAL
42 : USE pw_types, ONLY: pw_r3d_rs_type
43 : USE util, ONLY: sort_unique
44 :
45 : #else
46 :
47 : USE pw_types, ONLY: pw_r3d_rs_type
48 : USE kinds, ONLY: dp
49 :
50 : #endif
51 :
52 : #include "../base/base_uses.f90"
53 :
54 : IMPLICIT NONE
55 :
56 : PRIVATE
57 :
58 : PUBLIC :: pw_to_openpmd
59 :
60 : #ifdef __OPENPMD
61 : CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'realspace_grid_openpmd'
62 : LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .FALSE.
63 : LOGICAL, PRIVATE :: parses_linebreaks = .FALSE., &
64 : parse_test = .TRUE.
65 :
66 : TYPE cp_openpmd_write_buffer_1d
67 : REAL(KIND=dp), POINTER :: buffer(:)
68 : END TYPE cp_openpmd_write_buffer_1d
69 : #endif
70 :
71 : CONTAINS
72 :
73 : #ifdef __OPENPMD
74 : ! **************************************************************************************************
75 : !> \brief Determines the distinct atomic numbers in particles_z and how often each occurs
76 : !> \param particles_z atomic numbers of all particles
77 : !> \param res_atom_types distinct atomic numbers, sorted in ascending order
78 : !> \param res_atom_counts number of particles for each entry of res_atom_types
79 : !> \param res_len number of valid entries in res_atom_types and res_atom_counts
80 : ! **************************************************************************************************
81 : SUBROUTINE pw_get_atom_types(particles_z, res_atom_types, res_atom_counts, res_len)
82 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
83 : INTEGER, ALLOCATABLE, DIMENSION(:), INTENT(OUT) :: res_atom_types, res_atom_counts
84 : INTEGER, INTENT(OUT) :: res_len
85 :
86 : INTEGER :: current_atom_number, i
87 : INTEGER, ALLOCATABLE, DIMENSION(:) :: particles_z_sorted
88 : LOGICAL :: unique
89 :
90 : ALLOCATE (particles_z_sorted(SIZE(particles_z)))
91 : particles_z_sorted(:) = particles_z(:)
92 : CALL sort_unique(particles_z_sorted, unique)
93 :
94 : ALLOCATE (res_atom_types(MIN(118, SIZE(particles_z))))
95 : ALLOCATE (res_atom_counts(MIN(118, SIZE(particles_z))))
96 : current_atom_number = -1
97 : res_len = 0
98 : DO i = 1, SIZE(particles_z_sorted)
99 : IF (particles_z_sorted(i) /= current_atom_number) THEN
100 : res_len = res_len + 1
101 : current_atom_number = particles_z_sorted(i)
102 : res_atom_types(res_len) = current_atom_number
103 : res_atom_counts(res_len) = 1
104 : ELSE
105 : res_atom_counts(res_len) = res_atom_counts(res_len) + 1
106 : END IF
107 : END DO
108 :
109 : END SUBROUTINE pw_get_atom_types
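! Illustrative example (not part of the compiled code), assuming sort_unique only sorts the
! copied array in place and reports via 'unique' whether all values are distinct:
! for particles_z = (/8, 1, 1/) the sorted copy is (/1, 1, 8/), so the routine returns
! res_atom_types = (/1, 8/), res_atom_counts = (/2, 1/) and res_len = 2.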
110 :
111 : ! **************************************************************************************************
112 : !> \brief Writes all particles of one atomic number as an openPMD particle species
113 : !> \param particles_z atomic numbers of all particles
114 : !> \param particles_r Cartesian coordinates of all particles (3 x number of particles)
115 : !> \param particles_zeff effective charges of all particles, stored as the "charge" record (optional)
116 : !> \param atom_type atomic number of the species to be written
117 : !> \param atom_count number of particles with this atomic number
118 : !> \param openpmd_data per-call openPMD state (iteration and name prefix) of the output unit
119 : !> \param do_write_data whether this rank writes the actual particle data
120 : ! **************************************************************************************************
121 : SUBROUTINE pw_write_particle_species( &
122 : particles_z, &
123 : particles_r, &
124 : particles_zeff, &
125 : atom_type, &
126 : atom_count, &
127 : openpmd_data, &
128 : do_write_data &
129 : )
130 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
131 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN) :: particles_r
132 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
133 : INTEGER, INTENT(IN) :: atom_type, atom_count
134 : TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
135 : LOGICAL :: do_write_data
136 :
137 : CHARACTER(len=1), DIMENSION(3), PARAMETER :: dims = ["x", "y", "z"]
138 :
139 : CHARACTER(len=3) :: atom_type_as_string
140 : CHARACTER(len=default_string_length) :: species_name
141 : INTEGER :: i, j, k
142 : INTEGER, DIMENSION(1) :: global_extent, global_offset, &
143 : local_extent
144 : TYPE(cp_openpmd_write_buffer_1d) :: charge_write_buffer
145 : TYPE(cp_openpmd_write_buffer_1d), DIMENSION(3) :: write_buffers
146 : TYPE(openpmd_attributable_type) :: attr
147 : TYPE(openpmd_dynamic_memory_view_type_1d) :: unresolved_charge_write_buffer
148 : TYPE(openpmd_dynamic_memory_view_type_1d), &
149 : DIMENSION(3) :: unresolved_write_buffers
150 : TYPE(openpmd_particle_species_type) :: species
151 : TYPE(openpmd_record_component_type) :: charge_component, position_component, &
152 : position_offset_component
153 : TYPE(openpmd_record_type) :: charge, position, position_offset
154 :
155 : ! TODO: The charge is probably constant per species?
156 : ! If yes, we could use a constant component and save storage space
157 :
158 : global_extent(1) = atom_count
159 : IF (do_write_data) THEN
160 : global_offset(1) = 0
161 : local_extent(1) = atom_count
162 : ELSE
163 : global_offset(1) = 0
164 : local_extent(1) = 0
165 : END IF
166 :
167 : WRITE (atom_type_as_string, '(I3)') atom_type
168 : species_name = TRIM(openpmd_data%name_prefix)//"-"//ADJUSTL(atom_type_as_string)
169 :
170 : species = openpmd_data%iteration%get_particle_species(TRIM(species_name))
171 :
172 : position_offset = species%get_record("positionOffset")
173 : position = species%get_record("position")
174 : DO k = 1, SIZE(dims)
175 : position_offset_component = position_offset%get_component(dims(k))
176 : CALL position_offset_component%make_constant_zero(openpmd_type_int, global_extent)
177 : position_component = position%get_component(dims(k))
178 : CALL position_component%reset_dataset(openpmd_type_double, global_extent)
179 : IF (do_write_data) THEN
180 : unresolved_write_buffers(k) = &
181 : position_component%store_chunk_span_1d_double(global_offset, local_extent)
182 : write_buffers(k)%buffer => unresolved_write_buffers(k)%resolve_double(DEALLOCATE=.FALSE.)
183 : END IF
184 : END DO
185 :
186 : IF (PRESENT(particles_zeff)) THEN
187 : charge = species%get_record("charge")
188 : charge_component = charge%as_record_component()
189 : CALL charge_component%reset_dataset(openpmd_type_double, global_extent)
190 : IF (do_write_data) THEN
191 : unresolved_charge_write_buffer = charge_component%store_chunk_span_1d_double(global_offset, local_extent)
192 : charge_write_buffer%buffer => unresolved_charge_write_buffer%resolve_double(DEALLOCATE=.FALSE.)
193 : END IF
194 : END IF
195 :
196 : IF (do_write_data) THEN
197 : ! Resolve Spans for a second time to allow for internal reallocations in BP4 engine of ADIOS2
198 : DO k = 1, SIZE(dims)
199 :             write_buffers(k)%buffer => unresolved_write_buffers(k)%resolve_double(DEALLOCATE=.TRUE.)
200 : END DO
201 : IF (PRESENT(particles_zeff)) THEN
202 :             charge_write_buffer%buffer => unresolved_charge_write_buffer%resolve_double(DEALLOCATE=.TRUE.)
203 : END IF
204 : j = 1
205 : DO i = 1, SIZE(particles_z)
206 : IF (particles_z(i) == atom_type) THEN
207 : DO k = 1, 3
208 : write_buffers(k)%buffer(j) = particles_r(k, i)
209 : END DO
210 : IF (PRESENT(particles_zeff)) THEN
211 : charge_write_buffer%buffer(j) = particles_zeff(i)
212 : END IF
213 : j = j + 1
214 : END IF
215 : END DO
216 : END IF
217 : attr = openpmd_data%iteration%as_attributable()
218 : CALL attr%series_flush("hdf5.independent_stores = true")
219 : END SUBROUTINE pw_write_particle_species
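! For reference: each species is written as an openPMD particle species named
! "<name_prefix>-<Z>", with a "position" record (components x, y, z), a constant-zero
! "positionOffset" record, and, if particles_zeff is given, a scalar "charge" record.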
220 :
221 : ! **************************************************************************************************
222 : !> \brief Writes all particle species to the openPMD iteration, one species per atomic number
223 : !> \param particles_z atomic numbers of all particles
224 : !> \param particles_r Cartesian coordinates of all particles (3 x number of particles)
225 : !> \param particles_zeff effective charges of all particles (optional)
226 : !> \param atom_types distinct atomic numbers, as returned by pw_get_atom_types
227 : !> \param atom_counts number of particles for each entry of atom_types
228 : !> \param num_atom_types number of valid entries in atom_types and atom_counts
229 : !> \param openpmd_data per-call openPMD state of the output unit
230 : !> \param gid MPI communicator; if present, only rank 0 writes the particle data
231 : ! **************************************************************************************************
232 : SUBROUTINE pw_write_particles( &
233 : particles_z, &
234 : particles_r, &
235 : particles_zeff, &
236 : atom_types, &
237 : atom_counts, &
238 : num_atom_types, &
239 : openpmd_data, &
240 : gid &
241 : )
242 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
243 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN) :: particles_r
244 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
245 : INTEGER, DIMENSION(:), INTENT(IN) :: atom_types, atom_counts
246 : INTEGER, INTENT(IN), TARGET :: num_atom_types
247 : TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
248 : TYPE(mp_comm_type), OPTIONAL :: gid
249 :
250 : INTEGER :: i, mpi_rank
251 : LOGICAL :: do_write_data
252 :
253 : IF (PRESENT(gid)) THEN
254 : CALL gid%get_rank(mpi_rank)
255 : do_write_data = mpi_rank == 0
256 : ELSE
257 : do_write_data = .TRUE.
258 : END IF
259 : DO i = 1, num_atom_types
260 : CALL pw_write_particle_species( &
261 : particles_z, &
262 : particles_r, &
263 : particles_zeff, &
264 : atom_types(i), &
265 : atom_counts(i), &
266 : openpmd_data, &
267 : do_write_data &
268 : )
269 : END DO
270 : END SUBROUTINE pw_write_particles
271 :
272 : ! **************************************************************************************************
273 : !> \brief Writes a realspace grid (and, optionally, the particles) to an openPMD series
274 : !> \param pw realspace grid to be written
275 : !> \param unit_nr unit number identifying the openPMD series and iteration
276 : !> \param title title of the data set (optional)
277 : !> \param particles_r Cartesian coordinates of the particles (optional)
278 : !> \param particles_z atomic numbers of the particles (optional)
279 : !> \param particles_zeff effective charges of the particles (optional)
280 : !> \param stride output stride, either one value for all directions or one value per direction (optional)
281 : !> \param zero_tails whether to zero the tails of the grid data (optional)
282 : !> \param silent whether to suppress informational output (optional)
283 : !> \param mpi_io whether the data is written in parallel with MPI I/O (optional)
284 : ! **************************************************************************************************
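! A minimal usage sketch (assuming the openPMD series behind unit_nr has already been set up
! via cp_output_handling_openpmd); rho_r, my_r, my_z and my_stride are hypothetical
! caller-side variables, with my_stride being a pointer to an integer array:
!
!    CALL pw_to_openpmd(rho_r, unit_nr, particles_r=my_r, particles_z=my_z, &
!                       stride=my_stride, mpi_io=.TRUE.)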
285 : SUBROUTINE pw_to_openpmd( &
286 : pw, &
287 : unit_nr, &
288 : title, &
289 : particles_r, &
290 : particles_z, &
291 : particles_zeff, &
292 : stride, &
293 : zero_tails, &
294 : silent, &
295 : mpi_io &
296 : )
297 : TYPE(pw_r3d_rs_type), INTENT(IN) :: pw
298 : INTEGER :: unit_nr
299 : CHARACTER(*), INTENT(IN), OPTIONAL :: title
300 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN), &
301 : OPTIONAL :: particles_r
302 : INTEGER, DIMENSION(:), INTENT(IN), OPTIONAL :: particles_z
303 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
304 : INTEGER, DIMENSION(:), OPTIONAL, POINTER :: stride
305 : LOGICAL, INTENT(IN), OPTIONAL :: zero_tails, silent, mpi_io
306 :
307 : CHARACTER(len=*), PARAMETER :: routineN = 'pw_to_openpmd'
308 : INTEGER, PARAMETER :: entry_len = 13, num_entries_line = 6
309 :
310 : INTEGER :: count1, count2, count3, handle, i, I1, &
311 : I2, I3, iat, L1, L2, L3, my_rank, &
312 : my_stride(3), np, num_atom_types, &
313 : num_pe, U1, U2, U3
314 : INTEGER, ALLOCATABLE, DIMENSION(:) :: atom_counts, atom_types
315 : INTEGER, DIMENSION(3) :: global_extent, local_extent, offset
316 : LOGICAL :: be_silent, my_zero_tails, parallel_write
317 : REAL(KIND=dp), DIMENSION(3) :: grid_spacing
318 : REAL(KIND=dp), POINTER :: write_buffer(:, :, :)
319 : TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
320 : TYPE(mp_comm_type) :: gid
321 : TYPE(openpmd_attributable_type) :: attr
322 : TYPE(openpmd_dynamic_memory_view_type_3d) :: unresolved_write_buffer
323 : TYPE(openpmd_mesh_type) :: mesh
324 : TYPE(openpmd_record_component_type) :: scalar_mesh
325 :
326 : CALL timeset(routineN, handle)
327 :
328 : my_zero_tails = .FALSE.
329 : be_silent = .FALSE.
330 : parallel_write = .FALSE.
331 : gid = pw%pw_grid%para%group
332 : IF (PRESENT(zero_tails)) my_zero_tails = zero_tails
333 : IF (PRESENT(silent)) be_silent = silent
334 : IF (PRESENT(mpi_io)) parallel_write = mpi_io
335 : my_stride = 1
336 : IF (PRESENT(stride)) THEN
337 : IF (SIZE(stride) /= 1 .AND. SIZE(stride) /= 3) &
338 : CALL cp_abort(__LOCATION__, "STRIDE keyword can accept only 1 "// &
339 : "(the same for X,Y,Z) or 3 values. Correct your input file.")
340 : IF (SIZE(stride) == 1) THEN
341 : DO i = 1, 3
342 : my_stride(i) = stride(1)
343 : END DO
344 : ELSE
345 : my_stride = stride(1:3)
346 : END IF
347 : CPASSERT(my_stride(1) > 0)
348 : CPASSERT(my_stride(2) > 0)
349 : CPASSERT(my_stride(3) > 0)
350 : END IF
351 :
352 : openpmd_data = cp_openpmd_get_value_unit_nr(unit_nr)
353 :
354 : CPASSERT(PRESENT(particles_z) .EQV. PRESENT(particles_r))
355 : np = 0
356 : IF (PRESENT(particles_z)) THEN
357 : CALL pw_get_atom_types(particles_z, atom_types, atom_counts, num_atom_types)
358 : CPASSERT(SIZE(particles_z) == SIZE(particles_r, dim=2))
359 : np = SIZE(particles_z)
360 : END IF
361 :
362 : DO i = 1, 3
363 : ! Notes:
363 :          ! 1. This loses information on the rotation of the mesh; the mesh is stored
365 : ! without reference to a global coordinate system
366 : ! 2. This assumes that the coordinate system is not sheared
367 : grid_spacing(i) = SQRT(SUM(pw%pw_grid%dh(:, i)**2))*REAL(my_stride(i), dp)
368 : END DO
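      ! Illustrative example: for an orthorhombic cell of edge length a with n grid points
      ! along x, dh(:, 1) = (/a/n, 0, 0/), so grid_spacing(1) = my_stride(1)*a/n.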
369 :
370 : IF (PRESENT(particles_z)) THEN
371 : IF (parallel_write) THEN
372 : CALL pw_write_particles( &
373 : particles_z, &
374 : particles_r, &
375 : particles_zeff, &
376 : atom_types, &
377 : atom_counts, &
378 : num_atom_types, &
379 : openpmd_data, &
380 : gid &
381 : )
382 : ELSE
383 : CALL pw_write_particles( &
384 : particles_z, &
385 : particles_r, &
386 : particles_zeff, &
387 : atom_types, &
388 : atom_counts, &
389 : num_atom_types, &
390 : openpmd_data &
391 : )
392 : END IF
393 : END IF
394 :
395 : DO iat = 1, 3
396 : global_extent(iat) = (pw%pw_grid%npts(iat) + my_stride(iat) - 1)/my_stride(iat)
397 :          ! '+ 1' because the upper end is inclusive; '+ my_stride - 1' turns the integer division into a ceiling (upper Gauss bracket)
398 : offset(iat) = ((pw%pw_grid%bounds_local(1, iat) - pw%pw_grid%bounds(1, iat) + my_stride(iat) - 1)/my_stride(iat))
399 : ! refer local_extent to the global offset first in order to have consistent rounding
400 : local_extent(iat) = ((pw%pw_grid%bounds_local(2, iat) + 1 - pw%pw_grid%bounds(1, iat) + my_stride(iat) - 1)/my_stride(iat))
401 : END DO
402 : local_extent = local_extent - offset
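      ! Worked example of the ceiling divisions above: with my_stride = 2, bounds(1, iat) = 0 and
      ! bounds_local(:, iat) = (/5, 9/), the strided global points 6 and 8 fall into the local slab,
      ! giving offset = (5 + 2 - 1)/2 = 3 and local_extent = (9 + 1 + 2 - 1)/2 - 3 = 5 - 3 = 2.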
403 :
404 : mesh = openpmd_data%iteration%get_mesh(TRIM(openpmd_data%name_prefix))
405 : CALL mesh%set_axis_labels(["x", "y", "z"])
406 : CALL mesh%set_position([0.5_dp, 0.5_dp, 0.5_dp])
407 : CALL mesh%set_grid_global_offset([0._dp, 0._dp, 0._dp])
408 : CALL mesh%set_grid_spacing(grid_spacing)
409 : scalar_mesh = mesh%as_record_component()
410 : CALL scalar_mesh%reset_dataset(openpmd_type_double, global_extent)
411 :
412 : ! shortcut
413 : ! need to adjust L1/U1 for uneven distributions across MPI ranks
414 : ! (when working with a stride, we might have to skip the first n values)
415 : ! so keep this consistent with the offset and local_extent computed above
416 : ! L1 = pw%pw_grid%bounds_local(1, 1)
417 : L1 = offset(1)*my_stride(1)
418 : L2 = pw%pw_grid%bounds_local(1, 2)
419 : L3 = pw%pw_grid%bounds_local(1, 3)
420 : ! U1 = pw%pw_grid%bounds_local(2, 1)
421 : ! offset + local_extent is the start index for the next rank already
422 : ! since the indexes are inclusive, subtract 1 from the boundary index
423 : U1 = (offset(1) + local_extent(1) - 1)*my_stride(1)
424 : U2 = pw%pw_grid%bounds_local(2, 2)
425 : U3 = pw%pw_grid%bounds_local(2, 3)
426 :
427 : my_rank = pw%pw_grid%para%group%mepos
428 : num_pe = pw%pw_grid%para%group%num_pe
429 :
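      ! Two write paths follow: with unit stride the contiguous local block is handed to
      ! store_chunk directly; with a stride > 1 the strided values are copied plane by plane
      ! into memory spans provided by the openPMD backend.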
430 : IF (ALL(my_stride == 1)) THEN
431 : CALL scalar_mesh%store_chunk(pw%array(L1:U1, L2:U2, L3:U3), offset)
432 : ! Are there some conditions under which we can skip this flush?
433 : attr = openpmd_data%iteration%as_attributable()
434 : CALL attr%series_flush("hdf5.independent_stores = false")
435 : ELSE
436 : count3 = 0
437 : DO I3 = L3, U3, my_stride(3)
438 : ! maybe add an overload to provide `buf` here for HDF5, might have better performance
439 : ! for intermittent flushing
440 : ! or just call the buffer in the outer function if memory is no problem...
441 : unresolved_write_buffer = scalar_mesh%store_chunk_span_3d_double( &
442 : [offset(1), offset(2), offset(3) + count3], &
443 : [local_extent(1), local_extent(2), 1])
444 : write_buffer => unresolved_write_buffer%resolve_double(DEALLOCATE=.TRUE.)
445 :
446 : ! Sanity checks: ensure buffer is associated and matches expected shape
447 : CPASSERT(ASSOCIATED(write_buffer))
448 : CPASSERT(SIZE(write_buffer, 1) == local_extent(1))
449 : CPASSERT(SIZE(write_buffer, 2) == local_extent(2))
450 : CPASSERT(SIZE(write_buffer, 3) == 1)
451 :
452 : count2 = 0
453 : DO I2 = L2, U2, my_stride(2)
454 : ! This loop deals with ray (:, count2, count3) of the local subspace
455 : ! The write buffer itself has been allocated for slice (:, :, count3)
456 : count1 = 0
457 : DO I1 = L1, U1, my_stride(1)
458 : write_buffer(count1 + 1, count2 + 1, 1) = pw%array(I1, I2, I3)
459 : ! Debug: print the target indices in write_buffer to the command line
460 : ! WRITE(*,*) 'write_buffer index:', count1 + 1, ',', count2 + 1, ',', 1
461 : count1 = count1 + 1
462 : END DO
463 : count2 = count2 + 1
464 : END DO
465 : count3 = count3 + 1
466 : END DO
467 : END IF
468 :
469 : CALL timestop(handle)
470 :
471 : END SUBROUTINE pw_to_openpmd
472 :
473 : #else
474 :
475 : ! **************************************************************************************************
476 : !> \brief Stub of pw_to_openpmd, used when CP2K is compiled without the openPMD-api
477 : !> \param pw realspace grid to be written
478 : !> \param unit_nr unit number identifying the openPMD series and iteration
479 : !> \param title title of the data set (optional)
480 : !> \param particles_r Cartesian coordinates of the particles (optional)
481 : !> \param particles_z atomic numbers of the particles (optional)
482 : !> \param particles_zeff effective charges of the particles (optional)
483 : !> \param stride output stride, one value or three (optional)
484 : !> \param zero_tails whether to zero the tails of the grid data (optional)
485 : !> \param silent whether to suppress informational output (optional)
486 : !> \param mpi_io whether the data is written in parallel with MPI I/O (optional)
487 : ! **************************************************************************************************
488 0 : SUBROUTINE pw_to_openpmd( &
489 : pw, &
490 : unit_nr, &
491 : title, &
492 0 : particles_r, &
493 0 : particles_z, &
494 0 : particles_zeff, &
495 : stride, &
496 : zero_tails, &
497 : silent, &
498 : mpi_io &
499 : )
500 : TYPE(pw_r3d_rs_type), INTENT(IN) :: pw
501 : INTEGER :: unit_nr
502 : CHARACTER(*), INTENT(IN), OPTIONAL :: title
503 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN), &
504 : OPTIONAL :: particles_r
505 : INTEGER, DIMENSION(:), INTENT(IN), OPTIONAL :: particles_z
506 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
507 : INTEGER, DIMENSION(:), OPTIONAL, POINTER :: stride
508 : LOGICAL, INTENT(IN), OPTIONAL :: zero_tails, silent, mpi_io
509 :
510 : MARK_USED(pw)
511 : MARK_USED(unit_nr)
512 : MARK_USED(title)
513 : MARK_USED(particles_r)
514 : MARK_USED(particles_z)
515 : MARK_USED(particles_zeff)
516 : MARK_USED(stride)
517 : MARK_USED(zero_tails)
518 : MARK_USED(silent)
519 : MARK_USED(mpi_io)
520 0 : CPABORT("CP2K compiled without the openPMD-api")
521 :
522 0 : END SUBROUTINE pw_to_openpmd
523 :
524 : #endif
525 :
526 : END MODULE realspace_grid_openpmd