=== This is the CP2K Performance-Test ===
Already up to date.
Current branch master is up to date.
Already up to date.
Current branch master is up to date.
GIT Revision: d6d429ae51f26379f2dc33b64015d7bb356f05fe
################# ARCHITECTURE FILE ##################
#!/bin/bash
#
# CP2K arch file for Cray-XC50 (Piz Daint, CSCS, GPU partition)
#
# Tested with: GNU 9.3.0, Cray-MPICH 7.7.18, Cray-libsci 20.09.1, Cray-FFTW 3.3.8.10,
#              COSMA 2.6.6, ELPA 2022.11.001, LIBINT 2.6.0, LIBPEXSI 1.2.0,
#              LIBXC 6.1.0, LIBVORI 220621, LIBXSMM 1.17, PLUMED 2.8.1,
#              SIRIUS 7.4.3, SPGLIB 1.16.2
#
# Usage: Source this arch file and then run make as instructed.
#        A full toolchain installation is performed as default.
#        Replace or adapt the "module add" commands below if needed.
#
# Author: Matthias Krack (19.04.2023)
#
# \
if [ "${0}" = "${BASH_SOURCE}" ]; then \
echo "ERROR: Script ${0##*/} must be sourced"; \
echo "Usage: source ${0##*/}"; \
exit 1; \
fi; \
this_file=${BASH_SOURCE##*/}; \
if [ -n "${1}" ]; then \
gcc_version="${1}"; \
else \
gcc_version="9.3.0"; \
fi; \
module add daint-gpu; \
module rm PrgEnv-cray; \
module add PrgEnv-gnu; \
module rm gcc; \
module add gcc/${gcc_version}; \
module add cray-fftw/3.3.8.10; \
module add cudatoolkit; \
echo "Expected setup:"; \
echo " cray-mpich/7.7.18"; \
echo " craype-haswell"; \
echo " daint-gpu/21.09"; \
echo " craype/2.7.10"; \
echo " cray-libsci/20.09.1"; \
echo " PrgEnv-gnu/6.0.10"; \
echo " gcc/${gcc_version}"; \
echo " cray-fftw/3.3.8.10"; \
echo " cudatoolkit/11.0.2_3.38-8.1__g5b73779"; \
module list; \
module -f save cp2k_gpu_gnu_psmp; \
echo "To load the required modules in your batch job script, use:"; \
echo " module restore cp2k_gpu_gnu_psmp"; \
cd tools/toolchain; \
./install_cp2k_toolchain.sh --enable-cuda=yes --gpu-ver=P100 -j${maxtasks} --no-arch-files --with-gcc=system --with-libvdwxc --with-pexsi --with-plumed; \
cd ../..; \
printf "Sourcing ${PWD}/tools/toolchain/install/setup ... "; \
source ${PWD}/tools/toolchain/install/setup; \
printf "done\n"; \
echo "Check the output above for error messages and consistency!"; \
echo; \
echo "If everything is OK, you can build a CP2K production binary with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.}"; \
echo; \
echo "Alternatively, you can add further checks, e.g. for regression testing, with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.} DO_CHECKS=yes"; \
echo "or build CP2K as a library with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.} libcp2k"; \
echo; \
return
# Set options
DO_CHECKS := no
USE_ACC := yes
USE_COSMA := 2.6.6
USE_ELPA := 2022.11.001
USE_LIBINT := 2.6.0
USE_LIBPEXSI := 1.2.0
USE_LIBVORI := 220621
USE_LIBXC := 6.1.0
USE_LIBXSMM := 1.17
USE_PLUMED := 2.8.1
#USE_QUIP := 0.9.10
USE_SIRIUS := 7.4.3
USE_SPGLIB := 1.16.2
# Only needed for SIRIUS
LIBVDWXC_VER := 0.4.0
SPFFT_VER := 1.0.6
SPLA_VER := 1.5.4
HDF5_VER := 1.12.0
# Only needed for LIBPEXSI
SCOTCH_VER := 6.0.0
SUPERLU_VER := 6.1.0
LMAX := 5
MAX_CONTR := 4
GPUVER := P100
OFFLOAD_TARGET := cuda
CC := cc
CXX := CC
OFFLOAD_CC := nvcc
FC := ftn
LD := ftn
AR := ar -r
# cc, CC, and ftn include already the proper -march flag
CFLAGS := -O2 -fopenmp -fopenmp-simd -ftree-vectorize -funroll-loops -g
DFLAGS := -D__parallel
DFLAGS += -D__SCALAPACK
DFLAGS += -D__FFTW3
DFLAGS += -D__MAX_CONTR=$(strip $(MAX_CONTR))
INSTALL_PATH := $(PWD)/tools/toolchain/install
ifeq ($(DO_CHECKS), yes)
DFLAGS += -D__CHECK_DIAG
endif
ifeq ($(USE_ACC), yes)
DFLAGS += -D__DBCSR_ACC
DFLAGS += -D__OFFLOAD_CUDA
# Possibly no performance gain with PW_CUDA currently
DFLAGS += -D__NO_OFFLOAD_PW
endif
ifneq ($(USE_PLUMED),)
USE_PLUMED := $(strip $(USE_PLUMED))
PLUMED_LIB := $(INSTALL_PATH)/plumed-$(USE_PLUMED)/lib
DFLAGS += -D__PLUMED2
USE_GSL := 2.7
LIBS += $(PLUMED_LIB)/libplumed.a
endif
ifneq ($(USE_ELPA),)
USE_ELPA := $(strip $(USE_ELPA))
TARGET := nvidia
ELPA_INC := $(INSTALL_PATH)/elpa-$(USE_ELPA)/$(TARGET)/include/elpa-$(USE_ELPA)
ELPA_LIB := $(INSTALL_PATH)/elpa-$(USE_ELPA)/$(TARGET)/lib
CFLAGS += -I$(ELPA_INC)/elpa -I$(ELPA_INC)/modules
DFLAGS += -D__ELPA
ifeq ($(TARGET), nvidia)
DFLAGS += -D__ELPA_NVIDIA_GPU
endif
LIBS += $(ELPA_LIB)/libelpa.a
endif
ifneq ($(USE_QUIP),)
USE_QUIP := $(strip $(USE_QUIP))
QUIP_INC := $(INSTALL_PATH)/quip-$(USE_QUIP)/include
QUIP_LIB := $(INSTALL_PATH)/quip-$(USE_QUIP)/lib
CFLAGS += -I$(QUIP_INC)
DFLAGS += -D__QUIP
LIBS += $(QUIP_LIB)/libquip_core.a
LIBS += $(QUIP_LIB)/libatoms.a
LIBS += $(QUIP_LIB)/libFoX_sax.a
LIBS += $(QUIP_LIB)/libFoX_common.a
LIBS += $(QUIP_LIB)/libFoX_utils.a
LIBS += $(QUIP_LIB)/libFoX_fsys.a
endif
ifneq ($(USE_LIBPEXSI),)
USE_LIBPEXSI := $(strip $(USE_LIBPEXSI))
SCOTCH_VER := $(strip $(SCOTCH_VER))
SUPERLU_VER := $(strip $(SUPERLU_VER))
LIBPEXSI_INC := $(INSTALL_PATH)/pexsi-$(USE_LIBPEXSI)/include
LIBPEXSI_LIB := $(INSTALL_PATH)/pexsi-$(USE_LIBPEXSI)/lib
SCOTCH_INC := $(INSTALL_PATH)/scotch-$(SCOTCH_VER)/include
SCOTCH_LIB := $(INSTALL_PATH)/scotch-$(SCOTCH_VER)/lib
SUPERLU_INC := $(INSTALL_PATH)/superlu_dist-$(SUPERLU_VER)/include
SUPERLU_LIB := $(INSTALL_PATH)/superlu_dist-$(SUPERLU_VER)/lib
CFLAGS += -I$(LIBPEXSI_INC) -I$(SCOTCH_INC) -I$(SUPERLU_INC)
DFLAGS += -D__LIBPEXSI
LIBS += $(LIBPEXSI_LIB)/libpexsi.a
LIBS += $(SUPERLU_LIB)/libsuperlu_dist.a
LIBS += $(SCOTCH_LIB)/libptscotchparmetis.a
LIBS += $(SCOTCH_LIB)/libptscotch.a
LIBS += $(SCOTCH_LIB)/libptscotcherr.a
LIBS += $(SCOTCH_LIB)/libscotchmetis.a
LIBS += $(SCOTCH_LIB)/libscotch.a
endif
ifneq ($(USE_LIBVORI),)
USE_LIBVORI := $(strip $(USE_LIBVORI))
LIBVORI_LIB := $(INSTALL_PATH)/libvori-$(USE_LIBVORI)/lib
DFLAGS += -D__LIBVORI
LIBS += $(LIBVORI_LIB)/libvori.a
endif
ifneq ($(USE_LIBXC),)
USE_LIBXC := $(strip $(USE_LIBXC))
LIBXC_INC := $(INSTALL_PATH)/libxc-$(USE_LIBXC)/include
LIBXC_LIB := $(INSTALL_PATH)/libxc-$(USE_LIBXC)/lib
CFLAGS += -I$(LIBXC_INC)
DFLAGS += -D__LIBXC
LIBS += $(LIBXC_LIB)/libxcf03.a
LIBS += $(LIBXC_LIB)/libxc.a
endif
ifneq ($(USE_LIBINT),)
USE_LIBINT := $(strip $(USE_LIBINT))
LMAX := $(strip $(LMAX))
LIBINT_INC := $(INSTALL_PATH)/libint-v$(USE_LIBINT)-cp2k-lmax-$(LMAX)/include
LIBINT_LIB := $(INSTALL_PATH)/libint-v$(USE_LIBINT)-cp2k-lmax-$(LMAX)/lib
CFLAGS += -I$(LIBINT_INC)
DFLAGS += -D__LIBINT
LIBS += $(LIBINT_LIB)/libint2.a
endif
ifneq ($(USE_SPGLIB),)
USE_SPGLIB := $(strip $(USE_SPGLIB))
SPGLIB_INC := $(INSTALL_PATH)/spglib-$(USE_SPGLIB)/include
SPGLIB_LIB := $(INSTALL_PATH)/spglib-$(USE_SPGLIB)/lib
CFLAGS += -I$(SPGLIB_INC)
DFLAGS += -D__SPGLIB
LIBS += $(SPGLIB_LIB)/libsymspg.a
endif
ifneq ($(USE_LIBXSMM),)
USE_LIBXSMM := $(strip $(USE_LIBXSMM))
LIBXSMM_INC := $(INSTALL_PATH)/libxsmm-$(USE_LIBXSMM)/include
LIBXSMM_LIB := $(INSTALL_PATH)/libxsmm-$(USE_LIBXSMM)/lib
CFLAGS += -I$(LIBXSMM_INC)
DFLAGS += -D__LIBXSMM
LIBS += $(LIBXSMM_LIB)/libxsmmf.a
LIBS += $(LIBXSMM_LIB)/libxsmm.a
endif
ifneq ($(USE_SIRIUS),)
USE_SIRIUS := $(strip $(USE_SIRIUS))
HDF5_VER := $(strip $(HDF5_VER))
HDF5_LIB := $(INSTALL_PATH)/hdf5-$(HDF5_VER)/lib
LIBVDWXC_VER := $(strip $(LIBVDWXC_VER))
LIBVDWXC_INC := $(INSTALL_PATH)/libvdwxc-$(LIBVDWXC_VER)/include
LIBVDWXC_LIB := $(INSTALL_PATH)/libvdwxc-$(LIBVDWXC_VER)/lib
SPFFT_VER := $(strip $(SPFFT_VER))
SPFFT_INC := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/include
SPLA_VER := $(strip $(SPLA_VER))
SPLA_INC := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/include/spla
ifeq ($(USE_ACC), yes)
DFLAGS += -D__OFFLOAD_GEMM
SPFFT_LIB := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/lib/cuda
SPLA_LIB := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/lib/cuda
SIRIUS_INC := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/include/cuda
SIRIUS_LIB := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/lib/cuda
else
SPFFT_LIB := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/lib
SPLA_LIB := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/lib
SIRIUS_INC := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/include
SIRIUS_LIB := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/lib
endif
CFLAGS += -I$(LIBVDWXC_INC)
CFLAGS += -I$(SPFFT_INC)
CFLAGS += -I$(SPLA_INC)
CFLAGS += -I$(SIRIUS_INC)
DFLAGS += -D__HDF5
DFLAGS += -D__LIBVDWXC
DFLAGS += -D__SPFFT
DFLAGS += -D__SPLA
DFLAGS += -D__SIRIUS
LIBS += $(SIRIUS_LIB)/libsirius.a
LIBS += $(SPLA_LIB)/libspla.a
LIBS += $(SPFFT_LIB)/libspfft.a
LIBS += $(LIBVDWXC_LIB)/libvdwxc.a
LIBS += $(HDF5_LIB)/libhdf5.a
endif
ifneq ($(USE_COSMA),)
USE_COSMA := $(strip $(USE_COSMA))
ifeq ($(USE_ACC), yes)
USE_COSMA := $(USE_COSMA)-cuda
endif
COSMA_INC := $(INSTALL_PATH)/COSMA-$(USE_COSMA)/include
COSMA_LIB := $(INSTALL_PATH)/COSMA-$(USE_COSMA)/lib
CFLAGS += -I$(COSMA_INC)
DFLAGS += -D__COSMA
LIBS += $(COSMA_LIB)/libcosma_prefixed_pxgemm.a
LIBS += $(COSMA_LIB)/libcosma.a
LIBS += $(COSMA_LIB)/libcosta.a
LIBS += $(COSMA_LIB)/libTiled-MM.a
endif
ifneq ($(USE_GSL),)
USE_GSL := $(strip $(USE_GSL))
GSL_INC := $(INSTALL_PATH)/gsl-$(USE_GSL)/include
GSL_LIB := $(INSTALL_PATH)/gsl-$(USE_GSL)/lib
CFLAGS += -I$(GSL_INC)
DFLAGS += -D__GSL
LIBS += $(GSL_LIB)/libgsl.a
endif
CFLAGS += $(DFLAGS)
CXXFLAGS := $(CFLAGS) -std=c++11
OFFLOAD_FLAGS := $(DFLAGS) -O3 -Xcompiler="-fopenmp" -arch sm_60 --std=c++11
FCFLAGS := $(CFLAGS)
ifeq ($(shell [ $(shell gcc -dumpversion | cut -d. -f1) -gt 9 ] && echo yes), yes)
FCFLAGS += -fallow-argument-mismatch
endif
FCFLAGS += -fbacktrace
FCFLAGS += -ffree-form
FCFLAGS += -ffree-line-length-none
FCFLAGS += -fno-omit-frame-pointer
FCFLAGS += -std=f2008
ifneq ($(CUDA_HOME),)
CUDA_LIB := $(CUDA_HOME)/lib64
LDFLAGS := $(FCFLAGS) -L$(CUDA_LIB) -Wl,-rpath=$(CUDA_LIB)
else
LDFLAGS := $(FCFLAGS)
endif
LIBS += -lcusolver -lcudart -lnvrtc -lcuda -lcufft -lcublas -lrt
LIBS += -lz -ldl -lpthread -lstdc++
# End
############### END ARCHITECTURE FILE ################
===== TESTS (description) =====
~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-32 RI-RPA/RI-MP2 correlation energy
input file: benchmarks/QS_mp2_rpa/32-H2O/RI-RPA.inp
required files: ['benchmarks/QS_mp2_rpa/32-H2O/BASIS_H2O', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32.xyz', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-PBE-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-RI-dRPA-TZ.inp']
output file: result.log
# nodes = 8
# ranks/node = 2
# threads/rank = 6
nrepeat = 1
time[min] = 15
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/01
job id: 46745965
--- Point ---
name: 10
plot: h2o_32_ri_rpa_mp2
regex: Total RI-RPA Time=
label: RI-RPA (8n/2r/6t)
--- Point ---
name: 11
plot: h2o_32_ri_rpa_mp2_mem
regex: Estimated peak process memory
label: RI-RPA (8n/2r/6t)
~~~~~~~ END TEST ~~~~~~~
~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-32 RI-RPA/RI-MP2 correlation energy
input file: benchmarks/QS_mp2_rpa/32-H2O/RI-MP2.inp
required files: ['benchmarks/QS_mp2_rpa/32-H2O/BASIS_H2O', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32.xyz', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-PBE-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-HF-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-RI-MP2-TZ.inp']
output file: result.log
# nodes = 8
# ranks/node = 6
# threads/rank = 2
nrepeat = 1
time[min] = 15
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/02
job id: 46745966
--- Point ---
name: 20
plot: h2o_32_ri_rpa_mp2
regex: Total MP2 Time=
label: RI-MP2 (8n/6r/2t)
--- Point ---
name: 21
plot: h2o_32_ri_rpa_mp2_mem
regex: Estimated peak process memory
label: RI-MP2 (8n/6r/2t)
~~~~~~~ END TEST ~~~~~~~
~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 12
# threads/rank = 1
nrepeat = 1
time[min] = 5
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/03
job id: 46745967
--- Point ---
name: 100
plot: h2o_64_md
regex: CP2K
label: (8n/12r/1t)
--- Point ---
name: 101
plot: h2o_64_md_mem
regex: Estimated peak process memory
label: (8n/12r/1t)
~~~~~~~ END TEST ~~~~~~~
~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 6
# threads/rank = 2
nrepeat = 1
time[min] = 5
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/04
job id: 46745968
--- Point ---
name: 102
plot: h2o_64_md
regex: CP2K
label: (8n/6r/2t)
--- Point ---
name: 103
plot: h2o_64_md_mem
regex: Estimated peak process memory
label: (8n/6r/2t)
~~~~~~~ END TEST ~~~~~~~
~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 4
# threads/rank = 3
nrepeat = 1
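For reference, a minimal sketch of the build steps that the arch file above prints once it has been sourced. The report does not state the arch file's on-disk name, so the path arch/CRAY-XC50-gnu.psmp and the binary name cp2k.psmp are assumptions chosen to match the CRAY-XC50-gnu run directories and the cp2k_gpu_gnu_psmp module set saved above.

  # Assumed locations; adjust to the actual arch file name.
  cd cp2k
  source arch/CRAY-XC50-gnu.psmp            # loads the modules, installs and sources the toolchain
  make -j ARCH=CRAY-XC50-gnu VERSION=psmp   # production binary, expected under exe/CRAY-XC50-gnu/cp2k.psmp
  # Optional variants printed by the arch file:
  #   make -j ARCH=CRAY-XC50-gnu VERSION=psmp DO_CHECKS=yes   # adds regression checks
  #   make -j ARCH=CRAY-XC50-gnu VERSION=psmp libcp2k         # builds CP2K as a library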
time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/05 job id: 46745970 --- Point --- name: 104 plot: h2o_64_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 105 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/06 job id: 46745973 --- Point --- name: 106 plot: h2o_64_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 107 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/07 job id: 46745975 --- Point --- name: 108 plot: h2o_64_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 109 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/08 job id: 46745977 --- Point --- name: 110 plot: h2o_64_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 111 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/09 job id: 46745979 --- Point --- name: 200 plot: h2o_128_md regex: CP2K label: (8n/12r/1t) --- Point --- name: 201 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/10 job id: 46745981 --- Point --- name: 202 plot: h2o_128_md regex: CP2K label: (8n/6r/2t) --- Point --- name: 203 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/11 job id: 46745984 --- Point --- name: 
204 plot: h2o_128_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 205 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/12 job id: 46745987 --- Point --- name: 206 plot: h2o_128_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 207 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/13 job id: 46745988 --- Point --- name: 208 plot: h2o_128_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 209 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/14 job id: 46745989 --- Point --- name: 210 plot: h2o_128_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 211 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/15 job id: 46745990 --- Point --- name: 400 plot: h2o_256_md regex: CP2K label: (8n/12r/1t) --- Point --- name: 401 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/16 job id: 46745991 --- Point --- name: 402 plot: h2o_256_md regex: CP2K label: (8n/6r/2t) --- Point --- name: 403 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/17 job id: 46745992 --- Point --- name: 404 plot: h2o_256_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 405 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/4r/3t) 
~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/18 job id: 46745993 --- Point --- name: 406 plot: h2o_256_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 407 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/19 job id: 46745994 --- Point --- name: 408 plot: h2o_256_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 409 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/20 job id: 46745995 --- Point --- name: 410 plot: h2o_256_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 411 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/21 job id: 46745996 --- Point --- name: 500 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/12r/1t) --- Point --- name: 501 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/22 job id: 46745997 --- Point --- name: 502 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/6r/2t) --- Point --- name: 503 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/23 job id: 46745999 --- Point --- name: 504 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/4r/3t) --- Point --- name: 505 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) 
linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/24 job id: 46746000 --- Point --- name: 506 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/3r/4t) --- Point --- name: 507 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/25 job id: 46746001 --- Point --- name: 508 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/2r/6t) --- Point --- name: 509 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/26 job id: 46746002 --- Point --- name: 510 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/1r/12t) --- Point --- name: 511 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: 512 H2O (4 NVE MD steps on 64 nodes) input file: benchmarks/QS/00512_H2O/H2O-512_md.inp required files: [] output file: result.log # nodes = 64 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/27 job id: 46746003 --- Point --- name: 601 plot: h2o_512_md regex: CP2K label: (64n/12r/1t) --- Point --- name: 602 plot: h2o_512_md_mem regex: Estimated peak process memory label: (64n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ === END TESTS (description) === ===== PLOTS (description) ===== ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_ri_rpa_mp2", title="32 H2O molecules (RI-MP2, RI-RPA)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_ri_rpa_mp2_mem", title="32 H2O molecules (RI-MP2, RI-RPA)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_64_md", title="64 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_64_md_mem", title="64 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_128_md", title="128 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_128_md_mem", title="128 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_256_md", title="256 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_256_md_mem", title="256 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. 
peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_nrep3_ls", title="864 H2O molecules (LS SCF)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_nrep3_ls_mem", title="864 H2O molecules (LS SCF)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_512_md", title="512 H2O (4 NVE MD steps on 64 nodes)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_512_md_mem", title="512 H2O (4 NVE MD steps on 64 nodes)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" === END PLOTS (description) === ============ RESULTS ============ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/01/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 0.000000E+00 0.0% 0.0% 0.0% flops max/rank 0.000000E+00 0.0% 0.0% 0.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 0 0.0% 0.0% 0.0% number of processed stacks 0 0.0% 0.0% 0.0% average stack size 0.0 0.0 0.0 marketing flops 0.000000E+00 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 1 12. MP_Allreduce 19 21. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 15 177869. MP_Allreduce 424 8. MP_Sync 3 MP_comm_split 1 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.021 0.037 132.618 132.619 farming_run 1 2.0 132.166 132.168 132.588 132.592 ------------------------------------------------------------------------------- @@@@@@@@@@ Run number: 2 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 4194304 0.0% 0.0% 100.0% flops 14 x 32 x 32 154140672 0.0% 0.0% 100.0% flops 29 x 32 x 32 159645696 0.0% 0.0% 100.0% flops 14 x 14 x 32 208732160 0.0% 0.0% 100.0% flops 29 x 14 x 32 212860928 0.0% 0.0% 100.0% flops 14 x 29 x 32 212860928 0.0% 0.0% 100.0% flops 29 x 29 x 32 227352576 0.0% 0.0% 100.0% flops 14 x 32 x 14 896801644032 0.0% 0.0% 100.0% flops 29 x 32 x 14 928925089792 0.0% 0.0% 100.0% flops 14 x 32 x 29 928925089792 0.0% 0.0% 100.0% flops 29 x 32 x 29 962100985856 0.0% 0.0% 100.0% flops 32 x 32 x 14 1693169221632 0.0% 0.0% 100.0% flops 32 x 32 x 29 1753639550976 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 7.164741E+12 0.0% 0.0% 100.0% flops max/rank 447.801317E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 249492158 0.0% 0.0% 100.0% number of processed stacks 164328 0.0% 0.0% 100.0% average stack size 0.0 0.0 1518.3 marketing flops 7.165779E+12 ------------------------------------------------------------------------------- # multiplications 1160 max memory usage/rank 1.462092E+09 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 2592 MPI messages size (bytes): total size 1.140326E+09 min size 0.000000E+00 max size 1.663488E+06 average size 439.940750E+03 MPI breakdown and total messages size (bytes): size <= 128 132 0 128 < size <= 8192 348 2850816 8192 < size <= 32768 0 0 32768 < size <= 131072 1536 179306496 131072 < size <= 4194304 576 958169088 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 2308 54. MP_Alltoall 4670 822215. MP_ISend 2604 90577. MP_IRecv 2604 90574. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 12 MP_Bcast 230 1103589. MP_Allreduce 571 1938539. MP_Sync 25 MP_Alltoall 38 9316958. MP_SendRecv 120 384007. MP_ISendRecv 45 235435. MP_Wait 191 MP_comm_split 10 MP_ISend 127 3867574. MP_IRecv 127 3866554. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.006 0.027 114.434 114.435 qs_energies 1 2.0 0.000 0.000 114.241 114.244 mp2_main 1 3.0 0.000 0.000 112.217 112.220 mp2_gpw_main 1 4.0 0.019 0.023 111.215 111.218 mp2_ri_gpw_compute_in 1 5.0 0.171 0.172 92.666 92.780 mp2_ri_gpw_compute_in_loop 1 6.0 0.004 0.005 55.307 55.421 mp2_eri_3c_integrate_gpw 272 7.0 0.153 0.169 41.607 46.546 get_2c_integrals 1 6.0 0.008 0.009 36.592 37.188 integrate_v_rspace 273 8.0 0.433 0.448 25.044 29.648 pw_transfer 6555 10.6 0.376 0.382 27.268 27.453 fft_wrap_pw1pw2 5465 11.4 0.045 0.047 25.933 26.081 grid_integrate_task_list 273 9.0 20.874 25.919 20.874 25.919 fft_wrap_pw1pw2_100 2178 12.4 1.173 1.255 23.459 23.610 compute_2c_integrals 1 7.0 0.002 0.002 19.209 19.211 compute_2c_integrals_loop_lm 1 8.0 0.002 0.004 18.789 18.943 mp2_eri_2c_integrate_gpw 1 9.0 2.400 2.438 18.787 18.939 rpa_ri_compute_en 1 5.0 0.019 0.023 18.439 18.511 cp_fm_cholesky_decompose 12 8.2 17.317 17.914 17.317 17.914 cholesky_decomp 1 7.0 0.000 0.000 16.224 16.821 fft3d_s 5443 13.4 16.072 16.290 16.095 16.312 ao_to_mo_and_store_B_mult_1 272 7.0 10.857 15.569 10.857 15.569 calculate_wavefunction 272 8.0 5.398 5.484 12.489 13.126 rpa_num_int 1 6.0 0.000 0.000 10.616 10.617 rpa_num_int_RPA_matrix_operati 8 7.0 0.000 0.000 10.602 10.615 calc_mat_Q 8 8.0 0.000 0.000 9.467 9.556 contract_S_to_Q 8 9.0 0.000 0.000 8.888 8.975 parallel_gemm_fm 14 9.1 0.000 0.000 8.479 8.580 parallel_gemm_fm_cosma 14 10.1 8.479 8.580 8.479 8.580 calc_potential_gpw 544 9.5 0.005 0.005 8.185 8.529 mp2_eri_2c_integrate_gpw_pot_l 272 10.0 0.001 0.001 8.150 
8.377 potential_pw2rs 545 10.0 0.108 0.110 7.673 8.309 collocate_single_gaussian 272 10.0 0.039 0.042 7.429 7.680 create_integ_mat 1 6.0 0.022 0.027 7.611 7.612 array2fm 1 7.0 0.000 0.000 6.714 7.063 pw_scatter_s 2720 13.7 4.414 4.587 4.414 4.587 pw_gather_s 2722 13.2 3.872 4.234 3.872 4.234 array2fm_buffer_send 1 8.0 2.971 3.143 2.971 3.143 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="10", plot="h2o_32_ri_rpa_mp2", label="RI-RPA (8n/2r/6t)", y=111.216248, yerr=0.000000 PlotPoint: name="11", plot="h2o_32_ri_rpa_mp2_mem", label="RI-RPA (8n/2r/6t)", y=2806.000000, yerr=0.000000 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/02/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 0.000000E+00 0.0% 0.0% 0.0% flops max/rank 0.000000E+00 0.0% 0.0% 0.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 0 0.0% 0.0% 0.0% number of processed stacks 0 0.0% 0.0% 0.0% average stack size 0.0 0.0 0.0 marketing flops 0.000000E+00 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 1 12. MP_Allreduce 19 21. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 22 205321. MP_Allreduce 424 9. MP_Sync 4 MP_comm_split 1 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.027 0.037 396.729 396.731 farming_run 1 2.0 396.125 396.136 396.694 396.697 ------------------------------------------------------------------------------- @@@@@@@@@@ Run number: 2 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 16777216 0.0% 0.0% 100.0% flops 14 x 32 x 32 565182464 0.0% 0.0% 100.0% flops 29 x 32 x 32 585367552 0.0% 0.0% 100.0% flops 14 x 14 x 32 626196480 0.0% 0.0% 100.0% flops 29 x 14 x 32 638582784 0.0% 0.0% 100.0% flops 14 x 29 x 32 638582784 0.0% 0.0% 100.0% flops 29 x 29 x 32 682057728 0.0% 0.0% 100.0% flops 14 x 32 x 14 897827141120 0.0% 0.0% 100.0% flops 29 x 32 x 14 929989394432 0.0% 0.0% 100.0% flops 14 x 32 x 29 929989394432 0.0% 0.0% 100.0% flops 29 x 32 x 29 963203301376 0.0% 0.0% 100.0% flops 32 x 32 x 14 1693481172992 0.0% 0.0% 100.0% flops 32 x 32 x 29 1753962643456 0.0% 0.0% 100.0% flops inhomo. 
stacks 0 0.0% 0.0% 0.0% flops total 7.172206E+12 0.0% 0.0% 100.0% flops max/rank 150.696064E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 249788822 0.0% 0.0% 100.0% number of processed stacks 98736 0.0% 0.0% 100.0% average stack size 0.0 0.0 2529.9 marketing flops 7.174951E+12 ------------------------------------------------------------------------------- # multiplications 1140 max memory usage/rank 1.219498E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 61440 MPI messages size (bytes): total size 6.073508E+09 min size 0.000000E+00 max size 642.960000E+03 average size 98.852664E+03 MPI breakdown and total messages size (bytes): size <= 128 32004 0 128 < size <= 8192 1820 14909440 8192 < size <= 32768 0 0 32768 < size <= 131072 18640 1081442304 131072 < size <= 4194304 8976 4977156096 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 1003 44. MP_Alltoall 1797 713538. MP_ISend 3686 54943. MP_IRecv 3622 54292. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 12 MP_Bcast 743 386399. MP_Allreduce 2021 21391. MP_Sync 37 MP_Alltoall 77 11555801. MP_SendRecv 2876 2171486. MP_ISendRecv 1034 172620. MP_Wait 1346 MP_comm_split 7 MP_ISend 264 362227. MP_IRecv 264 362718. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.010 0.030 210.596 210.597 qs_energies 1 2.0 0.000 0.000 210.385 210.392 scf_env_do_scf 1 3.0 0.000 0.000 107.399 107.400 qs_ks_update_qs_env 5 5.0 0.000 0.000 106.549 106.556 rebuild_ks_matrix 4 6.0 0.000 0.000 106.548 106.555 qs_ks_build_kohn_sham_matrix 4 7.0 0.056 0.063 106.548 106.555 hfx_ks_matrix 4 8.0 0.001 0.001 106.171 106.175 integrate_four_center 4 9.0 0.143 0.461 106.170 106.175 mp2_main 1 3.0 0.000 0.000 102.695 102.702 mp2_gpw_main 1 4.0 0.031 0.043 101.851 101.861 integrate_four_center_main 4 10.0 0.086 0.509 97.033 100.825 integrate_four_center_bin 267 11.0 96.947 100.756 96.947 100.756 init_scf_loop 1 4.0 0.000 0.000 93.181 93.181 mp2_ri_gpw_compute_in 1 5.0 0.064 0.065 74.890 75.986 mp2_ri_gpw_compute_in_loop 1 6.0 0.002 0.002 54.516 55.612 mp2_eri_3c_integrate_gpw 91 7.0 0.144 0.161 42.175 47.225 integrate_v_rspace 95 8.0 0.397 0.568 28.563 33.457 pw_transfer 2240 10.6 0.143 0.165 29.942 30.387 fft_wrap_pw1pw2 1868 11.4 0.017 0.020 28.939 29.455 ao_to_mo_and_store_B_mult_1 91 7.0 10.661 29.201 10.661 29.201 grid_integrate_task_list 95 9.0 23.858 28.970 23.858 28.970 mp2_ri_gpw_compute_en 1 5.0 0.055 0.064 26.817 28.590 fft_wrap_pw1pw2_100 730 12.4 1.292 1.472 26.642 27.211 mp2_ri_gpw_compute_en_RI_loop 1 6.0 1.837 1.905 25.075 25.087 get_2c_integrals 1 6.0 0.000 0.000 20.286 20.310 compute_2c_integrals 1 7.0 0.003 0.004 19.266 19.269 compute_2c_integrals_loop_lm 1 8.0 0.001 
0.001 18.912 19.142 mp2_eri_2c_integrate_gpw 1 9.0 1.737 1.876 18.911 19.140 fft3d_s 1823 13.4 18.447 18.997 18.460 19.010 scf_env_do_scf_inner_loop 4 4.0 0.000 0.000 14.217 14.217 calculate_wavefunction 91 8.0 2.005 2.035 9.721 9.976 mp2_ri_gpw_compute_en_expansio 172 7.0 0.557 0.578 8.751 9.432 potential_pw2rs 186 10.0 0.034 0.035 8.659 9.258 local_gemm 172 8.0 8.194 8.856 8.194 8.856 mp2_eri_2c_integrate_gpw_pot_l 91 10.0 0.000 0.001 8.279 8.634 mp2_ri_gpw_compute_en_comm 22 7.0 0.503 0.532 8.090 8.525 calc_potential_gpw 182 9.5 0.002 0.002 7.926 8.138 collocate_single_gaussian 91 10.0 0.016 0.019 7.897 8.133 mp_sync 37 10.5 3.824 7.737 3.824 7.737 mp_sendrecv_dm3 2068 8.0 6.119 6.526 6.119 6.526 mp2_ri_gpw_compute_en_ener 172 7.0 6.353 6.450 6.353 6.450 pw_gather_s 912 13.2 4.903 5.498 4.903 5.498 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="20", plot="h2o_32_ri_rpa_mp2", label="RI-MP2 (8n/6r/2t)", y=101.852548, yerr=0.000000 PlotPoint: name="21", plot="h2o_32_ri_rpa_mp2_mem", label="RI-MP2 (8n/6r/2t)", y=1507.000000, yerr=0.000000 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/03/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 29.277748E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 5055360 0.0% 0.0% 100.0% average stack size 0.0 0.0 29.1 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 450.949120E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 9436608 MPI messages size (bytes): total size 333.233553E+09 min size 0.000000E+00 max size 315.840000E+03 average size 35.312852E+03 MPI breakdown and total messages size (bytes): size <= 128 4913240 0 128 < size <= 8192 1155432 9465298944 8192 < size <= 32768 1984512 54190407680 32768 < size <= 131072 551296 42776657920 131072 < size <= 4194304 832128 226802306368 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3683 62385. MP_Allreduce 10329 270. 
MP_Sync 530 MP_Alltoall 2083 MP_SendRecv 22610 5520. MP_ISendRecv 22610 5520. MP_Wait 37876 MP_comm_split 50 MP_ISend 20771 42672. MP_IRecv 20771 42672. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.011 0.027 53.292 53.293 qs_mol_dyn_low 1 2.0 0.003 0.004 53.096 53.101 qs_forces 11 3.9 0.002 0.003 53.034 53.035 qs_energies 11 4.9 0.002 0.002 51.511 51.524 scf_env_do_scf 11 5.9 0.000 0.001 45.523 45.523 scf_env_do_scf_inner_loop 108 6.5 0.002 0.007 43.500 43.500 qs_scf_new_mos 108 7.5 0.000 0.001 33.346 33.614 qs_scf_loop_do_ot 108 8.5 0.000 0.001 33.346 33.614 dbcsr_multiply_generic 2286 12.5 0.091 0.096 33.114 33.549 ot_scf_mini 108 9.5 0.002 0.002 31.731 31.908 multiply_cannon 2286 13.5 0.186 0.195 26.004 27.649 multiply_cannon_loop 2286 14.5 1.500 1.594 25.344 27.001 velocity_verlet 10 3.0 0.001 0.001 25.899 25.900 ot_mini 108 10.5 0.001 0.001 19.235 19.476 qs_ot_get_derivative 108 11.5 0.001 0.001 16.290 16.477 mp_waitall_1 245248 16.5 8.247 14.667 8.247 14.667 multiply_cannon_metrocomm3 54864 15.5 0.068 0.075 5.937 13.049 multiply_cannon_multrec 54864 15.5 4.249 6.507 7.784 11.309 qs_ot_get_p 119 10.4 0.001 0.001 7.961 8.239 rebuild_ks_matrix 119 8.3 0.000 0.000 7.879 8.010 qs_ks_build_kohn_sham_matrix 119 9.3 0.011 0.011 7.878 8.009 multiply_cannon_sync_h2d 54864 15.5 5.945 7.217 5.945 7.217 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.929 7.042 mp_sum_l 7287 12.8 5.223 6.900 5.223 6.900 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 5.583 6.049 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 5.361 5.474 qs_ot_p2m_diag 50 11.0 0.004 0.006 5.220 5.246 dbcsr_mm_accdrv_process 76910 16.1 1.175 1.879 3.456 4.874 init_scf_run 11 5.9 0.000 0.001 4.774 4.774 scf_env_initial_rho_setup 11 6.9 0.001 0.001 4.774 4.774 cp_dbcsr_syevd 50 12.0 0.003 0.003 4.530 4.530 sum_up_and_integrate 119 10.3 0.012 0.015 4.519 4.528 integrate_v_rspace 119 11.3 0.003 0.003 4.506 4.516 qs_rho_update_rho_low 119 7.7 0.001 0.001 4.236 4.358 calculate_rho_elec 119 8.7 0.012 0.017 4.235 4.357 cp_fm_diag_elpa 50 13.0 0.000 0.000 4.335 4.335 cp_fm_redistribute_end 50 14.0 2.212 4.311 2.219 4.314 cp_fm_diag_elpa_base 50 14.0 2.089 4.205 2.093 4.213 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.877 3.063 apply_single 119 13.6 0.000 0.001 2.877 3.062 calculate_dm_sparse 119 9.5 0.000 0.001 2.928 3.046 jit_kernel_multiply 13 15.8 2.217 2.925 2.217 2.925 multiply_cannon_metrocomm1 54864 15.5 0.053 0.058 1.686 2.843 rs_pw_transfer 974 11.9 0.012 0.013 2.716 2.832 ot_diis_step 108 11.5 0.006 0.006 2.655 2.655 calculate_first_density_matrix 1 7.0 0.000 0.000 2.630 2.633 qs_ot_get_orbitals 108 10.5 0.000 0.000 2.365 2.430 density_rs2pw 119 9.7 0.004 0.005 2.194 2.335 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.320 2.322 acc_transpose_blocks 54864 15.5 0.236 0.259 1.778 2.218 grid_integrate_task_list 119 12.3 2.038 2.134 2.038 2.134 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.070 2.119 wfi_extrapolate 11 7.9 0.001 0.001 2.083 2.083 init_scf_loop 11 6.9 0.000 0.001 2.006 2.006 mp_sum_d 4135 12.0 1.326 1.986 1.326 1.986 potential_pw2rs 119 12.3 0.004 0.005 1.841 1.852 pw_transfer 1439 11.6 0.052 0.057 1.702 1.771 fft_wrap_pw1pw2 1201 12.6 0.007 0.007 1.626 1.697 make_m2s 4572 13.5 0.054 0.056 1.574 1.621 
make_images 4572 14.5 0.133 0.138 1.492 1.539 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.001 1.484 1.512 grid_collocate_task_list 119 9.7 1.383 1.447 1.383 1.447 mp_alltoall_d11v 2130 13.8 1.231 1.445 1.231 1.445 mp_waitany 12084 13.8 1.253 1.437 1.253 1.437 fft3d_ps 1201 14.6 0.366 0.471 1.269 1.330 fft_wrap_pw1pw2_140 487 13.2 0.183 0.199 1.256 1.326 dbcsr_dot_sd 1205 11.9 0.049 0.060 0.725 1.111 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="100", plot="h2o_64_md", label="(8n/12r/1t)", y=53.293000, yerr=0.000000 PlotPoint: name="101", plot="h2o_64_md_mem", label="(8n/12r/1t)", y=430.000000, yerr=1.348400 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/04/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 57.173320E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3066240 0.0% 0.0% 100.0% average stack size 0.0 0.0 47.9 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 483.336192E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 2194560 MPI messages size (bytes): total size 310.646604E+09 min size 0.000000E+00 max size 1.145520E+06 average size 141.553031E+03 MPI breakdown and total messages size (bytes): size <= 128 724648 0 128 < size <= 8192 253512 2076770304 8192 < size <= 32768 281952 4619501568 32768 < size <= 131072 494448 39143342080 131072 < size <= 4194304 440000 264807943488 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3672 62664. MP_Allreduce 10306 303. MP_Sync 54 MP_Alltoall 2060 1365795. MP_SendRecv 16779 37093. MP_ISendRecv 16779 37093. MP_Wait 23539 MP_comm_split 50 MP_ISend 5720 128509. MP_IRecv 5720 128509. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.018 0.036 38.444 38.445 qs_mol_dyn_low 1 2.0 0.003 0.004 38.196 38.203 qs_forces 11 3.9 0.003 0.011 38.137 38.138 qs_energies 11 4.9 0.002 0.005 36.429 36.433 scf_env_do_scf 11 5.9 0.001 0.002 31.273 31.273 scf_env_do_scf_inner_loop 108 6.5 0.002 0.009 28.834 28.836 dbcsr_multiply_generic 2286 12.5 0.099 0.103 21.119 21.481 qs_scf_new_mos 108 7.5 0.001 0.001 19.736 19.986 qs_scf_loop_do_ot 108 8.5 0.001 0.001 19.736 19.985 ot_scf_mini 108 9.5 0.002 0.003 18.847 19.021 velocity_verlet 10 3.0 0.001 0.002 17.963 17.964 multiply_cannon 2286 13.5 0.208 0.219 16.294 17.824 multiply_cannon_loop 2286 14.5 0.909 0.985 15.215 16.602 ot_mini 108 10.5 0.001 0.001 11.556 11.790 mp_waitall_1 200699 16.5 5.507 10.757 5.507 10.757 multiply_cannon_metrocomm3 27432 15.5 0.067 0.070 4.100 9.559 qs_ot_get_derivative 108 11.5 0.001 0.001 9.109 9.284 multiply_cannon_multrec 27432 15.5 1.981 4.448 5.913 8.701 rebuild_ks_matrix 119 8.3 0.000 0.000 7.170 7.307 qs_ks_build_kohn_sham_matrix 119 9.3 0.013 0.014 7.170 7.306 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.314 6.437 dbcsr_mm_accdrv_process 47894 16.0 3.097 5.192 3.862 5.707 qs_ot_get_p 119 10.4 0.001 0.001 4.435 4.663 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 3.515 4.364 sum_up_and_integrate 119 10.3 0.025 0.028 4.174 4.181 integrate_v_rspace 119 11.3 0.002 0.003 4.149 4.157 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 3.042 4.116 apply_single 119 13.6 0.000 0.000 3.041 4.116 mp_sum_l 7287 12.8 1.995 3.950 1.995 3.950 init_scf_run 11 5.9 0.000 0.001 3.902 3.903 scf_env_initial_rho_setup 11 6.9 0.001 0.002 3.902 3.902 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.772 3.804 calculate_rho_elec 119 8.7 0.021 0.024 3.771 3.804 qs_ot_p2m_diag 50 11.0 0.009 0.017 3.034 3.053 rs_pw_transfer 974 11.9 0.010 0.011 2.495 2.921 multiply_cannon_sync_h2d 27432 15.5 2.167 2.837 2.167 2.837 cp_dbcsr_syevd 50 12.0 0.003 0.006 2.602 2.603 make_m2s 4572 13.5 0.053 0.054 2.384 2.599 make_images 4572 14.5 0.203 0.240 2.298 2.513 density_rs2pw 119 9.7 0.006 0.020 2.045 2.474 init_scf_loop 11 6.9 0.002 0.013 2.419 2.420 calculate_first_density_matrix 1 7.0 0.000 0.001 2.398 2.401 ot_diis_step 108 11.5 0.010 0.011 2.399 2.399 cp_fm_diag_elpa 50 13.0 0.000 0.002 2.286 2.286 cp_fm_redistribute_end 50 14.0 1.157 2.258 1.160 2.261 cp_fm_diag_elpa_base 50 14.0 1.068 2.172 1.097 2.206 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 2.082 2.174 jit_kernel_multiply 11 16.2 0.713 2.170 0.713 2.170 calculate_dm_sparse 119 9.5 0.000 0.001 2.069 2.147 grid_integrate_task_list 119 12.3 1.879 1.964 1.879 1.964 pw_transfer 1439 11.6 0.065 0.069 1.905 1.944 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.915 1.918 potential_pw2rs 119 12.3 0.006 0.006 1.845 1.855 fft_wrap_pw1pw2 1201 12.6 0.008 0.009 1.813 1.853 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.729 1.771 acc_transpose_blocks 27432 15.5 0.115 0.121 1.242 1.554 prepare_preconditioner 11 7.9 0.000 0.000 1.498 1.525 make_preconditioner 11 8.9 0.001 0.003 1.498 1.525 make_images_data 4572 15.5 0.045 0.052 1.114 1.516 fft_wrap_pw1pw2_140 487 13.2 0.205 0.215 1.438 1.475 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.398 1.452 grid_collocate_task_list 119 9.7 1.284 1.441 1.284 1.441 
wfi_extrapolate 11 7.9 0.001 0.001 1.431 1.431 hybrid_alltoall_any 4725 16.4 0.051 0.111 0.961 1.427 fft3d_ps 1201 14.6 0.517 0.577 1.359 1.391 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.284 1.291 mp_allgather_i34 2286 14.5 0.518 1.266 0.518 1.266 mp_alltoall_d11v 2130 13.8 1.139 1.253 1.139 1.253 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.153 1.205 mp_sum_d 4135 12.0 0.578 1.033 0.578 1.033 mp_waitany 5720 13.7 0.552 1.025 0.552 1.025 rs_pw_transfer_RS2PW_140 130 11.5 0.141 0.149 0.579 1.001 qs_energies_init_hamiltonians 11 5.9 0.000 0.002 0.981 0.982 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.915 0.928 acc_transpose_blocks_kernels 27432 16.5 0.183 0.273 0.688 0.924 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="102", plot="h2o_64_md", label="(8n/6r/2t)", y=38.445000, yerr=0.000000 PlotPoint: name="103", plot="h2o_64_md_mem", label="(8n/6r/2t)", y=460.454545, yerr=0.890724 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/05/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 59.051995E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3143552 0.0% 0.0% 100.0% average stack size 0.0 0.0 46.8 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 518.291456E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 950976 MPI messages size (bytes): total size 203.844256E+09 min size 0.000000E+00 max size 1.638400E+06 average size 214.352688E+03 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 253512 2076770304 8192 < size <= 32768 179424 2939682816 32768 < size <= 131072 181440 14863564800 131072 < size <= 4194304 330176 183964913216 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3622 63497. MP_Allreduce 10155 305. MP_Sync 54 MP_Alltoall 1821 2520850. MP_SendRecv 11067 57667. MP_ISendRecv 11067 57667. MP_Wait 21987 MP_ISend 9880 92618. MP_IRecv 9880 92618. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.014 0.032 32.025 32.026 qs_mol_dyn_low 1 2.0 0.003 0.004 31.771 31.778 qs_forces 11 3.9 0.004 0.009 31.713 31.714 qs_energies 11 4.9 0.002 0.005 30.113 30.115 scf_env_do_scf 11 5.9 0.002 0.014 25.437 25.437 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 22.833 22.835 dbcsr_multiply_generic 2286 12.5 0.094 0.096 16.158 16.232 velocity_verlet 10 3.0 0.001 0.002 15.116 15.117 qs_scf_new_mos 108 7.5 0.001 0.001 14.637 14.656 qs_scf_loop_do_ot 108 8.5 0.001 0.001 14.637 14.655 ot_scf_mini 108 9.5 0.002 0.003 13.927 13.938 multiply_cannon 2286 13.5 0.194 0.201 12.967 13.768 multiply_cannon_loop 2286 14.5 0.637 0.667 12.197 13.006 ot_mini 108 10.5 0.001 0.001 8.573 8.588 qs_ot_get_derivative 108 11.5 0.001 0.001 7.063 7.087 multiply_cannon_multrec 18288 15.5 1.949 2.905 6.766 7.073 rebuild_ks_matrix 119 8.3 0.000 0.000 6.453 6.475 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.013 6.453 6.474 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.711 5.736 dbcsr_mm_accdrv_process 38222 16.0 3.927 5.344 4.732 5.441 sum_up_and_integrate 119 10.3 0.031 0.032 3.997 4.007 integrate_v_rspace 119 11.3 0.003 0.003 3.966 3.975 init_scf_run 11 5.9 0.000 0.001 3.501 3.501 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.500 3.501 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.475 3.481 calculate_rho_elec 119 8.7 0.031 0.032 3.474 3.481 mp_waitall_1 158411 16.6 2.493 3.381 2.493 3.381 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.617 3.276 qs_ot_get_p 119 10.4 0.001 0.002 3.219 3.242 init_scf_loop 11 6.9 0.003 0.026 2.586 2.588 rs_pw_transfer 974 11.9 0.010 0.010 2.245 2.537 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.076 2.371 apply_single 119 13.6 0.000 0.000 2.076 2.370 calculate_first_density_matrix 1 7.0 0.000 0.002 2.257 2.259 density_rs2pw 119 9.7 0.004 0.004 1.945 2.233 qs_ot_p2m_diag 50 11.0 0.012 0.013 2.190 2.194 multiply_cannon_metrocomm3 18288 15.5 0.044 0.046 1.355 2.071 jit_kernel_multiply 11 16.0 0.752 2.043 0.752 2.043 grid_integrate_task_list 119 12.3 1.857 1.950 1.857 1.950 cp_dbcsr_syevd 50 12.0 0.003 0.003 1.928 1.929 pw_transfer 1439 11.6 0.065 0.069 1.868 1.876 make_m2s 4572 13.5 0.044 0.046 1.711 1.866 calculate_dm_sparse 119 9.5 0.000 0.001 1.839 1.848 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.775 1.785 make_images 4572 14.5 0.193 0.207 1.627 1.781 prepare_preconditioner 11 7.9 0.000 0.000 1.706 1.709 make_preconditioner 11 8.9 0.000 0.001 1.706 1.709 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.686 1.696 cp_fm_diag_elpa_base 50 14.0 1.663 1.679 1.684 1.695 potential_pw2rs 119 12.3 0.007 0.008 1.687 1.692 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.656 1.658 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.558 1.644 multiply_cannon_sync_h2d 18288 15.5 1.401 1.579 1.401 1.579 qs_ot_get_derivative_diag 49 12.0 0.001 0.002 1.517 1.526 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.506 1.512 ot_diis_step 108 11.5 0.011 0.011 1.476 1.477 fft_wrap_pw1pw2_140 487 13.2 0.257 0.262 1.440 1.449 grid_collocate_task_list 119 9.7 1.265 1.419 1.265 1.419 mp_sum_l 7287 12.8 1.094 1.412 1.094 1.412 fft3d_ps 1201 14.6 0.527 0.543 1.258 1.266 acc_transpose_blocks 18288 15.5 0.079 0.081 1.226 1.259 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.219 1.223 
wfi_extrapolate 11 7.9 0.001 0.001 1.150 1.150 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 0.955 0.956 make_images_data 4572 15.5 0.045 0.048 0.769 0.924 qs_ot_get_orbitals 108 10.5 0.000 0.000 0.859 0.881 hybrid_alltoall_any 4725 16.4 0.055 0.113 0.668 0.872 mp_waitany 9880 13.7 0.564 0.866 0.564 0.866 rs_pw_transfer_RS2PW_140 130 11.5 0.120 0.123 0.555 0.840 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.837 0.838 acc_transpose_blocks_kernels 18288 16.5 0.211 0.220 0.773 0.798 mp_alltoall_d11v 2130 13.8 0.663 0.784 0.663 0.784 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.656 0.740 cp_fm_cholesky_invert 11 10.9 0.698 0.701 0.698 0.701 mp_alltoall_z22v 1201 16.6 0.592 0.661 0.592 0.661 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="104", plot="h2o_64_md", label="(8n/4r/3t)", y=32.026000, yerr=0.000000 PlotPoint: name="105", plot="h2o_64_md_mem", label="(8n/4r/3t)", y=492.636364, yerr=2.532434 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/06/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 114.044384E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3805952 0.0% 0.0% 100.0% average stack size 0.0 0.0 38.6 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 551.997440E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1042416 MPI messages size (bytes): total size 150.443262E+09 min size 0.000000E+00 max size 1.188816E+06 average size 144.321719E+03 MPI breakdown and total messages size (bytes): size <= 128 228256 0 128 < size <= 8192 126888 1039466496 8192 < size <= 32768 191472 3137077248 32768 < size <= 131072 295800 25899827200 131072 < size <= 4194304 200000 120367247040 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3622 63496. MP_Allreduce 10154 346. MP_Sync 54 MP_Alltoall 1582 2412273. MP_SendRecv 8211 74133. MP_ISendRecv 8211 74133. MP_Wait 16271 MP_ISend 7280 135929. MP_IRecv 7280 135929. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.011 0.029 34.457 34.458 qs_mol_dyn_low 1 2.0 0.003 0.004 34.283 34.304 qs_forces 11 3.9 0.002 0.002 34.195 34.196 qs_energies 11 4.9 0.002 0.002 32.465 32.470 scf_env_do_scf 11 5.9 0.000 0.002 27.594 27.595 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 24.227 24.227 dbcsr_multiply_generic 2286 12.5 0.098 0.102 17.710 17.828 velocity_verlet 10 3.0 0.001 0.004 17.598 17.601 qs_scf_new_mos 108 7.5 0.001 0.001 15.826 15.877 qs_scf_loop_do_ot 108 8.5 0.001 0.001 15.826 15.876 ot_scf_mini 108 9.5 0.002 0.003 14.898 14.950 multiply_cannon 2286 13.5 0.228 0.268 14.225 14.659 multiply_cannon_loop 2286 14.5 0.945 0.974 13.330 13.748 ot_mini 108 10.5 0.001 0.001 9.017 9.085 multiply_cannon_multrec 27432 15.5 2.333 2.998 8.593 8.978 dbcsr_mm_accdrv_process 47916 15.9 5.413 7.205 6.164 7.317 qs_ot_get_derivative 108 11.5 0.001 0.001 7.217 7.271 rebuild_ks_matrix 119 8.3 0.000 0.000 6.574 6.627 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.014 6.573 6.626 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.817 5.863 sum_up_and_integrate 119 10.3 0.036 0.038 3.827 3.836 integrate_v_rspace 119 11.3 0.003 0.003 3.791 3.800 init_scf_run 11 5.9 0.000 0.001 3.554 3.554 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.554 3.554 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.501 3.533 calculate_rho_elec 119 8.7 0.040 0.046 3.501 3.532 qs_ot_get_p 119 10.4 0.001 0.001 3.303 3.381 init_scf_loop 11 6.9 0.002 0.018 3.348 3.348 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.636 3.106 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.098 2.569 apply_single 119 13.6 0.000 0.000 2.098 2.568 prepare_preconditioner 11 7.9 0.000 0.000 2.512 2.523 make_preconditioner 11 8.9 0.000 0.000 2.512 2.523 make_full_inverse_cholesky 11 9.9 0.000 0.000 2.119 2.450 calculate_first_density_matrix 1 7.0 0.000 0.000 2.200 2.202 make_m2s 4572 13.5 0.054 0.055 2.078 2.173 mp_waitall_1 137007 16.6 1.571 2.146 1.571 2.146 rs_pw_transfer 974 11.9 0.009 0.010 1.909 2.098 qs_ot_p2m_diag 50 11.0 0.015 0.023 2.078 2.087 density_rs2pw 119 9.7 0.004 0.004 1.879 2.075 calculate_dm_sparse 119 9.5 0.000 0.000 2.015 2.068 make_images 4572 14.5 0.273 0.335 1.972 2.065 pw_transfer 1439 11.6 0.065 0.071 1.967 2.001 grid_integrate_task_list 119 12.3 1.886 1.975 1.886 1.975 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.874 1.911 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.869 1.895 jit_kernel_multiply 10 16.2 0.690 1.819 0.690 1.819 cp_dbcsr_syevd 50 12.0 0.003 0.003 1.769 1.769 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.765 1.766 ot_diis_step 108 11.5 0.012 0.012 1.760 1.760 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.601 1.614 fft_wrap_pw1pw2_140 487 13.2 0.292 0.305 1.556 1.594 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.536 1.545 cp_fm_diag_elpa_base 50 14.0 1.502 1.518 1.534 1.544 mp_sum_l 7287 12.8 0.981 1.517 0.981 1.517 potential_pw2rs 119 12.3 0.009 0.010 1.511 1.514 acc_transpose_blocks 27432 15.5 0.116 0.119 1.460 1.485 grid_collocate_task_list 119 9.7 1.261 1.389 1.261 1.389 fft3d_ps 1201 14.6 0.555 0.609 1.307 1.331 wfi_extrapolate 11 7.9 0.001 0.001 1.305 1.305 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.204 1.215 multiply_cannon_metrocomm3 27432 15.5 0.039 0.039 0.672 1.169 cp_fm_upper_to_full 
72 14.2 0.814 1.160 0.814 1.160 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.084 1.104 qs_energies_init_hamiltonians 11 5.9 0.000 0.001 1.070 1.071 multiply_cannon_sync_h2d 27432 15.5 0.985 1.061 0.985 1.061 dbcsr_complete_redistribute 329 12.2 0.123 0.150 0.749 1.027 make_images_data 4572 15.5 0.045 0.048 0.802 0.906 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.793 0.868 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.846 0.851 hybrid_alltoall_any 4725 16.4 0.062 0.151 0.694 0.843 acc_transpose_blocks_kernels 27432 16.5 0.270 0.277 0.815 0.831 copy_fm_to_dbcsr 176 11.2 0.001 0.001 0.553 0.823 mp_alltoall_d11v 2130 13.8 0.702 0.787 0.702 0.787 cp_fm_cholesky_invert 11 10.9 0.716 0.719 0.716 0.719 mp_alltoall_i22 627 13.8 0.428 0.715 0.428 0.715 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="106", plot="h2o_64_md", label="(8n/3r/4t)", y=34.458000, yerr=0.000000 PlotPoint: name="107", plot="h2o_64_md_mem", label="(8n/3r/4t)", y=523.454545, yerr=3.499705 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/07/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 117.977176E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 1384136 0.0% 0.0% 100.0% average stack size 0.0 0.0 106.2 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 593.149952E+06 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 219456 MPI messages size (bytes): total size 97.042514E+09 min size 0.000000E+00 max size 3.276800E+06 average size 442.195750E+03 MPI breakdown and total messages size (bytes): size <= 128 1452 0 128 < size <= 8192 0 0 8192 < size <= 32768 101892 3336634368 32768 < size <= 131072 0 0 131072 < size <= 4194304 116112 93705670464 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 8156 20. MP_Alltoall 8655 64935. MP_ISend 36532 168375. MP_IRecv 36532 168349. 
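Aside (illustrative only): two of the derived figures in the DBCSR statistics block above (result 07) are plain ratios of the totals printed alongside them, so they are easy to spot-check. The snippet below reproduces the reported average stack size (~106.2) and average MPI message size (~442.2E+03 bytes) from the corresponding totals.

# Quick consistency check of two derived figures from the DBCSR statistics
# above (result 07); numbers are copied verbatim from the log.
matmuls_total    = 146_984_760      # "matmuls total"
processed_stacks = 1_384_136        # "number of processed stacks"
msg_total_bytes  = 97.042514e9      # "MPI messages size (bytes): total size"
msg_count        = 219_456          # "# MPI messages exchanged"

print(f"{matmuls_total / processed_stacks:.1f}")   # ~106.2, cf. "average stack size"
print(f"{msg_total_bytes / msg_count:.1f}")        # ~442195.8, cf. "average size 442.195750E+03"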
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3622 63495. MP_Allreduce 10154 346. MP_Sync 54 MP_Alltoall 1582 3682667. MP_SendRecv 5355 94533. MP_ISendRecv 5355 94533. MP_Wait 11335 MP_ISend 5200 225425. MP_IRecv 5200 225425. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.036 0.038 27.630 27.631 qs_mol_dyn_low 1 2.0 0.003 0.004 27.452 27.459 qs_forces 11 3.9 0.002 0.003 27.394 27.395 qs_energies 11 4.9 0.002 0.002 25.666 25.668 scf_env_do_scf 11 5.9 0.001 0.001 21.093 21.094 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 18.658 18.659 velocity_verlet 10 3.0 0.002 0.006 14.184 14.187 dbcsr_multiply_generic 2286 12.5 0.090 0.094 11.910 12.051 qs_scf_new_mos 108 7.5 0.001 0.001 10.705 10.734 qs_scf_loop_do_ot 108 8.5 0.001 0.001 10.705 10.734 ot_scf_mini 108 9.5 0.002 0.002 10.047 10.076 multiply_cannon 2286 13.5 0.229 0.238 9.505 9.982 multiply_cannon_loop 2286 14.5 0.332 0.342 8.618 8.870 multiply_cannon_multrec 9144 15.5 1.593 1.997 5.742 6.219 rebuild_ks_matrix 119 8.3 0.000 0.000 5.985 6.007 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.014 5.984 6.007 ot_mini 108 10.5 0.001 0.001 5.658 5.694 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.314 5.335 qs_ot_get_derivative 108 11.5 0.001 0.001 4.375 4.405 dbcsr_mm_accdrv_process 12550 15.8 2.912 3.714 4.047 4.122 sum_up_and_integrate 119 10.3 0.038 0.042 3.682 3.686 integrate_v_rspace 119 11.3 0.003 0.003 3.643 3.648 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.497 3.502 calculate_rho_elec 119 8.7 0.060 0.061 3.497 3.501 init_scf_run 11 5.9 0.000 0.001 3.154 3.154 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.154 3.154 qs_ot_get_p 119 10.4 0.001 0.001 2.624 2.668 init_scf_loop 11 6.9 0.000 0.001 2.414 2.416 calculate_first_density_matrix 1 7.0 0.000 0.000 2.042 2.043 pw_transfer 1439 11.6 0.066 0.069 2.014 2.025 mp_waitall_1 115863 16.7 1.419 1.986 1.419 1.986 grid_integrate_task_list 119 12.3 1.903 1.965 1.903 1.965 density_rs2pw 119 9.7 0.004 0.004 1.795 1.931 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.919 1.931 make_m2s 4572 13.5 0.035 0.036 1.660 1.819 rs_pw_transfer 974 11.9 0.008 0.008 1.615 1.754 qs_ot_p2m_diag 50 11.0 0.022 0.023 1.746 1.749 jit_kernel_multiply 10 15.6 1.097 1.738 1.097 1.738 make_images 4572 14.5 0.270 0.303 1.572 1.729 calculate_dm_sparse 119 9.5 0.000 0.000 1.679 1.700 prepare_preconditioner 11 7.9 0.000 0.000 1.684 1.688 make_preconditioner 11 8.9 0.000 0.000 1.684 1.688 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.576 1.599 fft_wrap_pw1pw2_140 487 13.2 0.368 0.379 1.586 1.598 cp_dbcsr_syevd 50 12.0 0.003 0.003 1.556 1.557 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.491 1.493 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 1.467 1.481 grid_collocate_task_list 119 9.7 1.308 1.425 1.308 1.425 potential_pw2rs 119 12.3 0.011 0.011 1.353 1.356 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.346 1.355 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.285 1.294 cp_fm_diag_elpa_base 50 14.0 1.259 1.275 1.284 1.292 ot_diis_step 108 11.5 0.012 0.013 
1.273 1.273 fft3d_ps 1201 14.6 0.557 0.567 1.248 1.257 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 1.211 1.212 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 1.160 1.178 apply_single 119 13.6 0.000 0.000 1.160 1.178 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.112 1.118 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.088 1.104 wfi_extrapolate 11 7.9 0.001 0.001 1.065 1.065 hybrid_alltoall_any 4725 16.4 0.063 0.176 0.747 0.979 make_images_data 4572 15.5 0.039 0.042 0.775 0.951 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.867 0.918 acc_transpose_blocks 9144 15.5 0.040 0.040 0.886 0.897 multiply_cannon_metrocomm3 9144 15.5 0.019 0.019 0.403 0.881 cp_fm_cholesky_invert 11 10.9 0.839 0.841 0.839 0.841 mp_alltoall_d11v 2130 13.8 0.731 0.806 0.731 0.806 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.793 0.796 multiply_cannon_sync_h2d 9144 15.5 0.708 0.788 0.708 0.788 qs_ot_get_orbitals 108 10.5 0.000 0.000 0.711 0.721 qs_env_update_s_mstruct 11 6.9 0.000 0.001 0.666 0.715 acc_transpose_blocks_kernels 9144 16.5 0.118 0.122 0.638 0.647 mp_allgather_i34 2286 14.5 0.218 0.617 0.218 0.617 mp_alltoall_z22v 1201 16.6 0.566 0.595 0.566 0.595 qs_create_task_list 11 7.9 0.001 0.001 0.541 0.567 generate_qs_task_list 11 8.9 0.187 0.209 0.540 0.566 mp_waitany 5200 13.7 0.441 0.561 0.441 0.561 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="108", plot="h2o_64_md", label="(8n/2r/6t)", y=27.631000, yerr=0.000000 PlotPoint: name="109", plot="h2o_64_md_mem", label="(8n/2r/6t)", y=564.545455, yerr=2.425757 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/08/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 235.585836E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 1388964 0.0% 0.0% 100.0% average stack size 0.0 0.0 105.8 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 746.184704E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 91440 MPI messages size (bytes): total size 85.748679E+09 min size 0.000000E+00 max size 6.553600E+06 average size 937.758938E+03 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 21148 692256768 32768 < size <= 131072 19224 1259864064 131072 < size <= 4194304 41040 21941452800 4194304 < size <= 16777216 9456 61855174464 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3622 63729. MP_Allreduce 10154 429. MP_Sync 54 MP_Alltoall 1582 7383731. MP_SendRecv 2499 189067. MP_ISendRecv 2499 189067. MP_Wait 6399 MP_ISend 3120 546875. MP_IRecv 3120 546875. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.015 0.031 41.314 41.314 qs_mol_dyn_low 1 2.0 0.003 0.004 41.114 41.121 qs_forces 11 3.9 0.002 0.002 41.056 41.057 qs_energies 11 4.9 0.001 0.002 39.065 39.069 scf_env_do_scf 11 5.9 0.003 0.024 33.359 33.359 scf_env_do_scf_inner_loop 108 6.5 0.003 0.006 25.494 25.495 velocity_verlet 10 3.0 0.001 0.001 23.343 23.348 dbcsr_multiply_generic 2286 12.5 0.098 0.099 17.264 17.433 qs_scf_new_mos 108 7.5 0.001 0.001 15.700 15.803 qs_scf_loop_do_ot 108 8.5 0.001 0.001 15.699 15.802 ot_scf_mini 108 9.5 0.002 0.002 14.622 14.731 multiply_cannon 2286 13.5 0.301 0.305 13.490 14.358 multiply_cannon_loop 2286 14.5 0.345 0.350 12.216 13.112 ot_mini 108 10.5 0.001 0.001 8.697 8.819 multiply_cannon_multrec 9144 15.5 3.398 4.806 8.507 8.591 init_scf_loop 11 6.9 0.000 0.000 7.814 7.816 rebuild_ks_matrix 119 8.3 0.000 0.000 7.323 7.468 qs_ks_build_kohn_sham_matrix 119 9.3 0.013 0.013 7.323 7.467 prepare_preconditioner 11 7.9 0.000 0.000 6.851 6.864 make_preconditioner 11 8.9 0.000 0.000 6.851 6.864 qs_ot_get_derivative 108 11.5 0.001 0.001 6.680 6.785 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.397 6.745 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.602 6.733 dbcsr_mm_accdrv_process 12550 15.8 4.118 5.603 4.985 6.324 cp_fm_upper_to_full 72 14.2 3.167 4.562 3.167 4.562 qs_rho_update_rho_low 119 7.7 0.001 0.001 4.258 4.263 calculate_rho_elec 119 8.7 0.118 0.121 4.258 4.262 sum_up_and_integrate 119 10.3 0.065 0.066 4.051 4.057 integrate_v_rspace 119 11.3 0.004 0.004 3.986 3.993 init_scf_run 11 5.9 0.000 0.001 3.688 3.688 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.688 3.688 qs_ot_get_p 119 10.4 0.001 0.001 3.206 3.342 mp_waitall_1 94719 16.7 2.288 3.234 2.288 3.234 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.461 2.851 dbcsr_complete_redistribute 329 12.2 0.290 0.296 1.994 2.814 pw_transfer 1439 11.6 0.069 0.070 2.650 2.655 fft_wrap_pw1pw2 1201 12.6 0.009 0.009 2.552 2.557 
copy_fm_to_dbcsr 176 11.2 0.001 0.001 1.681 2.487 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.152 2.452 apply_single 119 13.6 0.000 0.000 2.152 2.452 make_m2s 4572 13.5 0.038 0.038 2.247 2.422 mp_alltoall_i22 627 13.8 1.506 2.339 1.506 2.339 make_images 4572 14.5 0.354 0.383 2.128 2.303 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 1.448 2.247 calculate_first_density_matrix 1 7.0 0.000 0.000 2.230 2.234 density_rs2pw 119 9.7 0.004 0.004 2.151 2.170 calculate_dm_sparse 119 9.5 0.000 0.000 2.145 2.163 fft_wrap_pw1pw2_140 487 13.2 0.621 0.624 2.140 2.145 grid_integrate_task_list 119 12.3 2.125 2.138 2.125 2.138 multiply_cannon_metrocomm3 9144 15.5 0.020 0.020 1.282 2.125 ot_diis_step 108 11.5 0.014 0.014 1.993 1.993 qs_ot_p2m_diag 50 11.0 0.043 0.044 1.956 1.957 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.899 1.900 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 1.750 1.751 mp_sum_l 7287 12.8 0.988 1.738 0.988 1.738 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.671 1.723 cp_dbcsr_syevd 50 12.0 0.003 0.003 1.683 1.683 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.583 1.609 fft3d_ps 1201 14.6 0.592 0.600 1.563 1.567 grid_collocate_task_list 119 9.7 1.520 1.531 1.520 1.531 rs_pw_transfer 974 11.9 0.009 0.009 1.445 1.479 cp_fm_cholesky_invert 11 10.9 1.465 1.469 1.465 1.469 potential_pw2rs 119 12.3 0.014 0.015 1.387 1.390 wfi_extrapolate 11 7.9 0.001 0.001 1.390 1.390 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.388 1.388 cp_fm_diag_elpa_base 50 14.0 1.242 1.294 1.386 1.386 hybrid_alltoall_any 4725 16.4 0.087 0.147 1.099 1.352 make_images_data 4572 15.5 0.043 0.046 1.049 1.274 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.214 1.220 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.132 1.150 qs_env_update_s_mstruct 11 6.9 0.000 0.000 1.091 1.114 mp_alltoall_d11v 2130 13.8 1.057 1.067 1.057 1.067 multiply_cannon_sync_h2d 9144 15.5 1.034 1.037 1.034 1.037 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.001 0.984 1.023 jit_kernel_multiply 6 15.7 0.839 1.013 0.839 1.013 qs_create_task_list 11 7.9 0.001 0.001 0.939 0.952 generate_qs_task_list 11 8.9 0.368 0.387 0.938 0.952 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.934 0.947 acc_transpose_blocks 9144 15.5 0.040 0.041 0.899 0.902 mp_alltoall_z22v 1201 16.6 0.835 0.857 0.835 0.857 copy_dbcsr_to_fm 153 11.3 0.002 0.002 0.787 0.834 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="110", plot="h2o_64_md", label="(8n/1r/12t)", y=41.314000, yerr=0.000000 PlotPoint: name="111", plot="h2o_64_md_mem", label="(8n/1r/12t)", y=701.636364, yerr=14.405004 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/09/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% 
flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 198.287135E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 8410880 0.0% 0.0% 100.0% average stack size 0.0 0.0 117.0 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 497.684480E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 8483040 MPI messages size (bytes): total size 1.160510E+12 min size 0.000000E+00 max size 1.161504E+06 average size 136.803609E+03 MPI breakdown and total messages size (bytes): size <= 128 1836752 0 128 < size <= 8192 1040592 8524529664 8192 < size <= 32768 1486976 24362614784 32768 < size <= 131072 2491776 216971345920 131072 < size <= 4194304 1626944 910632720448 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3473 66218. MP_Allreduce 9776 488. MP_Sync 52 MP_Alltoall 1938 2185759. MP_SendRecv 20900 9096. MP_ISendRecv 20900 9096. MP_Wait 37268 MP_ISend 14300 82312. MP_IRecv 14300 82312. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.014 0.029 80.276 80.277 qs_mol_dyn_low 1 2.0 0.003 0.004 79.882 79.891 qs_forces 11 3.9 0.004 0.009 79.816 79.817 qs_energies 11 4.9 0.003 0.004 76.937 76.949 scf_env_do_scf 11 5.9 0.000 0.001 68.241 68.243 scf_env_do_scf_inner_loop 99 6.5 0.002 0.006 62.904 62.904 dbcsr_multiply_generic 2055 12.4 0.106 0.110 49.571 49.807 qs_scf_new_mos 99 7.5 0.000 0.001 45.570 45.675 qs_scf_loop_do_ot 99 8.5 0.001 0.001 45.569 45.674 ot_scf_mini 99 9.5 0.002 0.002 43.269 43.408 multiply_cannon 2055 13.4 0.183 0.189 41.760 42.569 velocity_verlet 10 3.0 0.001 0.002 41.838 41.840 multiply_cannon_loop 2055 14.4 1.535 1.576 40.860 41.675 ot_mini 99 10.5 0.001 0.001 25.507 25.634 qs_ot_get_derivative 99 11.5 0.001 0.001 18.767 18.911 multiply_cannon_multrec 49320 15.4 12.498 13.181 17.359 17.916 rebuild_ks_matrix 110 8.3 0.000 0.000 14.574 14.685 qs_ks_build_kohn_sham_matrix 110 9.3 0.011 0.014 14.573 14.685 qs_ks_update_qs_env 110 7.6 0.001 0.001 12.748 12.846 mp_waitall_1 220248 16.4 10.139 11.135 10.139 11.135 multiply_cannon_sync_h2d 49320 15.4 10.358 10.945 10.358 10.945 qs_ot_get_p 110 10.4 0.001 0.001 9.387 9.521 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 7.185 7.715 apply_single 110 13.6 0.000 0.001 7.185 7.715 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 7.081 7.617 multiply_cannon_metrocomm3 49320 15.4 0.082 0.087 6.203 7.244 sum_up_and_integrate 110 10.3 0.037 0.044 7.123 7.138 integrate_v_rspace 110 11.3 0.003 0.005 7.085 7.110 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.607 6.746 calculate_rho_elec 110 8.6 0.022 0.026 6.606 6.745 init_scf_run 11 5.9 0.000 
0.001 6.666 6.667 scf_env_initial_rho_setup 11 6.9 0.001 0.001 6.666 6.666 ot_diis_step 99 11.5 0.005 0.005 6.553 6.553 qs_ot_p2m_diag 48 11.0 0.013 0.019 6.419 6.442 cp_dbcsr_syevd 48 12.0 0.003 0.003 5.582 5.582 init_scf_loop 11 6.9 0.000 0.001 5.312 5.312 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 5.236 5.305 cp_fm_diag_elpa 48 13.0 0.000 0.000 5.032 5.055 cp_fm_diag_elpa_base 48 14.0 5.020 5.045 5.030 5.054 dbcsr_mm_accdrv_process 87628 16.1 1.925 1.987 4.735 4.991 rs_pw_transfer 902 11.9 0.012 0.013 3.624 4.453 mp_sum_l 6594 12.7 3.609 4.343 3.609 4.343 density_rs2pw 110 9.6 0.004 0.005 3.318 4.170 wfi_extrapolate 11 7.9 0.001 0.001 3.981 3.981 make_m2s 4110 13.4 0.060 0.065 3.747 3.848 calculate_dm_sparse 110 9.5 0.001 0.001 3.663 3.783 make_images 4110 14.4 0.177 0.189 3.652 3.757 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 3.514 3.518 grid_integrate_task_list 110 12.3 3.318 3.497 3.318 3.497 multiply_cannon_metrocomm1 49320 15.4 0.062 0.064 2.185 3.273 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 3.190 3.233 prepare_preconditioner 11 7.9 0.000 0.000 3.160 3.179 make_preconditioner 11 8.9 0.000 0.000 3.160 3.179 qs_ot_get_orbitals 99 10.5 0.001 0.001 3.098 3.141 pw_transfer 1331 11.6 0.055 0.067 3.029 3.102 fft_wrap_pw1pw2 1111 12.6 0.008 0.009 2.940 3.016 make_full_inverse_cholesky 11 9.9 0.000 0.000 2.954 2.989 mp_waitany 14300 13.8 1.854 2.724 1.854 2.724 fft_wrap_pw1pw2_140 451 13.1 0.453 0.497 2.530 2.609 calculate_first_density_matrix 1 7.0 0.000 0.001 2.596 2.601 potential_pw2rs 110 12.3 0.006 0.007 2.550 2.566 jit_kernel_multiply 13 15.9 2.532 2.550 2.532 2.550 mp_alltoall_d11v 2046 13.8 2.046 2.441 2.046 2.441 grid_collocate_task_list 110 9.6 2.175 2.392 2.175 2.392 fft3d_ps 1111 14.6 0.779 0.864 2.133 2.177 acc_transpose_blocks 49320 15.4 0.218 0.227 2.111 2.173 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.956 1.970 make_images_data 4110 15.4 0.043 0.046 1.741 1.878 mp_sum_d 3889 11.9 1.290 1.795 1.290 1.795 hybrid_alltoall_any 4261 16.3 0.083 0.480 1.514 1.775 rs_pw_transfer_RS2PW_140 121 11.5 0.171 0.180 0.946 1.769 cp_fm_cholesky_invert 11 10.9 1.716 1.720 1.716 1.720 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.639 1.660 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="200", plot="h2o_128_md", label="(8n/12r/1t)", y=80.277000, yerr=0.000000 PlotPoint: name="201", plot="h2o_128_md_mem", label="(8n/12r/1t)", y=472.272727, yerr=2.665978 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/10/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 
22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 390.715586E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 5019072 0.0% 0.0% 100.0% average stack size 0.0 0.0 196.1 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 584.744960E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1972800 MPI messages size (bytes): total size 1.077520E+12 min size 0.000000E+00 max size 4.537280E+06 average size 546.188250E+03 MPI breakdown and total messages size (bytes): size <= 128 14916 0 128 < size <= 8192 222984 1826684928 8192 < size <= 32768 520356 13399818240 32768 < size <= 131072 372336 35386294272 131072 < size <= 4194304 787758 788321309808 4194304 < size <= 16777216 54450 238588003280 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3473 66437. MP_Allreduce 9775 566. MP_Sync 52 MP_Alltoall 1717 3631244. MP_SendRecv 10340 26400. MP_ISendRecv 10340 26400. MP_Wait 22352 MP_ISend 10164 155761. MP_IRecv 10164 155761. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.012 0.030 68.127 68.128 qs_mol_dyn_low 1 2.0 0.003 0.004 67.913 67.923 qs_forces 11 3.9 0.003 0.003 67.829 67.830 qs_energies 11 4.9 0.001 0.002 64.465 64.468 scf_env_do_scf 11 5.9 0.000 0.001 56.064 56.068 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 48.713 48.713 dbcsr_multiply_generic 2055 12.4 0.126 0.131 37.161 37.380 velocity_verlet 10 3.0 0.001 0.001 35.727 35.728 qs_scf_new_mos 99 7.5 0.001 0.001 32.409 32.547 qs_scf_loop_do_ot 99 8.5 0.001 0.001 32.408 32.546 multiply_cannon 2055 13.4 0.223 0.248 30.759 32.247 ot_scf_mini 99 9.5 0.003 0.003 30.766 30.895 multiply_cannon_loop 2055 14.4 0.929 0.949 29.507 30.694 ot_mini 99 10.5 0.001 0.001 17.951 18.094 multiply_cannon_multrec 24660 15.4 7.596 9.207 13.820 15.658 rebuild_ks_matrix 110 8.3 0.000 0.000 13.809 13.944 qs_ks_build_kohn_sham_matrix 110 9.3 0.013 0.014 13.809 13.944 qs_ot_get_derivative 99 11.5 0.001 0.001 12.185 12.320 qs_ks_update_qs_env 110 7.6 0.001 0.001 12.121 12.245 mp_waitall_1 176588 16.5 7.398 10.132 7.398 10.132 multiply_cannon_sync_h2d 24660 15.4 6.904 7.862 6.904 7.862 multiply_cannon_metrocomm3 24660 15.4 0.068 0.072 5.081 7.680 init_scf_loop 11 6.9 0.000 0.001 7.318 7.318 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 6.466 7.292 apply_single 110 13.6 0.000 0.001 6.465 7.291 sum_up_and_integrate 110 10.3 0.053 0.060 6.703 6.717 integrate_v_rspace 110 11.3 0.003 0.003 6.649 6.662 qs_ot_get_p 110 10.4 0.001 0.001 6.254 6.390 dbcsr_mm_accdrv_process 52282 16.1 4.572 5.324 6.063 6.341 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.156 6.162 calculate_rho_elec 110 8.6 0.040 0.048 6.155 6.162 init_scf_run 11 5.9 0.000 0.001 6.054 6.054 scf_env_initial_rho_setup 11 6.9 0.001 0.001 
6.053 6.054 ot_diis_step 99 11.5 0.010 0.010 5.720 5.720 prepare_preconditioner 11 7.9 0.000 0.000 5.311 5.331 make_preconditioner 11 8.9 0.000 0.000 5.311 5.330 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 4.584 5.318 make_full_inverse_cholesky 11 9.9 0.000 0.000 4.909 5.063 make_m2s 4110 13.4 0.057 0.059 4.030 4.518 qs_ot_p2m_diag 48 11.0 0.029 0.044 4.435 4.455 make_images 4110 14.4 0.404 0.450 3.926 4.411 cp_dbcsr_syevd 48 12.0 0.003 0.003 4.012 4.012 density_rs2pw 110 9.6 0.005 0.005 3.226 3.737 pw_transfer 1331 11.6 0.067 0.072 3.584 3.717 fft_wrap_pw1pw2 1111 12.6 0.008 0.009 3.477 3.607 wfi_extrapolate 11 7.9 0.001 0.001 3.483 3.483 grid_integrate_task_list 110 12.3 3.247 3.480 3.247 3.480 cp_fm_diag_elpa 48 13.0 0.000 0.000 3.463 3.476 cp_fm_diag_elpa_base 48 14.0 3.416 3.431 3.461 3.473 rs_pw_transfer 902 11.9 0.012 0.014 2.898 3.440 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.002 3.210 3.212 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 3.115 3.189 fft_wrap_pw1pw2_140 451 13.1 0.530 0.546 2.997 3.131 calculate_dm_sparse 110 9.5 0.001 0.001 2.944 2.971 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.854 2.912 make_images_data 4110 15.4 0.047 0.052 2.266 2.725 hybrid_alltoall_any 4261 16.3 0.103 0.447 1.960 2.679 fft3d_ps 1111 14.6 1.103 1.326 2.436 2.583 grid_collocate_task_list 110 9.6 2.111 2.563 2.111 2.563 calculate_first_density_matrix 1 7.0 0.000 0.000 2.481 2.485 cp_fm_cholesky_invert 11 10.9 2.470 2.476 2.470 2.476 potential_pw2rs 110 12.3 0.008 0.009 2.375 2.383 mp_sum_l 6594 12.7 1.734 2.322 1.734 2.322 jit_kernel_multiply 11 16.1 1.141 2.138 1.141 2.138 mp_alltoall_d11v 2046 13.8 1.687 1.963 1.687 1.963 qs_ot_get_orbitals 99 10.5 0.001 0.001 1.926 1.950 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.845 1.856 qs_energies_init_hamiltonians 11 5.9 0.000 0.001 1.845 1.846 multiply_cannon_metrocomm4 22605 15.4 0.078 0.081 0.819 1.802 mp_waitany 10164 13.8 1.227 1.739 1.227 1.739 mp_irecv_dv 57340 16.2 0.690 1.680 0.690 1.680 acc_transpose_blocks 24660 15.4 0.117 0.120 1.535 1.568 mp_allgather_i34 2055 14.4 0.578 1.554 0.578 1.554 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.535 1.543 rs_pw_transfer_RS2PW_140 121 11.5 0.208 0.218 0.969 1.516 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 1.367 1.474 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="202", plot="h2o_128_md", label="(8n/6r/2t)", y=68.128000, yerr=0.000000 PlotPoint: name="203", plot="h2o_128_md_mem", label="(8n/6r/2t)", y=552.454545, yerr=7.101391 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/11/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 
100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 404.681598E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 3346752 0.0% 0.0% 100.0% average stack size 0.0 0.0 294.1 marketing flops 15.646297E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 657.371136E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 854880 MPI messages size (bytes): total size 708.322787E+09 min size 0.000000E+00 max size 6.553600E+06 average size 828.564000E+03 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 0 0 8192 < size <= 32768 222984 7302414336 32768 < size <= 131072 153888 10085203968 131072 < size <= 4194304 389376 200257044480 4194304 < size <= 16777216 82208 490679162176 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3473 66428. MP_Allreduce 9774 562. MP_Sync 52 MP_Alltoall 1496 4511006. MP_SendRecv 6820 27424. MP_ISendRecv 6820 27424. MP_Wait 25498 MP_ISend 17072 115022. MP_IRecv 17072 115022. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.014 0.030 59.237 59.237 qs_mol_dyn_low 1 2.0 0.003 0.003 58.939 58.948 qs_forces 11 3.9 0.003 0.003 58.875 58.876 qs_energies 11 4.9 0.002 0.010 55.621 55.625 scf_env_do_scf 11 5.9 0.001 0.002 47.828 47.829 scf_env_do_scf_inner_loop 99 6.5 0.002 0.006 39.516 39.520 velocity_verlet 10 3.0 0.001 0.002 32.165 32.167 dbcsr_multiply_generic 2055 12.4 0.105 0.110 27.990 28.226 qs_scf_new_mos 99 7.5 0.001 0.001 24.365 24.455 qs_scf_loop_do_ot 99 8.5 0.001 0.001 24.365 24.454 ot_scf_mini 99 9.5 0.003 0.004 23.159 23.272 multiply_cannon 2055 13.4 0.214 0.232 21.944 22.992 multiply_cannon_loop 2055 14.4 0.622 0.636 20.775 21.788 ot_mini 99 10.5 0.001 0.001 13.264 13.375 rebuild_ks_matrix 110 8.3 0.000 0.000 12.492 12.604 qs_ks_build_kohn_sham_matrix 110 9.3 0.013 0.015 12.492 12.604 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.967 11.071 multiply_cannon_multrec 16440 15.4 3.960 5.231 9.691 10.901 mp_waitall_1 139946 16.5 6.729 10.117 6.729 10.117 qs_ot_get_derivative 99 11.5 0.001 0.001 8.865 8.979 init_scf_loop 11 6.9 0.001 0.005 8.280 8.301 multiply_cannon_metrocomm3 16440 15.4 0.043 0.046 4.204 7.423 sum_up_and_integrate 110 10.3 0.062 0.062 6.651 6.664 integrate_v_rspace 110 11.3 0.003 0.003 6.589 6.603 prepare_preconditioner 11 7.9 0.000 0.000 6.465 6.482 make_preconditioner 11 8.9 0.000 0.002 6.465 6.482 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.823 6.187 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.028 6.034 calculate_rho_elec 110 8.6 0.059 0.060 6.028 6.033 dbcsr_mm_accdrv_process 34862 16.1 4.570 5.001 5.585 5.726 init_scf_run 11 5.9 0.000 0.001 5.386 5.386 scf_env_initial_rho_setup 11 6.9 0.001 
0.001 5.385 5.386 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 4.913 5.353 apply_single 110 13.6 0.000 0.000 4.913 5.353 qs_ot_get_p 110 10.4 0.001 0.002 5.167 5.320 make_m2s 4110 13.4 0.050 0.052 4.038 4.419 multiply_cannon_sync_h2d 16440 15.4 3.695 4.390 3.695 4.390 ot_diis_step 99 11.5 0.010 0.011 4.372 4.372 density_rs2pw 110 9.6 0.005 0.005 3.064 4.328 make_images 4110 14.4 0.396 0.513 3.927 4.308 rs_pw_transfer 902 11.9 0.011 0.012 2.637 3.896 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 3.086 3.719 pw_transfer 1331 11.6 0.066 0.073 3.618 3.626 qs_ot_p2m_diag 48 11.0 0.042 0.044 3.599 3.604 fft_wrap_pw1pw2 1111 12.6 0.008 0.009 3.511 3.521 grid_integrate_task_list 110 12.3 3.284 3.490 3.284 3.490 cp_dbcsr_syevd 48 12.0 0.003 0.003 3.269 3.269 fft_wrap_pw1pw2_140 451 13.1 0.646 0.655 3.053 3.062 wfi_extrapolate 11 7.9 0.001 0.001 2.939 2.939 make_images_data 4110 15.4 0.044 0.048 2.354 2.832 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.753 2.763 cp_fm_diag_elpa_base 48 14.0 2.686 2.714 2.751 2.761 hybrid_alltoall_any 4261 16.3 0.106 0.378 2.064 2.737 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.717 2.718 grid_collocate_task_list 110 9.6 2.138 2.550 2.138 2.550 cp_fm_cholesky_invert 11 10.9 2.528 2.534 2.528 2.534 calculate_dm_sparse 110 9.5 0.001 0.001 2.479 2.515 mp_waitany 17072 13.8 1.202 2.511 1.202 2.511 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.418 2.472 multiply_cannon_metrocomm4 14385 15.4 0.045 0.049 0.870 2.402 calculate_first_density_matrix 1 7.0 0.000 0.002 2.361 2.363 fft3d_ps 1111 14.6 1.088 1.101 2.322 2.334 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.247 2.304 mp_irecv_dv 48980 15.7 0.800 2.278 0.800 2.278 potential_pw2rs 110 12.3 0.011 0.011 2.241 2.247 rs_pw_transfer_RS2PW_140 121 11.5 0.178 0.184 0.930 2.184 mp_alltoall_d11v 2046 13.8 1.709 2.180 1.709 2.180 mp_sum_l 6594 12.7 1.393 1.996 1.393 1.996 qs_energies_init_hamiltonians 11 5.9 0.001 0.005 1.974 1.975 dbcsr_complete_redistribute 325 12.2 0.324 0.350 1.333 1.794 cp_fm_upper_to_full 70 14.2 1.404 1.768 1.404 1.768 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.721 1.734 cp_fm_cholesky_decompose 22 10.9 1.505 1.523 1.505 1.523 mp_allgather_i34 2055 14.4 0.488 1.521 0.488 1.521 jit_kernel_multiply 8 16.5 0.628 1.517 0.628 1.517 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 1.355 1.476 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.452 1.465 rs_gather_matrices 110 12.3 0.236 0.262 0.985 1.398 copy_fm_to_dbcsr 174 11.2 0.001 0.001 0.921 1.368 qs_ot_get_orbitals 99 10.5 0.001 0.001 1.225 1.234 acc_transpose_blocks 16440 15.4 0.078 0.080 1.213 1.230 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="204", plot="h2o_128_md", label="(8n/4r/3t)", y=59.237000, yerr=0.000000 PlotPoint: name="205", plot="h2o_128_md_mem", label="(8n/4r/3t)", y=622.818182, yerr=7.017098 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/12/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 
22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 601.317074E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 4916280 0.0% 0.0% 100.0% average stack size 0.0 0.0 200.2 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 736.301056E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 937080 MPI messages size (bytes): total size 523.723932E+09 min size 0.000000E+00 max size 4.537280E+06 average size 558.889250E+03 MPI breakdown and total messages size (bytes): size <= 128 6996 0 128 < size <= 8192 264 2162688 8192 < size <= 32768 304932 8165326848 32768 < size <= 131072 110640 6338641920 131072 < size <= 4194304 489498 400769458320 4194304 < size <= 16777216 24750 108449092400 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3473 66426. MP_Allreduce 9774 603. MP_Sync 52 MP_Alltoall 1496 5863162. MP_SendRecv 5060 43184. MP_ISendRecv 5060 43184. MP_Wait 20042 MP_ISend 13376 163145. MP_IRecv 13376 163145. 
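Aside: the run labels follow the pattern (Nn/Rr/Tt), and every combination in this suite multiplies out to 12 cores per node (6r/2t, 4r/3t, 3r/4t, 2r/6t, 1r/12t, 12r/1t), which suggests the fields are nodes, MPI ranks per node, and OpenMP threads per rank; that reading is an assumption, not something the log states. A hypothetical helper to expand a label into absolute counts:

import re

# Hypothetical helper, not part of the CP2K suite: decode a label such as
# "(8n/4r/3t)" under the ASSUMPTION that it means
# <nodes>n / <MPI ranks per node>r / <OpenMP threads per rank>t.
def decode_label(label: str) -> dict:
    nodes, ranks_per_node, threads = map(
        int, re.fullmatch(r"\((\d+)n/(\d+)r/(\d+)t\)", label).groups()
    )
    return {
        "nodes": nodes,
        "total_ranks": nodes * ranks_per_node,
        "threads_per_rank": threads,
        "total_cores": nodes * ranks_per_node * threads,
    }

print(decode_label("(8n/4r/3t)"))
# -> {'nodes': 8, 'total_ranks': 32, 'threads_per_rank': 3, 'total_cores': 96}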
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.016 0.029 65.070 65.071 qs_mol_dyn_low 1 2.0 0.003 0.004 64.795 64.804 qs_forces 11 3.9 0.018 0.128 64.714 64.714 qs_energies 11 4.9 0.002 0.005 61.109 61.114 scf_env_do_scf 11 5.9 0.001 0.001 52.811 52.814 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 41.238 41.239 velocity_verlet 10 3.0 0.001 0.003 37.160 37.163 dbcsr_multiply_generic 2055 12.4 0.112 0.118 29.196 29.372 qs_scf_new_mos 99 7.5 0.001 0.001 26.011 26.112 qs_scf_loop_do_ot 99 8.5 0.001 0.001 26.011 26.112 ot_scf_mini 99 9.5 0.003 0.004 24.394 24.494 multiply_cannon 2055 13.4 0.244 0.266 22.233 23.312 multiply_cannon_loop 2055 14.4 0.890 0.907 20.883 21.437 ot_mini 99 10.5 0.001 0.001 13.888 14.009 multiply_cannon_multrec 24660 15.4 4.196 6.720 12.622 13.868 rebuild_ks_matrix 110 8.3 0.000 0.000 12.377 12.511 qs_ks_build_kohn_sham_matrix 110 9.3 0.012 0.013 12.377 12.511 init_scf_loop 11 6.9 0.001 0.003 11.532 11.532 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.892 11.010 qs_ot_get_derivative 99 11.5 0.001 0.001 9.735 9.838 prepare_preconditioner 11 7.9 0.000 0.000 9.768 9.784 make_preconditioner 11 8.9 0.000 0.001 9.768 9.784 make_full_inverse_cholesky 11 9.9 0.000 0.000 8.011 9.467 dbcsr_mm_accdrv_process 52304 16.0 6.985 8.670 8.277 9.215 sum_up_and_integrate 110 10.3 0.068 0.072 6.671 6.685 integrate_v_rspace 110 11.3 0.003 0.004 6.603 6.615 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.202 6.210 calculate_rho_elec 110 8.6 0.078 0.082 6.201 6.210 mp_waitall_1 121746 16.5 4.230 6.179 4.230 6.179 qs_ot_get_p 110 10.4 0.001 0.001 5.523 5.677 make_m2s 4110 13.4 0.059 0.061 5.397 5.661 make_images 4110 14.4 0.584 0.706 5.257 5.518 init_scf_run 11 5.9 0.000 0.001 5.500 5.501 scf_env_initial_rho_setup 11 6.9 0.001 0.001 5.500 5.500 cp_fm_upper_to_full 70 14.2 3.314 4.820 3.314 4.820 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 4.020 4.163 apply_single 110 13.6 0.000 0.000 4.020 4.163 ot_diis_step 99 11.5 0.011 0.011 4.118 4.118 qs_ot_p2m_diag 48 11.0 0.056 0.065 3.793 3.808 pw_transfer 1331 11.6 0.066 0.075 3.771 3.803 dbcsr_complete_redistribute 325 12.2 0.412 0.462 2.616 3.768 fft_wrap_pw1pw2 1111 12.6 0.008 0.009 3.665 3.700 density_rs2pw 110 9.6 0.004 0.005 3.072 3.600 grid_integrate_task_list 110 12.3 3.379 3.596 3.379 3.596 multiply_cannon_sync_h2d 24660 15.4 3.173 3.372 3.173 3.372 cp_dbcsr_syevd 48 12.0 0.003 0.003 3.317 3.318 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 3.255 3.306 copy_fm_to_dbcsr 174 11.2 0.001 0.002 2.099 3.233 fft_wrap_pw1pw2_140 451 13.1 0.676 0.701 3.142 3.182 multiply_cannon_metrocomm3 24660 15.4 0.036 0.037 1.360 3.129 make_images_data 4110 15.4 0.046 0.050 2.764 3.086 hybrid_alltoall_any 4261 16.3 0.121 0.462 2.338 3.027 wfi_extrapolate 11 7.9 0.001 0.001 3.027 3.027 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.949 3.011 rs_pw_transfer 902 11.9 0.010 0.011 2.442 2.980 calculate_dm_sparse 110 9.5 0.001 0.001 2.904 2.932 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 1.748 2.867 mp_alltoall_i22 605 13.7 1.644 2.818 1.644 2.818 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.797 2.807 cp_fm_diag_elpa_base 48 14.0 2.651 2.708 2.795 2.805 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.780 2.782 cp_fm_cholesky_invert 11 10.9 2.639 2.647 2.639 2.647 
grid_collocate_task_list 110 9.6 2.243 2.499 2.243 2.499 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.424 2.465 fft3d_ps 1111 14.6 1.084 1.119 2.413 2.433 calculate_first_density_matrix 1 7.0 0.000 0.003 2.380 2.383 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 2.292 2.293 potential_pw2rs 110 12.3 0.013 0.013 2.179 2.189 mp_alltoall_d11v 2046 13.8 1.729 2.123 1.729 2.123 jit_kernel_multiply 10 15.7 0.958 1.813 0.958 1.813 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.733 1.748 qs_ot_get_orbitals 99 10.5 0.001 0.001 1.682 1.719 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 1.597 1.698 cp_fm_cholesky_decompose 22 10.9 1.646 1.694 1.646 1.694 mp_waitany 13376 13.8 1.091 1.578 1.091 1.578 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.523 1.536 acc_transpose_blocks 24660 15.4 0.109 0.111 1.510 1.532 multiply_cannon_metrocomm4 20550 15.4 0.060 0.063 0.853 1.487 mp_irecv_dv 62702 16.1 0.752 1.410 0.752 1.410 mp_sum_l 6594 12.7 0.885 1.406 0.885 1.406 rs_pw_transfer_RS2PW_140 121 11.5 0.169 0.178 0.849 1.396 qs_env_update_s_mstruct 11 6.9 0.000 0.001 1.234 1.343 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="206", plot="h2o_128_md", label="(8n/3r/4t)", y=65.071000, yerr=0.000000 PlotPoint: name="207", plot="h2o_128_md_mem", label="(8n/3r/4t)", y=695.454545, yerr=11.664738 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/13/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 807.299199E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 1438408 0.0% 0.0% 100.0% average stack size 0.0 0.0 684.2 marketing flops 15.646297E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 836.104192E+06 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 197280 MPI messages size (bytes): total size 339.125567E+09 min size 0.000000E+00 max size 13.107200E+06 average size 1.719006E+06 MPI breakdown and total messages size (bytes): size <= 128 1452 0 128 < size <= 8192 0 0 8192 < size <= 32768 132 4325376 32768 < size <= 131072 88656 11620319232 131072 < size <= 4194304 89424 117209825280 4194304 < size <= 16777216 17616 210291069504 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 7346 33. MP_Alltoall 8043 263767. MP_ISend 32836 654203. MP_IRecv 32836 654587. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3473 66424. MP_Allreduce 9774 644. MP_Sync 52 MP_Alltoall 1496 8504061. MP_SendRecv 3300 54848. MP_ISendRecv 3300 54848. MP_Wait 13926 MP_ISend 9240 278857. MP_IRecv 9240 278857. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.013 0.031 54.486 54.487 qs_mol_dyn_low 1 2.0 0.003 0.006 54.250 54.260 qs_forces 11 3.9 0.003 0.003 54.185 54.185 qs_energies 11 4.9 0.001 0.002 50.454 50.459 scf_env_do_scf 11 5.9 0.000 0.001 42.258 42.258 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 34.669 34.670 velocity_verlet 10 3.0 0.001 0.001 30.965 30.968 dbcsr_multiply_generic 2055 12.4 0.103 0.108 22.305 22.482 qs_scf_new_mos 99 7.5 0.001 0.001 19.562 19.631 qs_scf_loop_do_ot 99 8.5 0.001 0.001 19.561 19.630 ot_scf_mini 99 9.5 0.002 0.002 18.330 18.389 multiply_cannon 2055 13.4 0.246 0.254 17.019 18.127 multiply_cannon_loop 2055 14.4 0.323 0.333 15.727 16.075 rebuild_ks_matrix 110 8.3 0.000 0.000 12.022 12.087 qs_ks_build_kohn_sham_matrix 110 9.3 0.013 0.014 12.021 12.087 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.634 10.695 ot_mini 99 10.5 0.001 0.001 10.002 10.058 multiply_cannon_multrec 8220 15.4 3.170 4.866 7.371 8.655 init_scf_loop 11 6.9 0.000 0.000 7.542 7.543 mp_waitall_1 103326 16.6 5.770 7.524 5.770 7.524 sum_up_and_integrate 110 10.3 0.080 0.082 6.633 6.645 integrate_v_rspace 110 11.3 0.003 0.003 6.553 6.564 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.307 6.323 calculate_rho_elec 110 8.6 0.115 0.116 6.307 6.323 qs_ot_get_derivative 99 11.5 0.001 0.001 6.160 6.219 prepare_preconditioner 11 7.9 0.000 0.000 5.878 5.884 make_preconditioner 11 8.9 0.000 0.000 5.877 5.884 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.449 5.522 init_scf_run 11 5.9 0.000 0.001 5.074 5.074 scf_env_initial_rho_setup 11 6.9 0.001 0.001 5.073 5.073 dbcsr_mm_accdrv_process 
17442 15.9 2.847 3.923 4.070 4.947 qs_ot_get_p 110 10.4 0.001 0.001 4.459 4.520 multiply_cannon_metrocomm3 8220 15.4 0.018 0.018 3.013 4.507 make_m2s 4110 13.4 0.039 0.040 4.070 4.316 make_images 4110 14.4 0.645 0.702 3.943 4.187 pw_transfer 1331 11.6 0.066 0.073 3.933 3.958 fft_wrap_pw1pw2 1111 12.6 0.008 0.009 3.825 3.854 ot_diis_step 99 11.5 0.012 0.012 3.823 3.823 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 3.726 3.804 apply_single 110 13.6 0.000 0.000 3.726 3.804 grid_integrate_task_list 110 12.3 3.449 3.711 3.449 3.711 density_rs2pw 110 9.6 0.004 0.005 3.005 3.380 fft_wrap_pw1pw2_140 451 13.1 0.844 0.855 3.320 3.364 qs_ot_p2m_diag 48 11.0 0.081 0.084 3.163 3.166 multiply_cannon_sync_h2d 8220 15.4 2.893 3.076 2.893 3.076 cp_dbcsr_syevd 48 12.0 0.003 0.003 2.869 2.870 cp_fm_cholesky_invert 11 10.9 2.816 2.820 2.816 2.820 wfi_extrapolate 11 7.9 0.001 0.001 2.728 2.728 make_images_data 4110 15.4 0.039 0.044 2.311 2.693 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 2.672 2.672 hybrid_alltoall_any 4261 16.3 0.202 0.877 2.240 2.605 grid_collocate_task_list 110 9.6 2.333 2.590 2.333 2.590 rs_pw_transfer 902 11.9 0.010 0.011 2.148 2.552 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.505 2.507 calculate_dm_sparse 110 9.5 0.001 0.001 2.452 2.496 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.374 2.382 cp_fm_diag_elpa_base 48 14.0 2.319 2.344 2.372 2.380 fft3d_ps 1111 14.6 1.142 1.191 2.354 2.373 calculate_first_density_matrix 1 7.0 0.000 0.000 2.249 2.250 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.159 2.180 potential_pw2rs 110 12.3 0.016 0.016 2.054 2.057 mp_alltoall_d11v 2046 13.8 1.631 1.981 1.631 1.981 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 1.771 1.980 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 1.861 1.896 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 1.731 1.758 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.718 1.723 qs_env_update_s_mstruct 11 6.9 0.000 0.000 1.511 1.638 cp_fm_cholesky_decompose 22 10.9 1.612 1.624 1.612 1.624 mp_allgather_i34 2055 14.4 0.478 1.521 0.478 1.521 jit_kernel_multiply 9 15.8 0.915 1.515 0.915 1.515 dbcsr_complete_redistribute 325 12.2 0.565 0.594 1.420 1.513 mp_waitany 9240 13.8 1.070 1.503 1.070 1.503 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.422 1.436 multiply_cannon_metrocomm4 6165 15.4 0.018 0.019 0.483 1.360 qs_create_task_list 11 7.9 0.000 0.000 1.227 1.329 generate_qs_task_list 11 8.9 0.375 0.442 1.226 1.329 mp_irecv_dv 24056 15.7 0.460 1.322 0.460 1.322 rs_gather_matrices 110 12.3 0.327 0.371 0.960 1.301 rs_pw_transfer_RS2PW_140 121 11.5 0.164 0.170 0.803 1.205 copy_dbcsr_to_fm 151 11.3 0.003 0.003 1.116 1.153 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="208", plot="h2o_128_md", label="(8n/2r/6t)", y=54.487000, yerr=0.000000 PlotPoint: name="209", plot="h2o_128_md_mem", label="(8n/2r/6t)", y=781.818182, yerr=10.512488 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/14/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 
350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 1.612391E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 1464624 0.0% 0.0% 100.0% average stack size 0.0 0.0 672.0 marketing flops 15.646297E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 1.374900E+09 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 82200 MPI messages size (bytes): total size 297.640985E+09 min size 0.000000E+00 max size 26.214400E+06 average size 3.620936E+06 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 44 1441792 32768 < size <= 131072 18560 2432696320 131072 < size <= 4194304 54216 84915781632 4194304 < size <= 16777216 0 0 16777216 < size 8808 210291069504 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3462 67104. MP_Allreduce 9752 812. MP_Sync 52 MP_Alltoall 1474 16505187. MP_SendRecv 2310 360267. MP_ISendRecv 2310 360267. MP_Wait 5214 MP_ISend 2420 1187840. MP_IRecv 2420 1187840. 
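As a sanity check on the MPI breakdown above (a derived ratio, not an extra measurement), the reported "average size" is just the total transferred volume divided by the number of messages exchanged for this run:

    average size = total size / messages exchanged
                 = 297.640985E+09 B / 82200
                 ≈ 3.62E+06 B

which agrees with the 3.620936E+06 printed by DBCSR.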
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.017 0.036 87.487 87.488 qs_mol_dyn_low 1 2.0 0.003 0.004 87.193 87.203 qs_forces 11 3.9 0.003 0.003 87.128 87.129 qs_energies 11 4.9 0.002 0.002 82.868 82.870 scf_env_do_scf 11 5.9 0.001 0.001 72.823 72.823 velocity_verlet 10 3.0 0.001 0.001 56.262 56.268 scf_env_do_scf_inner_loop 99 6.5 0.003 0.007 44.211 44.212 dbcsr_multiply_generic 2055 12.4 0.117 0.122 28.398 28.541 init_scf_loop 11 6.9 0.000 0.000 28.536 28.538 prepare_preconditioner 11 7.9 0.000 0.000 26.494 26.508 make_preconditioner 11 8.9 0.000 0.000 26.494 26.508 qs_scf_new_mos 99 7.5 0.001 0.001 25.949 26.042 qs_scf_loop_do_ot 99 8.5 0.001 0.001 25.948 26.041 make_full_inverse_cholesky 11 9.9 0.000 0.000 20.653 25.959 ot_scf_mini 99 9.5 0.002 0.002 24.191 24.269 multiply_cannon 2055 13.4 0.345 0.379 21.396 22.095 multiply_cannon_loop 2055 14.4 0.342 0.346 19.590 19.991 cp_fm_upper_to_full 70 14.2 12.859 18.414 12.859 18.414 rebuild_ks_matrix 110 8.3 0.000 0.001 14.197 14.289 qs_ks_build_kohn_sham_matrix 110 9.3 0.014 0.014 14.197 14.289 ot_mini 99 10.5 0.001 0.001 13.492 13.568 qs_ks_update_qs_env 110 7.6 0.001 0.001 12.804 12.888 dbcsr_complete_redistribute 325 12.2 1.027 1.056 7.449 10.832 copy_fm_to_dbcsr 174 11.2 0.001 0.001 6.389 9.771 multiply_cannon_multrec 8220 15.4 4.331 4.513 9.521 9.638 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 5.825 9.167 qs_ot_get_derivative 99 11.5 0.001 0.001 8.840 8.916 mp_alltoall_i22 605 13.7 5.469 8.886 5.469 8.886 mp_waitall_1 84994 16.7 7.432 8.419 7.432 8.419 qs_rho_update_rho_low 110 7.6 0.001 0.001 7.712 7.747 calculate_rho_elec 110 8.6 0.228 0.228 7.711 7.747 sum_up_and_integrate 110 10.3 0.151 0.152 7.422 7.436 integrate_v_rspace 110 11.3 0.004 0.004 7.272 7.284 make_m2s 4110 13.4 0.043 0.043 5.203 5.737 init_scf_run 11 5.9 0.000 0.001 5.735 5.735 scf_env_initial_rho_setup 11 6.9 0.001 0.001 5.735 5.735 qs_ot_get_p 110 10.4 0.001 0.001 5.528 5.611 make_images 4110 14.4 0.885 0.935 5.018 5.553 dbcsr_mm_accdrv_process 11614 15.7 3.303 3.797 5.047 5.328 cp_fm_cholesky_invert 11 10.9 5.284 5.288 5.284 5.288 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 4.722 5.264 apply_single 110 13.6 0.000 0.000 4.721 5.264 pw_transfer 1331 11.6 0.075 0.076 5.015 5.030 multiply_cannon_metrocomm3 8220 15.4 0.018 0.019 4.708 4.985 fft_wrap_pw1pw2 1111 12.6 0.009 0.010 4.897 4.912 ot_diis_step 99 11.5 0.016 0.017 4.635 4.635 fft_wrap_pw1pw2_140 451 13.1 1.344 1.352 4.271 4.282 qs_ot_p2m_diag 48 11.0 0.151 0.156 3.966 3.973 multiply_cannon_sync_h2d 8220 15.4 3.939 3.941 3.939 3.941 grid_integrate_task_list 110 12.3 3.791 3.857 3.791 3.857 density_rs2pw 110 9.6 0.005 0.005 3.687 3.719 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 3.691 3.692 hybrid_alltoall_any 4261 16.3 0.256 0.552 2.826 3.574 cp_dbcsr_syevd 48 12.0 0.003 0.003 3.529 3.530 make_images_data 4110 15.4 0.042 0.045 2.835 3.477 wfi_extrapolate 11 7.9 0.001 0.001 3.356 3.356 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 2.836 3.308 calculate_dm_sparse 110 9.5 0.001 0.001 3.113 3.133 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.940 2.940 cp_fm_diag_elpa_base 48 14.0 2.402 2.600 2.938 2.939 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.876 2.880 fft3d_ps 1111 14.6 1.296 1.307 2.816 2.833 
grid_collocate_task_list 110 9.6 2.689 2.702 2.689 2.702 potential_pw2rs 110 12.3 0.022 0.022 2.409 2.419 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.364 2.397 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.353 2.393 rs_pw_transfer 902 11.9 0.010 0.011 2.281 2.302 qs_env_update_s_mstruct 11 6.9 0.000 0.000 2.216 2.278 calculate_first_density_matrix 1 7.0 0.000 0.000 2.259 2.260 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 2.102 2.203 mp_alltoall_d11v 2046 13.8 1.929 2.015 1.929 2.015 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.996 2.005 cp_fm_cholesky_decompose 22 10.9 1.977 1.998 1.977 1.998 qs_create_task_list 11 7.9 0.001 0.001 1.898 1.944 generate_qs_task_list 11 8.9 0.734 0.786 1.898 1.944 copy_dbcsr_to_fm 151 11.3 0.003 0.003 1.799 1.857 jit_kernel_multiply 10 15.4 1.543 1.760 1.543 1.760 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="210", plot="h2o_128_md", label="(8n/1r/12t)", y=87.488000, yerr=0.000000 PlotPoint: name="211", plot="h2o_128_md_mem", label="(8n/1r/12t)", y=1226.181818, yerr=63.629350 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/15/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1420242647040 0.0% 0.0% 100.0% flops 32 x 32 x 32 1943472701440 0.0% 0.0% 100.0% flops 22 x 9 x 32 1972057190400 0.0% 0.0% 100.0% flops 9 x 22 x 32 1977770336256 0.0% 0.0% 100.0% flops 22 x 22 x 32 2734287699968 0.0% 0.0% 100.0% flops 32 x 32 x 9 4416300122112 0.0% 0.0% 100.0% flops 32 x 32 x 22 5397700149248 0.0% 0.0% 100.0% flops 9 x 32 x 32 5443971710976 0.0% 0.0% 100.0% flops 22 x 32 x 32 6653743202304 0.0% 0.0% 100.0% flops 9 x 32 x 9 11528903135232 0.0% 0.0% 100.0% flops 22 x 32 x 9 15129160814592 0.0% 0.0% 100.0% flops 9 x 32 x 22 15129160814592 0.0% 0.0% 100.0% flops 22 x 32 x 22 19767995056128 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 93.514766E+12 0.0% 0.0% 100.0% flops max/rank 1.094965E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6755941440 0.0% 0.0% 100.0% number of processed stacks 11950464 0.0% 0.0% 100.0% average stack size 0.0 0.0 565.3 marketing flops 144.580175E+12 ------------------------------------------------------------------------------- # multiplications 2507 max memory usage/rank 627.142656E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 10348896 MPI messages size (bytes): total size 4.491514E+12 min size 0.000000E+00 max size 4.537280E+06 average size 434.009000E+03 MPI breakdown and total messages size (bytes): size <= 128 65736 0 128 < size <= 8192 1232 10092544 8192 < size <= 32768 3576680 95640223744 32768 < size <= 131072 1294784 74079797248 131072 < size <= 4194304 5148576 3175955383376 4194304 < size <= 16777216 261888 1145794321408 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4002 57767. 
MP_Allreduce 11084 796. MP_Sync 87 MP_Alltoall 2226 2278642. MP_SendRecv 24320 18752. MP_ISendRecv 24320 18752. MP_Wait 42476 MP_ISend 16020 108028. MP_IRecv 16020 108028. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.017 0.034 206.118 206.119 qs_mol_dyn_low 1 2.0 0.003 0.009 205.628 205.641 qs_forces 11 3.9 0.009 0.010 205.544 205.545 qs_energies 11 4.9 0.002 0.003 199.894 199.908 scf_env_do_scf 11 5.9 0.001 0.001 183.375 183.379 scf_env_do_scf_inner_loop 117 6.6 0.003 0.008 163.174 163.176 dbcsr_multiply_generic 2507 12.6 0.177 0.181 124.068 124.584 qs_scf_new_mos 117 7.6 0.001 0.001 122.914 123.138 qs_scf_loop_do_ot 117 8.6 0.001 0.001 122.913 123.137 velocity_verlet 10 3.0 0.001 0.002 122.976 122.977 ot_scf_mini 117 9.6 0.003 0.003 116.278 116.514 multiply_cannon 2507 13.6 0.239 0.246 100.817 102.307 multiply_cannon_loop 2507 14.6 2.101 2.143 98.694 99.986 ot_mini 117 10.6 0.001 0.001 65.197 65.414 multiply_cannon_multrec 60168 15.6 33.399 35.406 41.583 43.134 qs_ot_get_derivative 117 11.6 0.001 0.001 40.432 40.655 rebuild_ks_matrix 128 8.3 0.001 0.001 33.952 34.219 qs_ks_build_kohn_sham_matrix 128 9.3 0.016 0.018 33.952 34.219 mp_waitall_1 267128 16.5 27.902 30.792 27.902 30.792 qs_ks_update_qs_env 128 7.6 0.001 0.001 30.439 30.705 qs_ot_get_p 128 10.4 0.001 0.001 29.091 29.415 multiply_cannon_sync_h2d 60168 15.6 27.563 29.118 27.563 29.118 apply_preconditioner_dbcsr 128 12.6 0.000 0.001 24.366 25.218 apply_single 128 13.6 0.001 0.001 24.365 25.218 ot_diis_step 117 11.6 0.007 0.008 24.530 24.531 qs_ot_p2m_diag 83 11.4 0.079 0.091 22.620 22.686 init_scf_loop 11 6.9 0.002 0.016 20.129 20.130 cp_dbcsr_syevd 83 12.4 0.005 0.005 19.973 19.973 qs_ot_get_derivative_diag 77 12.4 0.002 0.002 19.248 19.444 multiply_cannon_metrocomm3 60168 15.6 0.118 0.122 15.698 18.009 cp_fm_diag_elpa 83 13.4 0.000 0.000 17.069 17.098 cp_fm_diag_elpa_base 83 14.4 17.004 17.048 17.066 17.095 prepare_preconditioner 11 7.9 0.000 0.000 15.537 15.573 make_preconditioner 11 8.9 0.000 0.000 15.537 15.573 make_full_inverse_cholesky 11 9.9 0.000 0.000 14.784 14.953 sum_up_and_integrate 128 10.3 0.090 0.109 14.427 14.444 integrate_v_rspace 128 11.3 0.004 0.005 14.337 14.356 make_m2s 5014 13.6 0.103 0.110 13.684 14.022 qs_rho_update_rho_low 128 7.7 0.001 0.001 13.769 13.906 calculate_rho_elec 128 8.7 0.046 0.065 13.768 13.905 make_images 5014 14.6 0.405 0.422 13.510 13.863 init_scf_run 11 5.9 0.000 0.001 12.371 12.371 scf_env_initial_rho_setup 11 6.9 0.001 0.001 12.371 12.371 density_rs2pw 128 9.7 0.006 0.007 7.021 10.309 mp_sum_l 7950 12.9 8.299 9.626 8.299 9.626 wfi_extrapolate 11 7.9 0.001 0.001 9.060 9.060 rs_pw_transfer 1046 11.9 0.017 0.020 5.710 9.009 cp_fm_cholesky_invert 11 10.9 8.794 8.801 8.794 8.801 calculate_dm_sparse 128 9.5 0.001 0.001 8.436 8.536 dbcsr_mm_accdrv_process 124484 16.2 3.164 3.276 7.743 8.260 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 7.958 8.097 qs_ot_get_orbitals 117 10.6 0.001 0.001 7.886 7.952 grid_integrate_task_list 128 12.3 7.306 7.897 7.306 7.897 pw_transfer 1547 11.6 0.075 0.086 7.693 7.867 fft_wrap_pw1pw2 1291 12.7 0.011 0.012 7.489 7.663 make_images_data 5014 15.6 0.070 0.076 6.761 7.597 multiply_cannon_metrocomm1 60168 15.6 0.090 0.095 5.767 7.421 hybrid_alltoall_any 
5200 16.5 0.293 2.264 5.924 7.348 fft_wrap_pw1pw2_140 523 13.2 1.281 1.322 6.607 6.780 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.003 6.751 6.762 mp_waitany 16020 13.9 2.754 6.027 2.754 6.027 grid_collocate_task_list 128 9.7 4.707 6.024 4.707 6.024 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 5.812 5.872 fft3d_ps 1291 14.7 2.133 2.790 5.244 5.558 rs_pw_transfer_RS2PW_140 139 11.5 0.282 0.301 2.180 5.485 mp_alltoall_d11v 2415 14.1 4.162 5.277 4.162 5.277 cp_fm_cholesky_decompose 22 10.9 4.575 4.589 4.575 4.589 potential_pw2rs 128 12.3 0.010 0.011 4.543 4.561 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="400", plot="h2o_256_md", label="(8n/12r/1t)", y=206.119000, yerr=0.000000 PlotPoint: name="401", plot="h2o_256_md_mem", label="(8n/12r/1t)", y=594.090909, yerr=6.416662 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/16/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1420241154048 0.0% 0.0% 100.0% flops 32 x 32 x 32 1943472701440 0.0% 0.0% 100.0% flops 22 x 9 x 32 1972057190400 0.0% 0.0% 100.0% flops 9 x 22 x 32 1977770336256 0.0% 0.0% 100.0% flops 22 x 22 x 32 2734287699968 0.0% 0.0% 100.0% flops 32 x 32 x 9 4416300122112 0.0% 0.0% 100.0% flops 32 x 32 x 22 5397700149248 0.0% 0.0% 100.0% flops 9 x 32 x 32 5443971710976 0.0% 0.0% 100.0% flops 22 x 32 x 32 6653743202304 0.0% 0.0% 100.0% flops 9 x 32 x 9 11528896499712 0.0% 0.0% 100.0% flops 22 x 32 x 9 15129160814592 0.0% 0.0% 100.0% flops 9 x 32 x 22 15129160814592 0.0% 0.0% 100.0% flops 22 x 32 x 22 19767995056128 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 93.514757E+12 0.0% 0.0% 100.0% flops max/rank 2.183246E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6755939872 0.0% 0.0% 100.0% number of processed stacks 5975232 0.0% 0.0% 100.0% average stack size 0.0 0.0 1130.7 marketing flops 144.580175E+12 ------------------------------------------------------------------------------- # multiplications 2507 max memory usage/rank 827.191296E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 2406720 MPI messages size (bytes): total size 4.100942E+12 min size 0.000000E+00 max size 17.653760E+06 average size 1.703955E+06 MPI breakdown and total messages size (bytes): size <= 128 14916 0 128 < size <= 8192 0 0 8192 < size <= 32768 70860 2317615104 32768 < size <= 131072 722992 55511613440 131072 < size <= 4194304 1375664 1398181724160 4194304 < size <= 16777216 154704 1463834560144 16777216 < size 67584 1181116006400 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3992 58363. MP_Allreduce 11058 960. MP_Sync 87 MP_Alltoall 1969 6478743. MP_SendRecv 12032 47072. MP_ISendRecv 12032 47072. MP_Wait 25916 MP_ISend 11748 212467. MP_IRecv 11748 212467. 
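Similarly, in the DBCSR statistics above the "average stack size" is the ratio of small matrix multiplications to processed stacks (a derived quantity, not a separate counter):

    average stack size ≈ matmuls total / number of processed stacks
                       = 6755939872 / 5975232
                       ≈ 1130.7

in line with the value shown in the statistics table.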
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.013 0.030 188.912 188.913 qs_mol_dyn_low 1 2.0 0.003 0.004 188.554 188.568 qs_forces 11 3.9 0.004 0.005 188.420 188.422 qs_energies 11 4.9 0.002 0.002 181.535 181.547 scf_env_do_scf 11 5.9 0.001 0.002 165.255 165.265 scf_env_do_scf_inner_loop 117 6.6 0.003 0.008 132.778 132.780 velocity_verlet 10 3.0 0.001 0.001 119.103 119.106 dbcsr_multiply_generic 2507 12.6 0.185 0.191 96.969 98.126 qs_scf_new_mos 117 7.6 0.001 0.001 92.581 93.128 qs_scf_loop_do_ot 117 8.6 0.001 0.001 92.580 93.128 ot_scf_mini 117 9.6 0.004 0.004 87.798 88.401 multiply_cannon 2507 13.6 0.480 0.535 77.305 81.628 multiply_cannon_loop 2507 14.6 1.256 1.295 74.073 76.494 ot_mini 117 10.6 0.001 0.001 49.413 49.966 mp_waitall_1 214728 16.6 24.184 39.264 24.184 39.264 multiply_cannon_multrec 30084 15.6 21.931 26.453 31.528 36.275 rebuild_ks_matrix 128 8.3 0.001 0.001 33.486 34.136 qs_ks_build_kohn_sham_matrix 128 9.3 0.017 0.020 33.486 34.135 init_scf_loop 11 6.9 0.000 0.000 32.390 32.391 qs_ks_update_qs_env 128 7.6 0.001 0.001 30.044 30.634 multiply_cannon_metrocomm3 30084 15.6 0.093 0.099 15.768 29.051 qs_ot_get_derivative 117 11.6 0.001 0.002 27.531 28.135 prepare_preconditioner 11 7.9 0.000 0.000 27.977 28.040 make_preconditioner 11 8.9 0.000 0.000 27.977 28.040 make_full_inverse_cholesky 11 9.9 0.000 0.000 26.669 27.194 apply_preconditioner_dbcsr 128 12.6 0.000 0.000 21.960 23.026 apply_single 128 13.6 0.001 0.001 21.959 23.026 multiply_cannon_sync_h2d 30084 15.6 19.175 22.208 19.175 22.208 ot_diis_step 117 11.6 0.014 0.015 21.705 21.707 qs_ot_get_p 128 10.4 0.001 0.001 20.242 20.977 cp_fm_cholesky_invert 11 10.9 16.280 16.292 16.280 16.292 make_m2s 5014 13.6 0.090 0.096 13.970 15.603 qs_ot_p2m_diag 83 11.4 0.187 0.215 15.466 15.498 make_images 5014 14.6 1.175 1.364 13.772 15.401 sum_up_and_integrate 128 10.3 0.117 0.135 14.883 14.914 integrate_v_rspace 128 11.3 0.004 0.004 14.765 14.799 cp_dbcsr_syevd 83 12.4 0.006 0.006 14.344 14.345 qs_rho_update_rho_low 128 7.7 0.001 0.001 14.133 14.162 calculate_rho_elec 128 8.7 0.089 0.106 14.132 14.162 init_scf_run 11 5.9 0.000 0.001 11.510 11.511 scf_env_initial_rho_setup 11 6.9 0.001 0.001 11.510 11.511 cp_fm_diag_elpa 83 13.4 0.000 0.001 11.248 11.273 qs_ot_get_derivative_diag 77 12.4 0.002 0.002 10.836 11.270 cp_fm_diag_elpa_base 83 14.4 10.995 11.086 11.243 11.267 multiply_cannon_metrocomm4 27577 15.6 0.104 0.119 3.813 10.667 make_images_data 5014 15.6 0.067 0.074 8.453 10.398 mp_irecv_dv 69486 16.3 3.611 10.275 3.611 10.275 density_rs2pw 128 9.7 0.007 0.008 7.377 9.814 hybrid_alltoall_any 5200 16.5 0.343 1.516 7.164 9.773 dbcsr_mm_accdrv_process 62242 16.2 4.450 5.111 9.051 9.660 pw_transfer 1547 11.6 0.086 0.096 8.710 8.770 fft_wrap_pw1pw2 1291 12.7 0.011 0.012 8.484 8.547 wfi_extrapolate 11 7.9 0.001 0.001 8.385 8.385 rs_pw_transfer 1046 11.9 0.015 0.017 5.616 8.061 grid_integrate_task_list 128 12.3 7.392 7.795 7.392 7.795 fft_wrap_pw1pw2_140 523 13.2 1.347 1.371 7.531 7.604 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 6.145 6.950 cp_fm_cholesky_decompose 22 10.9 6.761 6.837 6.761 6.837 calculate_dm_sparse 128 9.5 0.001 0.001 6.433 6.568 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 6.148 6.158 grid_collocate_task_list 
128 9.7 4.836 6.020 4.836 6.020 fft3d_ps 1291 14.7 2.810 2.989 5.771 5.823 mp_sum_l 7950 12.9 3.907 5.801 3.907 5.801 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 5.324 5.486 qs_ot_get_orbitals 117 10.6 0.001 0.001 5.275 5.336 mp_alltoall_d11v 2415 14.1 4.114 4.994 4.114 4.994 mp_waitany 11748 13.9 2.492 4.950 2.492 4.950 potential_pw2rs 128 12.3 0.015 0.018 4.862 4.877 mp_allgather_i34 2507 14.6 1.796 4.783 1.796 4.783 rs_pw_transfer_RS2PW_140 139 11.5 0.355 0.382 2.098 4.511 dbcsr_complete_redistribute 395 12.7 0.770 0.880 3.134 3.985 mp_sum_d 4465 12.1 2.578 3.890 2.578 3.890 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 3.833 3.889 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="402", plot="h2o_256_md", label="(8n/6r/2t)", y=188.913000, yerr=0.000000 PlotPoint: name="403", plot="h2o_256_md_mem", label="(8n/6r/2t)", y=788.090909, yerr=2.874798 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/17/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1420241154048 0.0% 0.0% 100.0% flops 32 x 32 x 32 1943472701440 0.0% 0.0% 100.0% flops 22 x 9 x 32 1972057190400 0.0% 0.0% 100.0% flops 9 x 22 x 32 1977770336256 0.0% 0.0% 100.0% flops 22 x 22 x 32 2734287699968 0.0% 0.0% 100.0% flops 32 x 32 x 9 4416300122112 0.0% 0.0% 100.0% flops 32 x 32 x 22 5397700149248 0.0% 0.0% 100.0% flops 9 x 32 x 32 5443971710976 0.0% 0.0% 100.0% flops 22 x 32 x 32 6653743202304 0.0% 0.0% 100.0% flops 9 x 32 x 9 11528896499712 0.0% 0.0% 100.0% flops 22 x 32 x 9 15129160814592 0.0% 0.0% 100.0% flops 9 x 32 x 22 15129160814592 0.0% 0.0% 100.0% flops 22 x 32 x 22 19767995056128 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 93.514757E+12 0.0% 0.0% 100.0% flops max/rank 2.928533E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6755939872 0.0% 0.0% 100.0% number of processed stacks 3984192 0.0% 0.0% 100.0% average stack size 0.0 0.0 1695.7 marketing flops 144.579337E+12 ------------------------------------------------------------------------------- # multiplications 2507 max memory usage/rank 935.059456E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 1042912 MPI messages size (bytes): total size 2.716210E+12 min size 0.000000E+00 max size 26.214400E+06 average size 2.604448E+06 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 0 0 8192 < size <= 32768 264 8650752 32768 < size <= 131072 281856 36943429632 131072 < size <= 4194304 660064 996105256960 4194304 < size <= 16777216 65632 931531083728 16777216 < size 28672 751619276800 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3992 58357. MP_Allreduce 11057 1000. MP_Sync 87 MP_Alltoall 1712 9388896. MP_SendRecv 7936 75008. MP_ISendRecv 7936 75008. MP_Wait 21820 MP_ISend 11748 275205. MP_IRecv 11748 275205. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.064 0.172 175.279 175.281 qs_mol_dyn_low 1 2.0 0.003 0.005 174.664 174.677 qs_forces 11 3.9 0.008 0.016 174.565 174.571 qs_energies 11 4.9 0.004 0.008 167.868 167.884 scf_env_do_scf 11 5.9 0.001 0.005 152.333 152.334 scf_env_do_scf_inner_loop 117 6.6 0.003 0.009 117.491 117.492 velocity_verlet 10 3.0 0.001 0.002 112.553 112.555 dbcsr_multiply_generic 2507 12.6 0.176 0.181 81.552 82.652 qs_scf_new_mos 117 7.6 0.001 0.001 78.943 79.240 qs_scf_loop_do_ot 117 8.6 0.001 0.001 78.942 79.239 ot_scf_mini 117 9.6 0.004 0.004 74.802 75.158 multiply_cannon 2507 13.6 0.507 0.522 61.907 66.803 multiply_cannon_loop 2507 14.6 0.865 0.893 58.652 61.362 ot_mini 117 10.6 0.001 0.001 42.408 42.799 init_scf_loop 11 6.9 0.015 0.048 34.741 34.742 mp_waitall_1 170520 16.6 24.835 34.224 24.835 34.224 rebuild_ks_matrix 128 8.3 0.001 0.001 31.096 31.562 qs_ks_build_kohn_sham_matrix 128 9.3 0.017 0.018 31.095 31.561 prepare_preconditioner 11 7.9 0.000 0.001 30.661 30.713 make_preconditioner 11 8.9 0.003 0.023 30.661 30.712 make_full_inverse_cholesky 11 9.9 0.000 0.000 28.329 29.745 qs_ks_update_qs_env 128 7.6 0.001 0.001 27.957 28.387 multiply_cannon_multrec 20056 15.6 13.484 16.329 22.136 25.033 multiply_cannon_metrocomm3 20056 15.6 0.059 0.064 15.127 24.925 qs_ot_get_derivative 117 11.6 0.001 0.002 22.759 23.124 apply_preconditioner_dbcsr 128 12.6 0.000 0.000 19.823 20.787 apply_single 128 13.6 0.001 0.001 19.823 20.786 ot_diis_step 117 11.6 0.018 0.018 19.549 19.550 qs_ot_get_p 128 10.4 0.011 0.081 17.792 18.223 multiply_cannon_sync_h2d 20056 15.6 14.298 15.996 14.298 15.996 make_m2s 5014 13.6 0.081 0.087 14.588 15.804 make_images 5014 14.6 1.181 1.269 14.362 15.578 sum_up_and_integrate 128 10.3 0.134 0.146 14.822 14.848 integrate_v_rspace 128 11.3 0.004 0.006 14.688 14.719 qs_rho_update_rho_low 128 7.7 0.001 0.001 14.384 14.419 calculate_rho_elec 128 8.7 0.132 0.148 14.383 14.418 cp_fm_cholesky_invert 11 10.9 14.217 14.226 14.217 14.226 qs_ot_p2m_diag 83 11.4 0.266 0.273 13.458 13.467 cp_dbcsr_syevd 83 12.4 0.005 0.006 12.472 12.474 make_images_data 5014 15.6 0.061 0.068 9.031 10.668 init_scf_run 11 5.9 0.000 0.001 10.434 10.434 scf_env_initial_rho_setup 11 6.9 0.001 0.001 10.434 10.434 hybrid_alltoall_any 5200 16.5 0.436 1.972 7.879 9.977 cp_fm_diag_elpa 83 13.4 0.000 0.001 9.493 9.506 cp_fm_diag_elpa_base 83 14.4 9.087 9.245 9.490 9.502 multiply_cannon_metrocomm4 17549 15.6 0.062 0.073 3.432 9.215 qs_ot_get_derivative_diag 77 12.4 0.002 0.003 8.916 9.189 density_rs2pw 128 9.7 0.006 0.007 7.129 9.159 mp_irecv_dv 50230 16.2 3.310 8.967 3.310 8.967 pw_transfer 1547 11.6 0.086 0.102 8.823 8.924 fft_wrap_pw1pw2 1291 12.7 0.011 0.012 8.598 8.703 dbcsr_mm_accdrv_process 41502 16.2 4.510 5.233 8.101 8.240 grid_integrate_task_list 128 12.3 7.529 7.963 7.529 7.963 fft_wrap_pw1pw2_140 523 13.2 1.428 1.454 7.656 7.776 wfi_extrapolate 11 7.9 0.001 0.001 7.440 7.440 cp_fm_upper_to_full 105 14.8 5.826 7.306 5.826 7.306 cp_fm_cholesky_decompose 22 10.9 7.238 7.254 7.238 7.254 rs_pw_transfer 1046 11.9 0.014 0.014 5.099 7.148 dbcsr_complete_redistribute 395 12.7 1.170 1.197 4.368 6.143 grid_collocate_task_list 128 9.7 5.019 5.928 5.019 5.928 calculate_dm_sparse 128 9.5 0.001 
0.001 5.729 5.854 fft3d_ps 1291 14.7 2.741 2.963 5.695 5.776 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.415 5.422 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 4.528 5.247 copy_fm_to_dbcsr 209 11.7 0.002 0.002 3.228 4.993 mp_alltoall_d11v 2415 14.1 4.179 4.908 4.179 4.908 mp_sum_l 7950 12.9 3.273 4.779 3.273 4.779 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.568 4.697 potential_pw2rs 128 12.3 0.021 0.023 4.632 4.643 mp_allgather_i34 2507 14.6 1.760 4.591 1.760 4.591 mp_waitany 11748 13.9 2.386 4.463 2.386 4.463 transfer_fm_to_dbcsr 11 9.9 0.019 0.029 2.306 4.046 rs_pw_transfer_RS2PW_140 139 11.5 0.332 0.352 1.973 4.010 qs_ot_get_orbitals 117 10.6 0.001 0.001 3.970 3.999 qs_energies_init_hamiltonians 11 5.9 0.033 0.037 3.840 3.844 mp_alltoall_i22 716 14.1 1.915 3.785 1.915 3.785 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 3.641 3.678 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="404", plot="h2o_256_md", label="(8n/4r/3t)", y=175.281000, yerr=0.000000 PlotPoint: name="405", plot="h2o_256_md_mem", label="(8n/4r/3t)", y=888.363636, yerr=7.413412 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/18/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1420242647040 0.0% 0.0% 100.0% flops 32 x 32 x 32 1943472701440 0.0% 0.0% 100.0% flops 22 x 9 x 32 1972057190400 0.0% 0.0% 100.0% flops 9 x 22 x 32 1977770336256 0.0% 0.0% 100.0% flops 22 x 22 x 32 2734287699968 0.0% 0.0% 100.0% flops 32 x 32 x 9 4416300122112 0.0% 0.0% 100.0% flops 32 x 32 x 22 5397700149248 0.0% 0.0% 100.0% flops 9 x 32 x 32 5443971710976 0.0% 0.0% 100.0% flops 22 x 32 x 32 6653743202304 0.0% 0.0% 100.0% flops 9 x 32 x 9 11528903135232 0.0% 0.0% 100.0% flops 22 x 32 x 9 15129160814592 0.0% 0.0% 100.0% flops 9 x 32 x 22 15129160814592 0.0% 0.0% 100.0% flops 22 x 32 x 22 19767995056128 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 93.514766E+12 0.0% 0.0% 100.0% flops max/rank 4.353791E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 6755941440 0.0% 0.0% 100.0% number of processed stacks 5977344 0.0% 0.0% 100.0% average stack size 0.0 0.0 1130.3 marketing flops 144.580175E+12 ------------------------------------------------------------------------------- # multiplications 2507 max memory usage/rank 1.128956E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1143192 MPI messages size (bytes): total size 2.023815E+12 min size 0.000000E+00 max size 17.653760E+06 average size 1.770320E+06 MPI breakdown and total messages size (bytes): size <= 128 6996 0 128 < size <= 8192 0 0 8192 < size <= 32768 396 8650752 32768 < size <= 131072 319024 36042702848 131072 < size <= 4194304 715736 785529176064 4194304 < size <= 16777216 70320 665379475120 16777216 < size 30720 536870912000 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4002 58210. MP_Allreduce 11082 1082. MP_Sync 87 MP_Alltoall 1712 12503107. MP_SendRecv 5888 75008. MP_ISendRecv 5888 75008. MP_Wait 22442 MP_ISend 14952 244818. MP_IRecv 14952 244818. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.019 0.034 190.373 190.374 qs_mol_dyn_low 1 2.0 0.003 0.005 189.854 189.868 qs_forces 11 3.9 0.005 0.010 189.736 189.742 qs_energies 11 4.9 0.002 0.004 182.470 182.483 scf_env_do_scf 11 5.9 0.001 0.003 165.541 165.548 velocity_verlet 10 3.0 0.001 0.002 125.898 125.902 scf_env_do_scf_inner_loop 117 6.6 0.003 0.008 119.501 119.502 dbcsr_multiply_generic 2507 12.6 0.184 0.189 81.182 82.006 qs_scf_new_mos 117 7.6 0.001 0.001 81.146 81.484 qs_scf_loop_do_ot 117 8.6 0.001 0.001 81.145 81.483 ot_scf_mini 117 9.6 0.003 0.005 76.430 76.748 multiply_cannon 2507 13.6 0.566 0.603 55.004 58.599 multiply_cannon_loop 2507 14.6 1.187 1.212 51.001 52.906 init_scf_loop 11 6.9 0.001 0.003 45.919 45.920 ot_mini 117 10.6 0.001 0.001 43.225 43.524 prepare_preconditioner 11 7.9 0.000 0.000 41.695 41.717 make_preconditioner 11 8.9 0.000 0.001 41.695 41.717 make_full_inverse_cholesky 11 9.9 0.013 0.023 35.364 40.337 multiply_cannon_multrec 30084 15.6 13.934 19.534 25.869 30.962 rebuild_ks_matrix 128 8.3 0.001 0.001 30.245 30.567 qs_ks_build_kohn_sham_matrix 128 9.3 0.017 0.021 30.245 30.566 qs_ks_update_qs_env 128 7.6 0.001 0.001 27.189 27.470 mp_waitall_1 147882 16.7 17.724 27.152 17.724 27.152 qs_ot_get_derivative 117 11.6 0.001 0.002 23.330 23.644 make_m2s 5014 13.6 0.098 0.103 21.792 22.717 make_images 5014 14.6 1.994 2.584 21.486 22.413 apply_preconditioner_dbcsr 128 12.6 0.000 0.001 19.280 19.882 apply_single 128 13.6 0.001 0.001 19.279 19.882 ot_diis_step 117 11.6 0.017 0.018 19.770 19.771 qs_ot_get_p 128 10.4 0.001 0.001 18.936 19.301 cp_fm_upper_to_full 105 14.8 11.422 16.792 11.422 16.792 cp_fm_cholesky_invert 11 10.9 15.773 15.782 15.773 15.782 qs_rho_update_rho_low 128 7.7 0.001 0.001 15.260 15.279 calculate_rho_elec 128 8.7 0.176 0.192 15.259 15.279 multiply_cannon_metrocomm3 30084 15.6 0.048 0.051 6.686 15.185 sum_up_and_integrate 128 10.3 0.141 
0.154 15.022 15.050 integrate_v_rspace 128 11.3 0.004 0.006 14.880 14.914 qs_ot_p2m_diag 83 11.4 0.343 0.391 14.623 14.674 cp_dbcsr_syevd 83 12.4 0.005 0.006 13.350 13.352 make_images_data 5014 15.6 0.065 0.069 11.331 13.345 multiply_cannon_sync_h2d 30084 15.6 11.626 13.151 11.626 13.151 dbcsr_complete_redistribute 395 12.7 1.512 1.633 9.009 12.720 dbcsr_mm_accdrv_process 62264 16.2 7.385 8.239 11.504 12.069 hybrid_alltoall_any 5200 16.5 0.531 2.220 10.159 11.977 copy_fm_to_dbcsr 209 11.7 0.001 0.002 7.656 11.358 init_scf_run 11 5.9 0.000 0.001 10.979 10.980 scf_env_initial_rho_setup 11 6.9 0.001 0.001 10.979 10.980 cp_fm_diag_elpa 83 13.4 0.000 0.001 10.180 10.193 cp_fm_diag_elpa_base 83 14.4 9.227 9.555 10.174 10.190 qs_ot_get_derivative_diag 77 12.4 0.002 0.002 9.847 10.084 transfer_fm_to_dbcsr 11 9.9 0.001 0.004 6.309 9.934 mp_alltoall_i22 716 14.1 5.532 9.278 5.532 9.278 pw_transfer 1547 11.6 0.087 0.102 9.138 9.227 fft_wrap_pw1pw2 1291 12.7 0.011 0.012 8.911 9.005 density_rs2pw 128 9.7 0.006 0.007 6.951 8.325 grid_integrate_task_list 128 12.3 7.729 8.183 7.729 8.183 fft_wrap_pw1pw2_140 523 13.2 1.572 1.605 7.942 8.052 wfi_extrapolate 11 7.9 0.001 0.001 7.870 7.871 multiply_cannon_metrocomm4 25070 15.6 0.080 0.092 2.871 7.623 cp_fm_cholesky_decompose 22 10.9 7.400 7.504 7.400 7.504 mp_irecv_dv 76098 16.2 2.723 7.344 2.723 7.344 mp_alltoall_d11v 2415 14.1 5.893 6.600 5.893 6.600 calculate_dm_sparse 128 9.5 0.001 0.001 6.388 6.495 rs_pw_transfer 1046 11.9 0.013 0.015 4.621 6.036 grid_collocate_task_list 128 9.7 5.197 5.970 5.197 5.970 fft3d_ps 1291 14.7 2.844 2.922 5.813 5.869 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.601 5.654 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 4.497 4.590 qs_ot_get_orbitals 117 10.6 0.001 0.001 4.498 4.551 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.441 4.520 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 4.509 4.510 potential_pw2rs 128 12.3 0.024 0.024 4.489 4.501 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="406", plot="h2o_256_md", label="(8n/3r/4t)", y=190.374000, yerr=0.000000 PlotPoint: name="407", plot="h2o_256_md_mem", label="(8n/3r/4t)", y=1071.090909, yerr=12.168921 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/19/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1399806074880 0.0% 0.0% 100.0% flops 32 x 32 x 32 1917702897664 0.0% 0.0% 100.0% flops 22 x 9 x 32 1943685697536 0.0% 0.0% 100.0% flops 9 x 22 x 32 1949313282048 0.0% 0.0% 100.0% flops 22 x 22 x 32 2694948675584 0.0% 0.0% 100.0% flops 32 x 32 x 9 4338990710784 0.0% 0.0% 100.0% flops 32 x 32 x 22 5303210868736 0.0% 0.0% 100.0% flops 9 x 32 x 32 5347334946816 0.0% 0.0% 100.0% flops 22 x 32 x 32 6535631601664 0.0% 0.0% 100.0% flops 9 x 32 x 9 11360521838592 0.0% 0.0% 100.0% flops 22 x 32 x 9 14909203279872 0.0% 0.0% 100.0% flops 9 x 32 x 22 14909203279872 0.0% 0.0% 100.0% flops 22 x 32 x 22 19481731219456 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 92.091284E+12 0.0% 0.0% 100.0% flops max/rank 5.775835E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 6655257696 0.0% 0.0% 100.0% number of processed stacks 1930488 0.0% 0.0% 100.0% average stack size 0.0 0.0 3447.4 marketing flops 142.449033E+12 ------------------------------------------------------------------------------- # multiplications 2469 max memory usage/rank 1.492939E+09 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 237024 MPI messages size (bytes): total size 1.311056E+12 min size 0.000000E+00 max size 52.428800E+06 average size 5.531321E+06 MPI breakdown and total messages size (bytes): size <= 128 1452 0 128 < size <= 8192 0 0 8192 < size <= 32768 0 0 32768 < size <= 131072 132 8650752 131072 < size <= 4194304 112272 58862862336 4194304 < size <= 16777216 103248 541316874240 16777216 < size 19920 710867104896 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 8791 52. MP_Alltoall 9550 807217. MP_ISend 39460 2102267. MP_IRecv 39460 2101369. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4008 58120. MP_Allreduce 11097 1166. MP_Sync 87 MP_Alltoall 1688 18818124. MP_SendRecv 3780 122880. MP_ISendRecv 3780 122880. MP_Wait 15878 MP_ISend 10520 423668. MP_IRecv 10520 423668. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.015 0.033 172.441 172.441 qs_mol_dyn_low 1 2.0 0.003 0.004 172.047 172.060 qs_forces 11 3.9 0.004 0.005 171.939 171.943 qs_energies 11 4.9 0.002 0.002 164.251 164.258 scf_env_do_scf 11 5.9 0.001 0.002 146.815 146.819 velocity_verlet 10 3.0 0.001 0.001 112.725 112.728 scf_env_do_scf_inner_loop 115 6.6 0.003 0.008 111.121 111.122 dbcsr_multiply_generic 2469 12.5 0.175 0.188 73.722 74.315 qs_scf_new_mos 115 7.6 0.001 0.001 72.968 73.049 qs_scf_loop_do_ot 115 8.6 0.001 0.001 72.967 73.049 ot_scf_mini 115 9.6 0.003 0.004 68.608 68.698 multiply_cannon 2469 13.5 0.575 0.627 53.864 58.395 multiply_cannon_loop 2469 14.5 0.441 0.450 49.241 50.295 ot_mini 115 10.6 0.001 0.001 39.182 39.260 init_scf_loop 11 6.9 0.000 0.000 35.548 35.550 mp_waitall_1 123870 16.7 25.923 33.603 25.923 33.603 prepare_preconditioner 11 7.9 0.000 0.000 31.569 31.594 make_preconditioner 11 8.9 0.000 0.000 31.569 31.594 rebuild_ks_matrix 126 8.3 0.001 0.001 29.816 29.949 qs_ks_build_kohn_sham_matrix 126 9.3 0.017 0.018 29.816 29.948 make_full_inverse_cholesky 11 9.9 0.019 0.028 29.540 29.792 qs_ks_update_qs_env 126 7.6 0.001 0.001 26.930 27.053 multiply_cannon_multrec 9876 15.5 10.277 14.399 17.492 22.024 apply_preconditioner_dbcsr 126 12.6 0.000 0.000 19.364 19.895 apply_single 126 13.6 0.001 0.001 19.364 19.895 multiply_cannon_metrocomm3 9876 15.5 0.023 0.025 12.377 19.853 ot_diis_step 115 11.6 0.019 0.020 19.826 19.826 qs_ot_get_derivative 115 11.6 0.001 0.002 19.291 19.380 make_m2s 4938 13.5 0.067 0.072 16.322 18.559 make_images 4938 14.5 
2.311 2.686 16.026 18.261 cp_fm_cholesky_invert 11 10.9 18.078 18.084 18.078 18.084 qs_ot_get_p 126 10.4 0.001 0.001 16.531 16.643 qs_rho_update_rho_low 126 7.7 0.001 0.001 15.161 15.195 calculate_rho_elec 126 8.7 0.255 0.265 15.161 15.195 sum_up_and_integrate 126 10.3 0.179 0.188 15.048 15.094 integrate_v_rspace 126 11.3 0.004 0.006 14.868 14.924 qs_ot_p2m_diag 83 11.4 0.496 0.501 12.844 12.859 make_images_data 4938 15.5 0.053 0.060 9.981 12.431 multiply_cannon_sync_h2d 9876 15.5 11.352 12.157 11.352 12.157 hybrid_alltoall_any 5124 16.4 0.838 3.762 9.825 12.092 cp_dbcsr_syevd 83 12.4 0.005 0.005 11.726 11.728 init_scf_run 11 5.9 0.000 0.001 10.555 10.555 scf_env_initial_rho_setup 11 6.9 0.001 0.001 10.555 10.555 pw_transfer 1523 11.6 0.085 0.094 9.282 9.307 fft_wrap_pw1pw2 1271 12.7 0.010 0.011 9.061 9.089 cp_fm_diag_elpa 83 13.4 0.000 0.000 8.762 8.772 cp_fm_diag_elpa_base 83 14.4 8.531 8.601 8.759 8.768 grid_integrate_task_list 126 12.3 7.889 8.441 7.889 8.441 fft_wrap_pw1pw2_140 515 13.2 1.898 1.938 8.051 8.084 multiply_cannon_metrocomm1 9876 15.5 0.028 0.029 4.360 8.051 cp_fm_cholesky_decompose 22 10.9 7.823 7.945 7.823 7.945 qs_ot_get_derivative_diag 77 12.4 0.002 0.003 7.860 7.915 density_rs2pw 126 9.7 0.006 0.006 6.715 7.639 wfi_extrapolate 11 7.9 0.001 0.001 7.577 7.577 dbcsr_mm_accdrv_process 20442 16.1 2.548 3.421 6.853 7.521 mp_allgather_i34 2469 14.5 2.839 7.423 2.839 7.423 calculate_dm_sparse 126 9.5 0.001 0.001 6.003 6.101 grid_collocate_task_list 126 9.7 5.429 6.095 5.429 6.095 mp_alltoall_d11v 2399 14.1 4.898 5.956 4.898 5.956 fft3d_ps 1271 14.7 2.722 2.827 5.570 5.610 dbcsr_complete_redistribute 395 12.7 2.141 2.178 5.029 5.360 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 5.346 5.346 multiply_cannon_metrocomm4 7407 15.5 0.023 0.027 1.842 5.299 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.281 5.297 mp_irecv_dv 28424 15.9 1.808 5.215 1.808 5.215 rs_pw_transfer 1030 11.9 0.012 0.013 4.053 4.980 potential_pw2rs 126 12.3 0.026 0.027 4.350 4.361 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.214 4.253 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 3.551 3.858 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 3.782 3.798 qs_ot_get_orbitals 115 10.6 0.001 0.001 3.652 3.681 copy_dbcsr_to_fm 186 11.8 0.004 0.004 3.556 3.677 copy_fm_to_dbcsr 209 11.7 0.002 0.002 3.240 3.551 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="408", plot="h2o_256_md", label="(8n/2r/6t)", y=172.441000, yerr=0.000000 PlotPoint: name="409", plot="h2o_256_md_mem", label="(8n/2r/6t)", y=1391.090909, yerr=44.679575 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/20/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1410022950912 0.0% 0.0% 100.0% flops 32 x 32 x 32 1924145348608 0.0% 0.0% 100.0% flops 22 x 9 x 32 1957871443968 0.0% 0.0% 100.0% flops 9 x 22 x 32 1963544850432 0.0% 0.0% 100.0% flops 22 x 22 x 32 2714615709696 0.0% 0.0% 100.0% flops 32 x 32 x 9 4377645416448 0.0% 0.0% 100.0% flops 32 x 32 x 22 5350455508992 0.0% 0.0% 100.0% flops 9 x 32 x 32 5395653328896 0.0% 0.0% 100.0% flops 22 x 32 x 32 6594687401984 0.0% 0.0% 100.0% flops 9 x 32 x 9 
11444706349056 0.0% 0.0% 100.0% flops 22 x 32 x 9 15019187724288 0.0% 0.0% 100.0% flops 9 x 32 x 22 15019187724288 0.0% 0.0% 100.0% flops 22 x 32 x 22 19624853225472 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 92.796577E+12 0.0% 0.0% 100.0% flops max/rank 11.606413E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6705500544 0.0% 0.0% 100.0% number of processed stacks 1947808 0.0% 0.0% 100.0% average stack size 0.0 0.0 3442.6 marketing flops 143.507742E+12 ------------------------------------------------------------------------------- # multiplications 2485 max memory usage/rank 3.038294E+09 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 99400 MPI messages size (bytes): total size 1.127422E+12 min size 0.000000E+00 max size 104.857600E+06 average size 11.342272E+06 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 0 0 32768 < size <= 131072 44 2883584 131072 < size <= 4194304 44768 34745614336 4194304 < size <= 16777216 43984 376564613120 16777216 < size 10032 716108613552 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3949 59908. MP_Allreduce 10950 1507. MP_Sync 86 MP_Alltoall 1700 36954374. MP_SendRecv 1778 218624. MP_ISendRecv 1778 218624. MP_Wait 9728 MP_ISend 6360 1080477. MP_IRecv 6360 1080477. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.023 0.041 298.116 298.117 qs_mol_dyn_low 1 2.0 0.003 0.004 297.580 297.592 qs_forces 11 3.9 0.005 0.005 297.486 297.489 qs_energies 11 4.9 0.002 0.002 288.058 288.068 scf_env_do_scf 11 5.9 0.001 0.002 265.691 265.707 velocity_verlet 10 3.0 0.001 0.002 214.555 214.563 scf_env_do_scf_inner_loop 116 6.6 0.003 0.008 138.898 138.899 init_scf_loop 11 6.9 0.000 0.000 126.538 126.540 prepare_preconditioner 11 7.9 0.000 0.000 121.484 121.508 make_preconditioner 11 8.9 0.000 0.000 121.484 121.508 make_full_inverse_cholesky 11 9.9 0.036 0.039 96.284 118.652 qs_scf_new_mos 116 7.6 0.001 0.001 88.774 88.934 qs_scf_loop_do_ot 116 8.6 0.001 0.001 88.773 88.933 ot_scf_mini 116 9.6 0.003 0.004 84.027 84.082 dbcsr_multiply_generic 2485 12.5 0.217 0.229 82.311 82.852 cp_fm_upper_to_full 104 14.8 54.358 78.568 54.358 78.568 multiply_cannon 2485 13.5 0.703 0.763 58.227 58.586 multiply_cannon_loop 2485 14.5 0.470 0.479 54.624 55.887 dbcsr_complete_redistribute 393 12.7 4.010 4.045 30.856 44.321 ot_mini 116 10.6 0.001 0.001 43.724 43.778 copy_fm_to_dbcsr 208 11.6 0.001 0.002 27.497 40.960 transfer_fm_to_dbcsr 11 9.9 0.029 0.034 25.155 38.420 rebuild_ks_matrix 127 8.3 0.001 0.001 37.997 38.026 qs_ks_build_kohn_sham_matrix 127 9.3 0.017 0.018 37.996 38.026 mp_alltoall_i22 712 14.1 22.989 36.607 22.989 36.607 qs_ks_update_qs_env 127 7.6 0.001 0.001 34.914 34.955 cp_fm_cholesky_invert 11 10.9 32.992 32.998 32.992 32.998 mp_waitall_1 102768 16.8 27.904 31.503 27.904 31.503 qs_ot_get_p 127 10.4 0.001 0.001 24.919 24.952 qs_ot_get_derivative 116 11.6 0.002 0.002 
23.688 23.738 qs_ot_p2m_diag 82 11.4 0.868 0.873 21.039 21.067 make_m2s 4970 13.5 0.077 0.080 19.564 20.473 qs_rho_update_rho_low 127 7.7 0.001 0.001 20.243 20.271 calculate_rho_elec 127 8.7 0.480 0.480 20.242 20.271 make_images 4970 14.5 3.749 3.939 19.094 20.008 ot_diis_step 116 11.6 0.022 0.022 20.004 20.006 multiply_cannon_metrocomm3 9940 15.5 0.023 0.025 18.791 19.891 sum_up_and_integrate 127 10.3 0.319 0.322 19.515 19.596 apply_preconditioner_dbcsr 127 12.6 0.000 0.000 19.264 19.466 apply_single 127 13.6 0.001 0.001 19.263 19.465 cp_dbcsr_syevd 82 12.4 0.006 0.006 19.341 19.342 integrate_v_rspace 127 11.3 0.004 0.004 19.196 19.276 multiply_cannon_multrec 9940 15.5 10.403 12.214 17.748 17.792 cp_fm_diag_elpa 82 13.4 0.000 0.000 16.214 16.215 cp_fm_diag_elpa_base 82 14.4 11.768 13.330 16.210 16.211 multiply_cannon_sync_h2d 9940 15.5 15.519 15.535 15.519 15.535 init_scf_run 11 5.9 0.000 0.001 12.272 12.272 scf_env_initial_rho_setup 11 6.9 0.001 0.001 12.272 12.272 hybrid_alltoall_any 5155 16.4 1.293 3.027 10.490 12.068 make_images_data 4970 15.5 0.061 0.064 10.381 11.988 pw_transfer 1535 11.6 0.092 0.093 11.213 11.219 fft_wrap_pw1pw2 1281 12.7 0.011 0.012 10.979 10.986 mp_alltoall_d11v 2401 14.1 8.097 9.785 8.097 9.785 fft_wrap_pw1pw2_140 519 13.2 3.054 3.087 9.774 9.780 qs_ot_get_derivative_diag 76 12.4 0.002 0.002 9.166 9.206 wfi_extrapolate 11 7.9 0.001 0.001 9.121 9.121 grid_integrate_task_list 127 12.3 8.778 8.944 8.778 8.944 dbcsr_mm_accdrv_process 20590 16.0 3.690 5.580 7.109 8.901 cp_fm_cholesky_decompose 22 10.9 8.792 8.816 8.792 8.816 qs_energies_init_hamiltonians 11 5.9 0.001 0.003 8.066 8.067 density_rs2pw 127 9.7 0.006 0.006 7.815 7.982 calculate_dm_sparse 127 9.5 0.001 0.001 6.587 6.724 grid_collocate_task_list 127 9.7 6.427 6.493 6.427 6.493 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 6.359 6.417 rs_scatter_matrices 138 9.7 3.635 4.541 5.971 6.197 fft3d_ps 1281 14.7 2.820 2.829 6.086 6.118 copy_dbcsr_to_fm 185 11.7 0.004 0.004 6.012 6.063 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="410", plot="h2o_256_md", label="(8n/1r/12t)", y=298.117000, yerr=0.000000 PlotPoint: name="411", plot="h2o_256_md_mem", label="(8n/1r/12t)", y=2709.727273, yerr=166.445782 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/21/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 234.439236E+12 0.0% 0.0% 100.0% flops max/rank 2.766000E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 9634225188 0.0% 0.0% 100.0% number of processed stacks 419739 0.0% 0.0% 100.0% average stack size 0.0 0.0 22952.9 marketing flops 1.742116E+15 ------------------------------------------------------------------------------- # multiplications 111 max memory usage/rank 1.256055E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 458208 MPI messages size (bytes): total size 3.456111E+12 min size 0.000000E+00 max size 18.735064E+06 average size 7.542668E+06 MPI breakdown and total messages size (bytes): size <= 128 112896 0 128 < size <= 8192 0 0 8192 < size <= 32768 224 5687808 32768 < size <= 131072 10528 813356544 131072 < size <= 4194304 36422 76284728544 4194304 < size <= 16777216 294266 3312457683808 16777216 < size 3872 66548597808 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 1026 255669. MP_Allreduce 3139 6114. MP_Sync 4 MP_Alltoall 54 MP_SendRecv 285 19200. MP_ISendRecv 285 19200. MP_Wait 1017 MP_ISend 642 197829. MP_IRecv 642 197607. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.017 0.037 85.153 85.154 qs_energies 1 2.0 0.000 0.000 84.734 84.748 ls_scf 1 3.0 0.000 0.000 83.828 83.841 dbcsr_multiply_generic 111 6.7 0.015 0.017 72.723 72.870 multiply_cannon 111 7.7 0.017 0.020 55.887 57.117 multiply_cannon_loop 111 8.7 0.210 0.227 52.468 53.974 ls_scf_main 1 4.0 0.000 0.000 52.386 52.386 density_matrix_trs4 2 5.0 0.002 0.003 46.819 46.920 ls_scf_init_scf 1 4.0 0.000 0.000 28.393 28.394 ls_scf_init_matrix_S 1 5.0 0.000 0.000 27.247 27.310 matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 25.099 25.121 mp_waitall_1 11031 10.9 22.410 24.911 22.410 24.911 multiply_cannon_multrec 2664 9.7 8.211 8.997 15.627 17.340 multiply_cannon_sync_h2d 2664 9.7 13.557 14.843 13.557 14.843 make_m2s 222 7.7 0.009 0.012 13.145 13.659 make_images 222 8.7 0.102 0.112 13.124 13.639 multiply_cannon_metrocomm1 2664 9.7 0.010 0.011 9.652 12.944 make_images_data 222 9.7 0.005 0.006 7.675 8.296 multiply_cannon_metrocomm3 2664 9.7 0.010 0.011 5.427 8.111 dbcsr_mm_accdrv_process 4760 10.4 0.523 0.627 7.031 8.048 hybrid_alltoall_any 227 10.6 0.220 1.865 6.564 7.930 dbcsr_mm_accdrv_process_sort 4760 11.4 6.308 7.226 6.308 7.226 calculate_norms 4752 9.8 5.522 6.202 5.522 6.202 apply_matrix_preconditioner 6 5.3 0.000 0.000 5.023 5.167 mp_sum_l 887 5.1 3.146 4.744 3.146 4.744 multiply_cannon_metrocomm4 2442 9.7 0.012 0.015 2.053 3.600 mp_irecv_dv 6231 10.9 2.037 3.567 2.037 3.567 make_images_sizes 222 9.7 0.000 0.000 0.766 3.512 mp_alltoall_i44 222 10.7 0.766 3.512 0.766 3.512 dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 2.361 3.464 arnoldi_extremal 4 6.8 0.000 0.000 3.251 3.273 arnoldi_normal_ev 4 7.8 0.001 0.002 3.251 3.273 build_subspace 16 8.4 0.009 0.013 3.159 3.161 ls_scf_post 1 4.0 0.000 0.000 3.049 3.062 ls_scf_store_result 1 5.0 0.000 0.000 2.864 2.908 dbcsr_special_finalize 555 9.7 0.005 0.007 2.339 2.862 dbcsr_merge_single_wm 555 10.7 0.455 0.593 2.330 2.852 
 make_images_pack                   222  9.7  2.214  2.641   2.216   2.643
 dbcsr_sort_data                    658 11.4  2.133  2.608   2.133   2.608
 dbcsr_matrix_vector_mult           304  9.0  0.006  0.013   2.331   2.556
 dbcsr_matrix_vector_mult_local     304 10.0  2.070  2.458   2.072   2.459
 ls_scf_dm_to_ks                      2  5.0  0.000  0.000   2.340   2.421
 buffer_matrices_ensure_size        222  8.7  1.764  2.093   1.764   2.093
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   1.794   1.795
 rebuild_ks_matrix                    3  7.3  0.000  0.000   1.784   1.785
 qs_ks_build_kohn_sham_matrix         3  8.3  0.001  0.001   1.784   1.785
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="500", plot="h2o_32_nrep3_ls", label="(8n/12r/1t)", y=85.154000, yerr=0.000000
 PlotPoint: name="501", plot="h2o_32_nrep3_ls_mem", label="(8n/12r/1t)", y=1128.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/22/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    23 x    23 x    23       234439235724792     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         234.439236E+12     0.0%     0.0%   100.0%
 flops max/rank                        5.588524E+12     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                             9634225188     0.0%     0.0%   100.0%
 number of processed stacks                    368848     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0  26119.8
 marketing flops                       1.742116E+15
 -------------------------------------------------------------------------------
 # multiplications                        111
 max memory usage/rank             2.076967E+09
 # max total images/rank                    3
 # max 3D layers                            1
 # MPI messages exchanged              106560
 MPI messages size (bytes):
  total size                      2.699093E+12
  min size                        0.000000E+00
  max size                       72.286792E+06
  average size                   25.329324E+06
 MPI breakdown and total messages size (bytes):
             size <= 128               23040                    0
   128 < size <= 8192                      0                    0
  8192 < size <= 32768                     0                    0
 32768 < size <= 131072                 3264            325830144
 131072 < size <= 4194304               5280           3328561104
 4194304 < size <= 16777216            12709         156766962056
 16777216 < size                       62267        2538670978840
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             1026                 266696.
 MP_Allreduce         3138                  10075.
 MP_Sync                 4
 MP_Alltoall            47                15335933.
 MP_SendRecv           141                  57600.
 MP_ISendRecv          141                  57600.
 MP_Wait               687
 MP_ISend              462                 414589.
 MP_IRecv              462                 413870.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.028  0.041  91.433  91.434
 qs_energies                          1  2.0  0.000  0.000  90.944  90.948
 ls_scf                               1  3.0  0.000  0.001  89.568  89.571
 dbcsr_multiply_generic             111  6.7  0.018  0.031  75.504  75.773
 multiply_cannon                    111  7.7  0.030  0.062  53.492  57.276
 ls_scf_main                          1  4.0  0.000  0.005  55.140  55.149
 multiply_cannon_loop               111  8.7  0.118  0.125  50.206  53.086
 density_matrix_trs4                  2  5.0  0.002  0.004  49.325  49.546
 ls_scf_init_scf                      1  4.0  0.001  0.003  30.936  30.937
 mp_waitall_1                      9105 10.9 21.219 30.328  21.219  30.328
 ls_scf_init_matrix_S                 1  5.0  0.000  0.000  29.705  29.821
 matrix_sqrt_Newton_Schulz            2  6.5  0.001  0.001  27.266  27.279
 multiply_cannon_multrec           1332  9.7 13.209 16.759  22.530  27.253
 multiply_cannon_metrocomm3        1332  9.7  0.007  0.009  11.743  21.088
 make_m2s                           222  7.7  0.008  0.011  15.409  16.116
 make_images                        222  8.7  1.597  1.939  15.378  16.087
 dbcsr_mm_accdrv_process           4041 10.4  0.291  0.455   8.924  10.553
 dbcsr_mm_accdrv_process_sort      4041 11.4  8.514 10.099   8.514  10.099
 make_images_data                   222  9.7  0.004  0.005   8.863   9.895
 hybrid_alltoall_any                227 10.6  0.523  2.420   8.256   9.297
 mp_sum_l                           887  5.1  5.295  8.059   5.295   8.059
 multiply_cannon_metrocomm4        1221  9.7  0.007  0.009   3.199   7.661
 mp_irecv_dv                       3311 11.0  3.179  7.597   3.179   7.597
 calculate_norms                   2376  9.8  6.025  6.818   6.025   6.818
 dbcsr_multiply_generic_mpsum_f      86  7.8  0.000  0.000   4.021   6.520
 multiply_cannon_sync_h2d          1332  9.7  4.843  5.987   4.843   5.987
 apply_matrix_preconditioner          6  5.3  0.000  0.000   5.081   5.310
 arnoldi_extremal                     4  6.8  0.000  0.000   4.634   4.666
 arnoldi_normal_ev                    4  7.8  0.001  0.004   4.634   4.665
 build_subspace                      16  8.4  0.017  0.039   4.363   4.365
 ls_scf_post                          1  4.0  0.000  0.001   3.492   3.496
 dbcsr_matrix_vector_mult           304  9.0  0.010  0.021   3.119   3.355
 ls_scf_store_result                  1  5.0  0.000  0.000   3.205   3.313
 dbcsr_matrix_vector_mult_local     304 10.0  2.743  3.227   2.745   3.229
 multiply_cannon_metrocomm1        1332  9.7  0.003  0.004   1.251   2.956
 ls_scf_dm_to_ks                      2  5.0  0.000  0.001   2.654   2.744
 mp_allgather_i34                   111  8.7  0.977  2.470   0.977   2.470
 make_images_pack                   222  9.7  2.043  2.420   2.046   2.423
 dbcsr_sort_data                    436 11.2  1.811  2.083   1.811   2.083
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   1.898   1.901
 rebuild_ks_matrix                    3  7.3  0.000  0.000   1.886   1.888
 qs_ks_build_kohn_sham_matrix         3  8.3  0.001  0.002   1.886   1.888
 dbcsr_data_new                    4174 10.1  1.612  1.858   1.612   1.858
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="502", plot="h2o_32_nrep3_ls", label="(8n/6r/2t)", y=91.434000, yerr=0.000000
 PlotPoint: name="503", plot="h2o_32_nrep3_ls_mem", label="(8n/6r/2t)", y=1726.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/23/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    23 x    23 x    23       234439235724792     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         234.439236E+12     0.0%     0.0%   100.0%
 flops max/rank                        8.404608E+12     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                             9634225188     0.0%     0.0%   100.0%
 number of processed stacks                    353133     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0  27282.1
 marketing flops                       1.742118E+15
 -------------------------------------------------------------------------------
 # multiplications                        111
 max memory usage/rank             2.780959E+09
 # max total images/rank                    2
 # max 3D layers                            1
 # MPI messages exchanged               46176
 MPI messages size (bytes):
  total size                      1.924064E+12
  min size                        0.000000E+00
  max size                      108.059888E+06
  average size                   41.668048E+06
 MPI breakdown and total messages size (bytes):
             size <= 128                9984                    0
   128 < size <= 8192                      0                    0
  8192 < size <= 32768                     0                    0
 32768 < size <= 131072                    0                    0
 131072 < size <= 4194304               3328           1170063360
 4194304 < size <= 16777216             1870          19378539600
 16777216 < size                       30994        1903514987232
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             1026                 265470.
 MP_Allreduce         3138                  10896.
 MP_Sync                 4
 MP_Alltoall            47                23526250.
 MP_SendRecv            93                  57600.
 MP_ISendRecv           93                  57600.
 MP_Wait               639
 MP_ISend              462                 560046.
 MP_IRecv              462                 560662.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.035  0.054  95.034  95.035
 qs_energies                          1  2.0  0.000  0.000  94.411  94.414
 ls_scf                               1  3.0  0.000  0.001  92.997  93.001
 dbcsr_multiply_generic             111  6.7  0.016  0.017  77.536  77.839
 ls_scf_main                          1  4.0  0.000  0.002  58.338  58.353
 multiply_cannon                    111  7.7  0.044  0.099  53.268  57.027
 multiply_cannon_loop               111  8.7  0.101  0.106  49.732  54.191
 density_matrix_trs4                  2  5.0  0.003  0.009  52.272  52.456
 mp_waitall_1                      7281 11.0 24.230 34.582  24.230  34.582
 ls_scf_init_scf                      1  4.0  0.000  0.002  31.156  31.158
 ls_scf_init_matrix_S                 1  5.0  0.000  0.000  29.941  30.021
 matrix_sqrt_Newton_Schulz            2  6.5  0.001  0.001  27.544  27.553
 multiply_cannon_multrec            888  9.7 12.672 15.369  21.309  24.600
 multiply_cannon_metrocomm3         888  9.7  0.004  0.005  11.215  23.827
 make_m2s                           222  7.7  0.007  0.008  17.129  18.393
 make_images                        222  8.7  1.987  2.310  17.090  18.352
 make_images_data                   222  9.7  0.004  0.005   9.812  10.899
 hybrid_alltoall_any                227 10.6  0.621  2.864   9.513  10.830
 dbcsr_mm_accdrv_process           3754 10.4  0.247  0.419   8.151   9.408
 mp_sum_l                           887  5.1  5.541  9.392   5.541   9.392
 dbcsr_mm_accdrv_process_sort      3754 11.4  7.780  8.989   7.780   8.989
 dbcsr_multiply_generic_mpsum_f      86  7.8  0.000  0.000   4.299   7.547
 multiply_cannon_sync_h2d           888  9.7  6.023  7.475   6.023   7.475
 multiply_cannon_metrocomm4         777  9.7  0.004  0.005   2.440   7.009
 mp_irecv_dv                       2335 11.1  2.425  6.956   2.425   6.956
 multiply_cannon_metrocomm1         888  9.7  0.003  0.003   3.832   6.567
 apply_matrix_preconditioner          6  5.3  0.000  0.000   4.951   5.170
 arnoldi_extremal                     4  6.8  0.000  0.000   5.104   5.116
 arnoldi_normal_ev                    4  7.8  0.001  0.004   5.104   5.116
 build_subspace                      16  8.4  0.014  0.020   4.805   4.817
 calculate_norms                   1584  9.8  4.252  4.610   4.252   4.610
 mp_allgather_i34                   111  8.7  1.364  3.816   1.364   3.816
 dbcsr_matrix_vector_mult           304  9.0  0.010  0.021   3.420   3.748
 dbcsr_matrix_vector_mult_local     304 10.0  3.021  3.589   3.023   3.591
 ls_scf_post                          1  4.0  0.000  0.001   3.503   3.507
 ls_scf_store_result                  1  5.0  0.000  0.000   3.252   3.326
 ls_scf_dm_to_ks                      2  5.0  0.000  0.001   2.891   2.999
 dbcsr_sort_data                    325 11.1  1.901  2.201   1.901   2.201
 make_images_sizes                  222  9.7  0.000  0.000   0.959   2.153
 mp_alltoall_i44                    222 10.7  0.959  2.153   0.959   2.153
 make_images_pack                   222  9.7  1.811  2.107   1.814   2.110
 dbcsr_data_release                9322 10.9  1.349  2.043   1.349   2.043
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   1.935   1.937
 rebuild_ks_matrix                    3  7.3  0.000  0.000   1.916   1.918
 qs_ks_build_kohn_sham_matrix         3  8.3  0.001  0.006   1.916   1.918
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="504", plot="h2o_32_nrep3_ls", label="(8n/4r/3t)", y=95.035000, yerr=0.000000
 PlotPoint: name="505", plot="h2o_32_nrep3_ls_mem", label="(8n/4r/3t)", y=2239.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/24/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    23 x    23 x    23       234439235724792     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         234.439236E+12     0.0%     0.0%   100.0%
 flops max/rank                       10.747127E+12     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                             9634225188     0.0%     0.0%   100.0%
 number of processed stacks                    369794     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0  26053.0
 marketing flops                       1.742116E+15
 -------------------------------------------------------------------------------
 # multiplications                        111
 max memory usage/rank             3.320771E+09
 # max total images/rank                    3
 # max 3D layers                            1
 # MPI messages exchanged               50616
 MPI messages size (bytes):
  total size                      1.536549E+12
  min size                        0.000000E+00
  max size                       72.286792E+06
  average size                   30.356986E+06
 MPI breakdown and total messages size (bytes):
             size <= 128               10368                    0
   128 < size <= 8192                      0                    0
  8192 < size <= 32768                     0                    0
 32768 < size <= 131072                 1056            104411904
 131072 < size <= 4194304               3168            831638784
 4194304 < size <= 16777216             3103          33613273640
 16777216 < size                       32921        1501999894888
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             1026                 266696.
 MP_Allreduce         3138                  13030.
 MP_Sync                 4
 MP_Alltoall            47                30278988.
 MP_SendRecv            69                  86400.
 MP_ISendRecv           69                  86400.
 MP_Wait               531
 MP_ISend              378                 823502.
 MP_IRecv              378                 823753.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.035  0.054  97.262  97.262
 qs_energies                          1  2.0  0.000  0.000  96.636  96.643
 ls_scf                               1  3.0  0.000  0.000  95.002  95.010
 dbcsr_multiply_generic             111  6.7  0.017  0.018  78.684  78.991
 ls_scf_main                          1  4.0  0.000  0.000  59.170  59.171
 multiply_cannon                    111  7.7  0.056  0.133  51.944  56.157
 density_matrix_trs4                  2  5.0  0.002  0.003  52.943  53.103
 multiply_cannon_loop               111  8.7  0.116  0.125  46.820  49.798
 ls_scf_init_scf                      1  4.0  0.000  0.000  32.527  32.528
 ls_scf_init_matrix_S                 1  5.0  0.000  0.000  31.307  31.373
 mp_waitall_1                      6369 11.0 22.812 28.967  22.812  28.967
 matrix_sqrt_Newton_Schulz            2  6.5  0.001  0.001  28.817  28.831
 multiply_cannon_multrec           1332  9.7 14.210 16.924  22.177  24.876
 make_m2s                           222  7.7  0.008  0.009  21.028  22.487
 make_images                        222  8.7  3.151  3.613  20.977  22.438
 multiply_cannon_metrocomm3        1332  9.7  0.003  0.004   9.484  17.417
 make_images_data                   222  9.7  0.004  0.005  11.720  13.248
 hybrid_alltoall_any                227 10.6  0.803  3.794  11.028  12.975
 dbcsr_mm_accdrv_process           3641 10.4  0.197  0.400   7.616   9.177
 dbcsr_mm_accdrv_process_sort      3641 11.4  7.242  8.757   7.242   8.757
 mp_sum_l                           887  5.1  4.127  7.741   4.127   7.741
 multiply_cannon_sync_h2d          1332  9.7  5.430  6.537   5.430   6.537
 dbcsr_multiply_generic_mpsum_f      86  7.8  0.000  0.000   3.158   6.127
 multiply_cannon_metrocomm4        1110  9.7  0.004  0.007   2.066   5.974
 mp_irecv_dv                       3229 10.9  2.044  5.899   2.044   5.899
 multiply_cannon_metrocomm1        1332  9.7  0.003  0.003   2.447   5.417
 arnoldi_extremal                     4  6.8  0.000  0.000   5.194   5.206
 arnoldi_normal_ev                    4  7.8  0.001  0.004   5.194   5.205
 build_subspace                      16  8.4  0.014  0.021   4.859   4.868
 apply_matrix_preconditioner          6  5.3  0.000  0.000   4.586   4.761
 mp_allgather_i34                   111  8.7  2.236  4.667   2.236   4.667
 calculate_norms                   2376  9.8  4.236  4.599   4.236   4.599
 dbcsr_matrix_vector_mult           304  9.0  0.010  0.021   3.569   3.878
 dbcsr_matrix_vector_mult_local     304 10.0  3.192  3.696   3.194   3.698
 dbcsr_sort_data                    658 11.4  3.048  3.504   3.048   3.504
 ls_scf_post                          1  4.0  0.000  0.000   3.305   3.312
 dbcsr_special_finalize             555  9.7  0.006  0.007   2.804   3.235
 dbcsr_merge_single_wm              555 10.7  0.538  0.664   2.796   3.226
 ls_scf_dm_to_ks                      2  5.0  0.000  0.000   3.079   3.126
 ls_scf_store_result                  1  5.0  0.000  0.000   3.029   3.109
 dbcsr_data_release               10477 10.7  1.579  2.436   1.579   2.436
 dbcsr_finalize                     304  7.8  0.050  0.062   1.809   2.040
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   1.961   1.963
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="506", plot="h2o_32_nrep3_ls", label="(8n/3r/4t)", y=97.262000, yerr=0.000000
 PlotPoint: name="507", plot="h2o_32_nrep3_ls_mem", label="(8n/3r/4t)", y=2738.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/25/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    23 x    23 x    23       234439235724792     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         234.439236E+12     0.0%     0.0%   100.0%
 flops max/rank                       15.383312E+12     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                             9634225188     0.0%     0.0%   100.0%
 number of processed stacks                    336818     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0  28603.7
 marketing flops                       1.742118E+15
 -------------------------------------------------------------------------------
 # multiplications                        111
 max memory usage/rank             4.660462E+09
 # max total images/rank                    1
 # max 3D layers                            1
 # MPI messages exchanged               10656
 MPI messages size (bytes):
  total size                      1.149035E+12
  min size                        0.000000E+00
  max size                      203.538048E+06
  average size                  107.829832E+06
 MPI breakdown and total messages size (bytes):
             size <= 128                2304                    0
   128 < size <= 8192                      0                    0
  8192 < size <= 32768                     0                    0
 32768 < size <= 131072                    0                    0
 131072 < size <= 4194304                768            702038016
 4194304 < size <= 16777216                0                    0
 16777216 < size                        7584        1148332810224
 -------------------------------------------------------------------------------
 - - -             DBCSR MESSAGE PASSING PERFORMANCE                       - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Bcast                2                     12.
 MP_Allreduce          705                    128.
 MP_Alltoall           310                12920694.
 MP_ISend             1776                40180424.
 MP_IRecv             1776                40465030.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             1026                 265558.
 MP_Allreduce         3129                  15263.
 MP_Sync                 4
 MP_Alltoall            47                46208988.
 MP_SendRecv            45                 115200.
 MP_ISendRecv           45                 115200.
 MP_Wait               528
 MP_ISend              420                 924980.
 MP_IRecv              420                 924528.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.041  0.057  99.583  99.584
 qs_energies                          1  2.0  0.000  0.000  98.817  98.822
 ls_scf                               1  3.0  0.000  0.000  96.870  96.879
 dbcsr_multiply_generic             111  6.7  0.018  0.018  77.934  78.147
 ls_scf_main                          1  4.0  0.000  0.000  62.068  62.070
 multiply_cannon                    111  7.7  0.108  0.202  55.400  60.202
 density_matrix_trs4                  2  5.0  0.002  0.003  54.844  54.947
 multiply_cannon_loop               111  8.7  0.069  0.076  50.787  52.511
 mp_waitall_1                      5436 11.0 26.145 31.693  26.145  31.693
 ls_scf_init_scf                      1  4.0  0.000  0.000  31.182  31.187
 ls_scf_init_matrix_S                 1  5.0  0.000  0.000  29.895  29.929
 matrix_sqrt_Newton_Schulz            2  6.5  0.001  0.001  27.560  27.573
 multiply_cannon_multrec            444  9.7 14.159 16.413  21.275  23.372
 make_m2s                           222  7.7  0.006  0.007  17.690  20.269
 make_images                        222  8.7  3.735  4.446  17.627  20.209
 multiply_cannon_metrocomm1         444  9.7  0.002  0.002  10.789  16.286
 multiply_cannon_metrocomm3         444  9.7  0.001  0.002   6.297  15.150
 make_images_data                   222  9.7  0.003  0.004   9.864  12.396
 hybrid_alltoall_any                227 10.6  0.791  3.790   9.666  12.146
 dbcsr_mm_accdrv_process           3003 10.4  0.188  0.348   6.826   7.947
 multiply_cannon_sync_h2d           444  9.7  6.560  7.603   6.560   7.603
 dbcsr_mm_accdrv_process_sort      3003 11.4  6.509  7.602   6.509   7.602
 mp_allgather_i34                   111  8.7  2.826  7.017   2.826   7.017
 arnoldi_extremal                     4  6.8  0.000  0.000   6.016   6.057
 arnoldi_normal_ev                    4  7.8  0.001  0.004   6.016   6.057
 build_subspace                      16  8.4  0.015  0.020   5.417   5.425
 mp_sum_l                           887  5.1  2.875  4.846   2.875   4.846
 apply_matrix_preconditioner          6  5.3  0.000  0.000   4.630   4.773
 dbcsr_matrix_vector_mult           304  9.0  0.011  0.021   4.193   4.383
 dbcsr_matrix_vector_mult_local     304 10.0  3.704  4.184   3.706   4.186
 multiply_cannon_metrocomm4         333  9.7  0.001  0.002   1.633   3.909
 mp_irecv_dv                       1241 11.2  1.616  3.885   1.616   3.885
 calculate_norms                    792  9.8  3.547  3.707   3.547   3.707
 ls_scf_dm_to_ks                      2  5.0  0.000  0.000   3.608   3.707
 ls_scf_post                          1  4.0  0.000  0.000   3.619   3.623
 dbcsr_multiply_generic_mpsum_f      86  7.8  0.000  0.000   1.973   3.528
 make_images_sizes                  222  9.7  0.000  0.000   1.150   3.514
 mp_alltoall_i44                    222 10.7  1.150  3.514   1.150   3.514
 ls_scf_store_result                  1  5.0  0.000  0.000   3.407   3.439
 dbcsr_finalize                     304  7.8  0.062  0.078   2.208   2.361
 dbcsr_merge_all                    275  8.9  0.475  0.540   2.057   2.196
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   2.092   2.093
 rebuild_ks_matrix                    3  7.3  0.000  0.000   2.059   2.061
 qs_ks_build_kohn_sham_matrix         3  8.3  0.001  0.001   2.059   2.061
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="508", plot="h2o_32_nrep3_ls", label="(8n/2r/6t)", y=99.584000, yerr=0.000000
 PlotPoint: name="509", plot="h2o_32_nrep3_ls_mem", label="(8n/2r/6t)", y=3701.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/26/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    23 x    23 x    23       234439235724792     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         234.439236E+12     0.0%     0.0%   100.0%
 flops max/rank                       30.358840E+12     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                             9634225188     0.0%     0.0%   100.0%
 number of processed stacks                    339931     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0  28341.7
 marketing flops                       1.742118E+15
 -------------------------------------------------------------------------------
 # multiplications                        111
 max memory usage/rank             8.735584E+09
 # max total images/rank                    2
 # max 3D layers                            1
 # MPI messages exchanged                4440
 MPI messages size (bytes):
  total size                    770.525954E+09
  min size                        0.000000E+00
  max size                      399.069120E+06
  average size                  173.541888E+06
 MPI breakdown and total messages size (bytes):
             size <= 128                 640                    0
   128 < size <= 8192                      0                    0
  8192 < size <= 32768                     0                    0
 32768 < size <= 131072                    0                    0
 131072 < size <= 4194304                640            468025344
 4194304 < size <= 16777216                0                    0
 16777216 < size                        3160         770057961712
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             1026                 284111.
 MP_Allreduce         3123                  21388.
 MP_Sync                 4
 MP_Alltoall            47                88727262.
 MP_SendRecv            42                 732600.
 MP_ISendRecv           42                 732600.
 MP_Wait               267
 MP_ISend              180                3337386.
 MP_IRecv              180                3339494.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.076  0.097 108.096 108.097
 qs_energies                          1  2.0  0.000  0.000 106.723 106.738
 ls_scf                               1  3.0  0.000  0.000 103.785 103.799
 dbcsr_multiply_generic             111  6.7  0.024  0.026  77.227  77.336
 ls_scf_main                          1  4.0  0.000  0.000  65.922  65.923
 density_matrix_trs4                  2  5.0  0.002  0.003  56.659  56.708
 multiply_cannon                    111  7.7  0.139  0.224  49.646  51.493
 multiply_cannon_loop               111  8.7  0.068  0.071  46.254  47.169
 ls_scf_init_scf                      1  4.0  0.000  0.000  34.199  34.200
 ls_scf_init_matrix_S                 1  5.0  0.000  0.000  32.618  32.632
 matrix_sqrt_Newton_Schulz            2  6.5  0.001  0.001  29.777  29.790
 mp_waitall_1                      4527 11.1 21.989 25.556  21.989  25.556
 make_m2s                           222  7.7  0.007  0.007  24.029  24.921
 make_images                        222  8.7  4.593  4.970  23.919  24.810
 multiply_cannon_multrec            444  9.7 17.971 18.631  22.634  23.204
 hybrid_alltoall_any                227 10.6  1.662  3.636  13.016  15.914
 make_images_data                   222  9.7  0.004  0.004  13.250  15.580
 multiply_cannon_metrocomm3         444  9.7  0.001  0.001  10.160  10.745
 multiply_cannon_sync_h2d           444  9.7  8.851  8.900   8.851   8.900
 arnoldi_extremal                     4  6.8  0.000  0.000   7.419   7.434
 arnoldi_normal_ev                    4  7.8  0.002  0.008   7.419   7.433
 build_subspace                      16  8.4  0.026  0.036   6.861   6.871
 dbcsr_matrix_vector_mult           304  9.0  0.017  0.033   5.510   5.654
 dbcsr_matrix_vector_mult_local     304 10.0  5.060  5.367   5.063   5.370
 ls_scf_dm_to_ks                      2  5.0  0.000  0.000   5.229   5.326
 apply_matrix_preconditioner          6  5.3  0.000  0.000   5.009   5.258
 dbcsr_mm_accdrv_process           1814 10.4  0.254  0.318   4.496   4.616
 dbcsr_mm_accdrv_process_sort      1814 11.4  4.196  4.322   4.196   4.322
 ls_scf_post                          1  4.0  0.000  0.000   3.664   3.678
 make_images_sizes                  222  9.7  0.000  0.000   1.426   3.496
 mp_alltoall_i44                    222 10.7  1.426  3.496   1.426   3.496
 ls_scf_store_result                  1  5.0  0.000  0.000   3.415   3.424
 calculate_norms                    792  9.8  3.236  3.271   3.236   3.271
 dbcsr_finalize                     304  7.8  0.082  0.090   3.092   3.178
 mp_allgather_i34                   111  8.7  1.000  3.088   1.000   3.088
 dbcsr_merge_all                    275  8.9  0.894  0.922   2.877   2.957
 qs_energies_init_hamiltonians        1  3.0  0.006  0.010   2.908   2.908
 dbcsr_complete_redistribute          5  7.6  1.448  1.477   2.775   2.906
 dbcsr_data_release               12724 10.6  2.330  2.867   2.330   2.867
 matrix_ls_to_qs                      2  6.0  0.000  0.000   2.439   2.576
 dbcsr_sort_data                    325 11.1  2.440  2.503   2.440   2.503
 qs_ks_update_qs_env                  3  6.3  0.000  0.000   2.492   2.493
 rebuild_ks_matrix                    3  7.3  0.000  0.000   2.426   2.427
 qs_ks_build_kohn_sham_matrix         3  8.3  0.001  0.001   2.426   2.427
 dbcsr_new_transposed                 4  7.5  0.243  0.249   2.301   2.320
 dbcsr_frobenius_norm                74  6.6  2.058  2.136   2.209   2.251
 dbcsr_add_d                        103  6.2  0.000  0.000   2.138   2.217
 dbcsr_add_anytype                  103  7.2  0.860  0.895   2.138   2.217
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="510", plot="h2o_32_nrep3_ls", label="(8n/1r/12t)", y=108.097000, yerr=0.000000
 PlotPoint: name="511", plot="h2o_32_nrep3_ls_mem", label="(8n/1r/12t)", y=6829.000000, yerr=0.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ~~~~~~~~~ RESULT ~~~~~~~~~
 RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/d6d429ae51f26379f2dc33b64015d7bb356f05fe_performance_tests/27/result.log
 @@@@@@@@@@ Run number: 1 @@@@@@@@@@
 -------------------------------------------------------------------------------
 - - -                        DBCSR STATISTICS                             - - -
 -------------------------------------------------------------------------------
 COUNTER                                      TOTAL     BLAS     SMM     ACC
 flops    32 x    32 x    32         7009386627072     0.0%     0.0%   100.0%
 flops     9 x     9 x    32         7335108845568     0.0%     0.0%   100.0%
 flops     9 x    22 x    32         9866241589248     0.0%     0.0%   100.0%
 flops    22 x     9 x    32         9884108906496     0.0%     0.0%   100.0%
 flops    22 x    22 x    32        13354440523776     0.0%     0.0%   100.0%
 flops    32 x    32 x     9        20607185977344     0.0%     0.0%   100.0%
 flops    32 x    32 x    22        25186560638976     0.0%     0.0%   100.0%
 flops     9 x    32 x    32        28458319085568     0.0%     0.0%   100.0%
 flops    22 x    32 x    32        34782389993472     0.0%     0.0%   100.0%
 flops     9 x    32 x     9        42881542373376     0.0%     0.0%   100.0%
 flops    22 x    32 x     9        55680402235392     0.0%     0.0%   100.0%
 flops     9 x    32 x    22        55680402235392     0.0%     0.0%   100.0%
 flops    22 x    32 x    22        72328573419520     0.0%     0.0%   100.0%
 flops inhomo. stacks                               0     0.0%     0.0%     0.0%
 flops total                         383.054662E+12     0.0%     0.0%   100.0%
 flops max/rank                      733.641090E+09     0.0%     0.0%   100.0%
 matmuls inhomo. stacks                             0     0.0%     0.0%     0.0%
 matmuls total                            26899403712     0.0%     0.0%   100.0%
 number of processed stacks                 118860288     0.0%     0.0%   100.0%
 average stack size                                       0.0      0.0    226.3
 marketing flops                       780.439111E+12
 -------------------------------------------------------------------------------
 # multiplications                       1445
 max memory usage/rank           582.258688E+06
 # max total images/rank                    3
 # max 3D layers                            1
 # MPI messages exchanged           102097920
 MPI messages size (bytes):
  total size                     37.227590E+12
  min size                        0.000000E+00
  max size                        4.551360E+06
  average size                  364.626312E+03
 MPI breakdown and total messages size (bytes):
             size <= 128              731472                    0
   128 < size <= 8192               11922720          97670922240
  8192 < size <= 32768              24718992         614677610496
 32768 < size <= 131072             20000256        1970081366016
 131072 < size <= 4194304           42515668       24886801223040
 4194304 < size <= 16777216          2208812        9656099886720
 16777216 < size                           0                    0
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                 MESSAGE PASSING PERFORMANCE                         - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             4640                  76559.
 MP_Allreduce        13232                   2081.
 MP_Sync              1064
 MP_Alltoall          2588               510352365.
 MP_SendRecv        168740                  11136.
 MP_ISendRecv        92040                  11136.
 MP_Wait            102830
 MP_comm_split          40
 MP_ISend            26090                  85106.
 MP_IRecv            37890                  59644.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                           T I M I N G                               - - -
 -------------------------------------------------------------------------------
 SUBROUTINE                       CALLS  ASD    SELF TIME        TOTAL TIME
                                MAXIMUM      AVERAGE  MAXIMUM  AVERAGE  MAXIMUM
 CP2K                                 1  1.0  0.018  0.052 226.154 226.155
 qs_mol_dyn_low                       1  2.0  0.003  0.011 225.198 225.221
 qs_forces                            5  3.8  0.004  0.005 225.127 225.128
 qs_energies                          5  4.8  0.003  0.005 222.135 222.169
 scf_env_do_scf                       5  5.8  0.000  0.001 205.318 205.320
 scf_env_do_scf_inner_loop          105  6.6  0.003  0.010 179.001 179.002
 qs_scf_new_mos                     105  7.6  0.000  0.001 138.686 138.867
 qs_scf_loop_do_ot                  105  8.6  0.001  0.001 138.686 138.866
 dbcsr_multiply_generic            1445 12.2  0.122  0.129 132.950 133.449
 ot_scf_mini                        105  9.6  0.003  0.003 129.024 129.234
 multiply_cannon                   1445 13.2  0.274  0.288 114.812 116.920
 multiply_cannon_loop              1445 14.2  2.415  2.526 113.213 114.652
 velocity_verlet                      4  3.0  0.001  0.003 105.719 105.719
 ot_mini                            105 10.6  0.001  0.001  58.607  58.711
 multiply_cannon_multrec          69360 15.2 31.780 37.252  39.417  44.541
 mp_waitall_1                    488190 16.1 33.251 41.201  33.251  41.201
 qs_ot_get_p                        112 10.4  0.001  0.001  40.834  41.138
 qs_ot_get_derivative                55 11.6  0.001  0.001  37.055  37.185
 multiply_cannon_sync_h2d         69360 15.2 30.782 34.629  30.782  34.629
 multiply_cannon_metrocomm3       69360 15.2  0.200  0.212  24.543  32.974
 qs_ot_p2m_diag                      40 11.0  0.020  0.031  30.029  30.103
 rebuild_ks_matrix                  110  8.4  0.000  0.000  28.878  29.080
 qs_ks_build_kohn_sham_matrix       110  9.4  0.012  0.018  28.877  29.080
 cp_dbcsr_syevd                      40 12.0  0.002  0.002  26.989  26.990
 qs_ks_update_qs_env                112  7.6  0.001  0.001  26.482  26.668
 init_scf_loop                        7  6.6  0.000  0.002  26.287  26.288
 apply_preconditioner_dbcsr          62 12.6  0.000  0.000  22.897  23.142
 apply_single                        62 13.6  0.000  0.000  22.897  23.142
 cp_fm_syevd                         40 13.0  0.000  0.001  21.944  22.083
 prepare_preconditioner               7  7.6  0.000  0.000  21.469  21.515
 make_preconditioner                  7  8.6  0.000  0.000  21.469  21.515
 ot_new_cg_direction                 55 11.6  0.001  0.001  20.877  20.877
 qs_rho_update_rho_low              110  7.6  0.001  0.001  17.329  17.714
 calculate_rho_elec                 110  8.6  0.030  0.034  17.328  17.713
 cp_fm_redistribute_end              40 14.0  8.656 17.275   8.661  17.276
 cp_fm_syevd_base                    40 14.0  8.607 17.228   8.607  17.228
 make_full_inverse_cholesky           7  9.6  0.000  0.000  14.564  14.638
 qs_ot_get_orbitals                 105 10.6  0.001  0.001  14.428  14.610
 init_scf_run                         5  5.8  0.000  0.000  14.120  14.121
 scf_env_initial_rho_setup            5  6.8  0.001  0.003  14.120  14.121
 qs_ot_get_derivative_taylor         37 12.8  0.001  0.001  13.505  13.577
 rs_pw_transfer                     690 11.5  0.011  0.013  11.442  12.735
 density_rs2pw                      110  9.6  0.006  0.008  11.336  12.578
 mp_sum_l                          4764 12.2 11.578 12.370  11.578  12.370
 pw_transfer                       1645 12.4  0.083  0.104  11.855  12.037
 fft_wrap_pw1pw2                   1425 13.5  0.013  0.016  11.713  11.893
 calculate_dm_sparse                110  9.5  0.000  0.001  11.208  11.376
 qs_vxc_create                      110 10.4  0.002  0.004  10.309  10.338
 cp_fm_cholesky_invert                7 10.6 10.296 10.303  10.296  10.303
 fft_wrap_pw1pw2_240                915 15.0  1.209  1.287   9.952  10.145
 qs_ot_get_derivative_diag           18 12.0  0.000  0.000   9.876   9.946
 cp_dbcsr_sm_fm_multiply             15  9.3  0.001  0.002   9.662   9.680
 acc_transpose_blocks             69360 15.2  0.371  0.393   8.829   9.483
 check_diag                          80 13.5  8.587  8.857   9.298   9.438
 cp_dbcsr_sm_fm_multiply_core        15 10.3  0.000  0.000   9.282   9.338
 calculate_first_density_matrix       1  7.0  0.000  0.001   9.230   9.247
 sum_up_and_integrate                60 10.3  0.028  0.031   8.617   8.628
 integrate_v_rspace                  60 11.3  0.002  0.002   8.589   8.601
 dbcsr_mm_accdrv_process         154766 15.8  4.032  4.239   7.506   8.333
 fft3d_pb                           915 16.0  2.382  2.560   7.978   8.151
 multiply_cannon_metrocomm1       69360 15.2  0.094  0.100   4.429   7.871
 xc_rho_set_and_dset_create         110 12.4  0.077  0.096   7.254   7.496
 acc_transpose_blocks_kernels     69360 16.2  0.861  0.905   6.886   7.453
 xc_vxc_pw_create                    60 11.3  0.039  0.050   6.876   6.905
 make_full_single_inverse             7  9.6  0.001  0.001   6.621   6.667
 make_m2s                          2890 13.2  0.076  0.085   6.076   6.646
 jit_kernel_transpose                 5 15.0  6.025  6.563   6.025   6.563
 make_images                       2890 14.2  0.243  0.260   5.977   6.549
 xc_pw_derive                       510 13.4  0.006  0.007   6.076   6.136
 mp_waitany                        7680 13.5  4.443  5.846   4.443   5.846
 mp_alltoall_z22v                  2340 17.7  5.025  5.358   5.025   5.358
 multiply_cannon_metrocomm4       67915 15.2  0.184  0.205   2.060   4.919
 potential_pw2rs                     60 12.3  0.003  0.004   4.796   4.828
 wfi_extrapolate                      5  7.8  0.000  0.001   4.578   4.578
 grid_collocate_task_list           110  9.6  4.117  4.557   4.117   4.557
 -------------------------------------------------------------------------------
 ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
 PlotPoint: name="601", plot="h2o_512_md", label="(64n/12r/1t)", y=226.155000, yerr=0.000000
 PlotPoint: name="602", plot="h2o_512_md_mem", label="(64n/12r/1t)", y=555.000000, yerr=2.000000
 ~ ~ ~ ~ END DATA POINTS ~ ~ ~
 ~~~~~~ END RESULT ~~~~~~~~
 ========= END RESULTS ===========
 CommitSHA: d6d429ae51f26379f2dc33b64015d7bb356f05fe
 Summary: empty
 Status: OK
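The "PlotPoint:" lines above are the machine-readable summary of each run (plot name, node/rank/thread label, runtime or memory value, and error bar). As a minimal sketch, not part of the CP2K tooling, the snippet below shows how such lines could be pulled out of a result log for plotting; the file name "result.log" is a hypothetical placeholder for any of the result files listed above.

```python
import re
from pathlib import Path

# Illustrative sketch (assumption, not CP2K code): extract PlotPoint entries
# such as
#   PlotPoint: name="510", plot="h2o_32_nrep3_ls", label="(8n/1r/12t)", y=108.097000, yerr=0.000000
# from a performance-test log.
PLOTPOINT_RE = re.compile(
    r'PlotPoint:\s*name="(?P<name>[^"]+)",\s*plot="(?P<plot>[^"]+)",\s*'
    r'label="(?P<label>[^"]+)",\s*y=(?P<y>[0-9.Ee+-]+),\s*yerr=(?P<yerr>[0-9.Ee+-]+)'
)

def parse_plot_points(text: str):
    """Return one dict per PlotPoint line found in the given log text."""
    points = []
    for match in PLOTPOINT_RE.finditer(text):
        entry = match.groupdict()
        entry["y"] = float(entry["y"])       # runtime [s] or memory [MiB], depending on the plot
        entry["yerr"] = float(entry["yerr"]) # error bar reported by the test harness
        points.append(entry)
    return points

if __name__ == "__main__":
    log_text = Path("result.log").read_text()  # hypothetical input file
    for p in parse_plot_points(log_text):
        print(f'{p["plot"]:24s} {p["label"]:14s} {p["y"]:12.3f} +/- {p["yerr"]:.3f}')
```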