=== This is the CP2K Performance-Test ===

Updating 2cd2f20ff..8ef42d3b3
Fast-forward
 src/nequip_unittest.F | 10 ++++++----
 src/torch_api.F       | 24 ++++++++++++++++++++++++
 src/torch_c_api.cpp   | 24 ++++++++++++++++++++----
 3 files changed, 50 insertions(+), 8 deletions(-)
Current branch master is up to date.
Already up to date.
Current branch master is up to date.

GIT Revision: 8ef42d3b316011b445a73f25f7afd24a63b63184

################# ARCHITECTURE FILE ##################
#!/bin/bash
#
# CP2K arch file for Cray-XC50 (Piz Daint, CSCS, GPU partition)
#
# Tested with: GNU 9.3.0, Cray-MPICH 7.7.18, Cray-libsci 20.09.1, Cray-FFTW 3.3.8.10,
#              COSMA 2.6.2, ELPA 2022.05.001, LIBINT 2.6.0, LIBPEXSI 1.2.0,
#              LIBXC 6.0.0, LIBVORI 220621, LIBXSMM 1.17, PLUMED 2.8.0,
#              SIRIUS 7.3.2, SPGLIB 1.16.2
#
# Usage: Source this arch file and then run make as instructed.
#        A full toolchain installation is performed as default.
#        Replace or adapt the "module add" commands below if needed.
#
# Author: Matthias Krack (26.10.2022)
#
# \
if [ "${0}" = "${BASH_SOURCE}" ]; then \
   echo "ERROR: Script ${0##*/} must be sourced"; \
   echo "Usage: source ${0##*/}"; \
   exit 1; \
fi; \
this_file=${BASH_SOURCE##*/}; \
if [ -n "${1}" ]; then \
   gcc_version="${1}"; \
else \
   gcc_version="9.3.0"; \
fi; \
module add daint-gpu; \
module rm PrgEnv-cray; \
module add PrgEnv-gnu; \
module rm gcc; \
module add gcc/${gcc_version}; \
module add cray-fftw/3.3.8.10; \
module add cudatoolkit; \
echo "Expected setup:"; \
echo " cray-mpich/7.7.18"; \
echo " craype-haswell"; \
echo " daint-gpu/21.09"; \
echo " craype/2.7.10"; \
echo " cray-libsci/20.09.1"; \
echo " PrgEnv-gnu/6.0.10"; \
echo " gcc/${gcc_version}"; \
echo " cray-fftw/3.3.8.10"; \
echo " cudatoolkit/11.0.2_3.38-8.1__g5b73779"; \
module list; \
module -f save cp2k_gpu_gnu_psmp; \
echo "To load the required modules in your batch job script, use:"; \
echo " module restore cp2k_gpu_gnu_psmp"; \
cd tools/toolchain; \
./install_cp2k_toolchain.sh --enable-cuda=yes --gpu-ver=P100 --no-arch-files --with-gcc=system --with-libvdwxc --with-pexsi --with-plumed; \
cd ../..; \
printf "Sourcing ${PWD}/tools/toolchain/install/setup ... "; \
source ${PWD}/tools/toolchain/install/setup; \
printf "done\n"; \
echo "Check the output above for error messages and consistency!"; \
echo "If everything is OK, you can build a CP2K production binary with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.}"; \
echo "Alternatively, you can add further checks, e.g. for regression testing, with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.} DO_CHECKS=yes"; \
echo "or build CP2K as a library with"; \
echo " make -j ARCH=${this_file%.*} VERSION=${this_file##*.} libcp2k"; \
return

# Set options
DO_CHECKS := no
USE_ACC := yes
USE_COSMA := 2.6.2
USE_ELPA := 2022.05.001
USE_LIBINT := 2.6.0
USE_LIBPEXSI := 1.2.0
USE_LIBVORI := 220621
USE_LIBXC := 6.0.0
USE_LIBXSMM := 1.17
USE_PLUMED := 2.8.0
#USE_QUIP := 0.9.10
USE_SIRIUS := 7.3.2
USE_SPGLIB := 1.16.2
# Only needed for SIRIUS
LIBVDWXC_VER := 0.4.0
SPFFT_VER := 1.0.6
SPLA_VER := 1.5.4
HDF5_VER := 1.12.0
# Only needed for LIBPEXSI
SCOTCH_VER := 6.0.0
SUPERLU_VER := 6.1.0

LMAX := 5
MAX_CONTR := 4

GPUVER := P100
OFFLOAD_TARGET := cuda

CC := cc
CXX := CC
OFFLOAD_CC := nvcc
FC := ftn
LD := ftn
AR := ar -r

# cc, CC, and ftn include already the proper -march flag
CFLAGS := -O2 -fopenmp -fopenmp-simd -ftree-vectorize -funroll-loops -g

DFLAGS := -D__parallel
DFLAGS += -D__SCALAPACK
DFLAGS += -D__FFTW3
DFLAGS += -D__MPI_VERSION=3
DFLAGS += -D__MAX_CONTR=$(strip $(MAX_CONTR))

INSTALL_PATH := $(PWD)/tools/toolchain/install

ifeq ($(DO_CHECKS), yes)
   DFLAGS += -D__CHECK_DIAG
endif

ifeq ($(USE_ACC), yes)
   DFLAGS += -D__DBCSR_ACC
   DFLAGS += -D__OFFLOAD_CUDA
# Possibly no performance gain with PW_CUDA currently
   DFLAGS += -D__NO_OFFLOAD_PW
endif

ifneq ($(USE_PLUMED),)
   USE_PLUMED := $(strip $(USE_PLUMED))
   PLUMED_LIB := $(INSTALL_PATH)/plumed-$(USE_PLUMED)/lib
   DFLAGS += -D__PLUMED2
   USE_GSL := 2.7
   LIBS += $(PLUMED_LIB)/libplumed.a
endif

ifneq ($(USE_ELPA),)
   USE_ELPA := $(strip $(USE_ELPA))
   TARGET := nvidia
   ELPA_INC := $(INSTALL_PATH)/elpa-$(USE_ELPA)/$(TARGET)/include/elpa-$(USE_ELPA)
   ELPA_LIB := $(INSTALL_PATH)/elpa-$(USE_ELPA)/$(TARGET)/lib
   CFLAGS += -I$(ELPA_INC)/elpa -I$(ELPA_INC)/modules
   DFLAGS += -D__ELPA
   ifeq ($(TARGET), nvidia)
      DFLAGS += -D__ELPA_NVIDIA_GPU
   endif
   LIBS += $(ELPA_LIB)/libelpa.a
endif

ifneq ($(USE_QUIP),)
   USE_QUIP := $(strip $(USE_QUIP))
   QUIP_INC := $(INSTALL_PATH)/quip-$(USE_QUIP)/include
   QUIP_LIB := $(INSTALL_PATH)/quip-$(USE_QUIP)/lib
   CFLAGS += -I$(QUIP_INC)
   DFLAGS += -D__QUIP
   LIBS += $(QUIP_LIB)/libquip_core.a
   LIBS += $(QUIP_LIB)/libatoms.a
   LIBS += $(QUIP_LIB)/libFoX_sax.a
   LIBS += $(QUIP_LIB)/libFoX_common.a
   LIBS += $(QUIP_LIB)/libFoX_utils.a
   LIBS += $(QUIP_LIB)/libFoX_fsys.a
endif

ifneq ($(USE_LIBPEXSI),)
   USE_LIBPEXSI := $(strip $(USE_LIBPEXSI))
   SCOTCH_VER := $(strip $(SCOTCH_VER))
   SUPERLU_VER := $(strip $(SUPERLU_VER))
   LIBPEXSI_INC := $(INSTALL_PATH)/pexsi-$(USE_LIBPEXSI)/include
   LIBPEXSI_LIB := $(INSTALL_PATH)/pexsi-$(USE_LIBPEXSI)/lib
   SCOTCH_INC := $(INSTALL_PATH)/scotch-$(SCOTCH_VER)/include
   SCOTCH_LIB := $(INSTALL_PATH)/scotch-$(SCOTCH_VER)/lib
   SUPERLU_INC := $(INSTALL_PATH)/superlu_dist-$(SUPERLU_VER)/include
   SUPERLU_LIB := $(INSTALL_PATH)/superlu_dist-$(SUPERLU_VER)/lib
   CFLAGS += -I$(LIBPEXSI_INC) -I$(SCOTCH_INC) -I$(SUPERLU_INC)
   DFLAGS += -D__LIBPEXSI
   LIBS += $(LIBPEXSI_LIB)/libpexsi.a
   LIBS += $(SUPERLU_LIB)/libsuperlu_dist.a
   LIBS += $(SCOTCH_LIB)/libptscotchparmetis.a
   LIBS += $(SCOTCH_LIB)/libptscotch.a
   LIBS += $(SCOTCH_LIB)/libptscotcherr.a
   LIBS += $(SCOTCH_LIB)/libscotchmetis.a
   LIBS += $(SCOTCH_LIB)/libscotch.a
endif

ifneq ($(USE_LIBVORI),)
   USE_LIBVORI := $(strip $(USE_LIBVORI))
   LIBVORI_LIB := $(INSTALL_PATH)/libvori-$(USE_LIBVORI)/lib
   DFLAGS += -D__LIBVORI
   LIBS += $(LIBVORI_LIB)/libvori.a
endif

ifneq ($(USE_LIBXC),)
   USE_LIBXC := $(strip $(USE_LIBXC))
   LIBXC_INC := $(INSTALL_PATH)/libxc-$(USE_LIBXC)/include
   LIBXC_LIB := $(INSTALL_PATH)/libxc-$(USE_LIBXC)/lib
   CFLAGS += -I$(LIBXC_INC)
   DFLAGS += -D__LIBXC
   LIBS += $(LIBXC_LIB)/libxcf03.a
   LIBS += $(LIBXC_LIB)/libxc.a
endif

ifneq ($(USE_LIBINT),)
   USE_LIBINT := $(strip $(USE_LIBINT))
   LMAX := $(strip $(LMAX))
   LIBINT_INC := $(INSTALL_PATH)/libint-v$(USE_LIBINT)-cp2k-lmax-$(LMAX)/include
   LIBINT_LIB := $(INSTALL_PATH)/libint-v$(USE_LIBINT)-cp2k-lmax-$(LMAX)/lib
   CFLAGS += -I$(LIBINT_INC)
   DFLAGS += -D__LIBINT
   LIBS += $(LIBINT_LIB)/libint2.a
endif

ifneq ($(USE_SPGLIB),)
   USE_SPGLIB := $(strip $(USE_SPGLIB))
   SPGLIB_INC := $(INSTALL_PATH)/spglib-$(USE_SPGLIB)/include
   SPGLIB_LIB := $(INSTALL_PATH)/spglib-$(USE_SPGLIB)/lib
   CFLAGS += -I$(SPGLIB_INC)
   DFLAGS += -D__SPGLIB
   LIBS += $(SPGLIB_LIB)/libsymspg.a
endif

ifneq ($(USE_LIBXSMM),)
   USE_LIBXSMM := $(strip $(USE_LIBXSMM))
   LIBXSMM_INC := $(INSTALL_PATH)/libxsmm-$(USE_LIBXSMM)/include
   LIBXSMM_LIB := $(INSTALL_PATH)/libxsmm-$(USE_LIBXSMM)/lib
   CFLAGS += -I$(LIBXSMM_INC)
   DFLAGS += -D__LIBXSMM
   LIBS += $(LIBXSMM_LIB)/libxsmmf.a
   LIBS += $(LIBXSMM_LIB)/libxsmm.a
endif

ifneq ($(USE_SIRIUS),)
   USE_SIRIUS := $(strip $(USE_SIRIUS))
   HDF5_VER := $(strip $(HDF5_VER))
   HDF5_LIB := $(INSTALL_PATH)/hdf5-$(HDF5_VER)/lib
   LIBVDWXC_VER := $(strip $(LIBVDWXC_VER))
   LIBVDWXC_INC := $(INSTALL_PATH)/libvdwxc-$(LIBVDWXC_VER)/include
   LIBVDWXC_LIB := $(INSTALL_PATH)/libvdwxc-$(LIBVDWXC_VER)/lib
   SPFFT_VER := $(strip $(SPFFT_VER))
   SPFFT_INC := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/include
   SPLA_VER := $(strip $(SPLA_VER))
   SPLA_INC := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/include/spla
   ifeq ($(USE_ACC), yes)
      DFLAGS += -D__OFFLOAD_GEMM
      SPFFT_LIB := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/lib/cuda
      SPLA_LIB := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/lib/cuda
      SIRIUS_INC := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/include/cuda
      SIRIUS_LIB := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/lib/cuda
   else
      SPFFT_LIB := $(INSTALL_PATH)/SpFFT-$(SPFFT_VER)/lib
      SPLA_LIB := $(INSTALL_PATH)/SpLA-$(SPLA_VER)/lib
      SIRIUS_INC := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/include
      SIRIUS_LIB := $(INSTALL_PATH)/sirius-$(USE_SIRIUS)/lib
   endif
   CFLAGS += -I$(LIBVDWXC_INC)
   CFLAGS += -I$(SPFFT_INC)
   CFLAGS += -I$(SPLA_INC)
   CFLAGS += -I$(SIRIUS_INC)
   DFLAGS += -D__HDF5
   DFLAGS += -D__LIBVDWXC
   DFLAGS += -D__SPFFT
   DFLAGS += -D__SPLA
   DFLAGS += -D__SIRIUS
   LIBS += $(SIRIUS_LIB)/libsirius.a
   LIBS += $(SPLA_LIB)/libspla.a
   LIBS += $(SPFFT_LIB)/libspfft.a
   LIBS += $(LIBVDWXC_LIB)/libvdwxc.a
   LIBS += $(HDF5_LIB)/libhdf5.a
endif

ifneq ($(USE_COSMA),)
   USE_COSMA := $(strip $(USE_COSMA))
   ifeq ($(USE_ACC), yes)
      USE_COSMA := $(USE_COSMA)-cuda
   endif
   COSMA_INC := $(INSTALL_PATH)/COSMA-$(USE_COSMA)/include
   COSMA_LIB := $(INSTALL_PATH)/COSMA-$(USE_COSMA)/lib
   CFLAGS += -I$(COSMA_INC)
   DFLAGS += -D__COSMA
   LIBS += $(COSMA_LIB)/libcosma_prefixed_pxgemm.a
   LIBS += $(COSMA_LIB)/libcosma.a
   LIBS += $(COSMA_LIB)/libcosta_prefixed_scalapack.a
   LIBS += $(COSMA_LIB)/libcosta.a
   LIBS += $(COSMA_LIB)/libTiled-MM.a
endif

ifneq ($(USE_GSL),)
   USE_GSL := $(strip $(USE_GSL))
   GSL_INC := $(INSTALL_PATH)/gsl-$(USE_GSL)/include
   GSL_LIB := $(INSTALL_PATH)/gsl-$(USE_GSL)/lib
   CFLAGS += -I$(GSL_INC)
   DFLAGS += -D__GSL
   LIBS += $(GSL_LIB)/libgsl.a
endif

CFLAGS += $(DFLAGS)

CXXFLAGS := $(CFLAGS) -std=c++11

OFFLOAD_FLAGS := $(DFLAGS) -O3 -Xcompiler="-fopenmp" -arch sm_60 --std=c++11

FCFLAGS := $(CFLAGS)
ifeq ($(shell [ $(shell gcc -dumpversion | cut -d. -f1) -gt 9 ] && echo yes), yes)
   FCFLAGS += -fallow-argument-mismatch
endif
FCFLAGS += -fbacktrace
FCFLAGS += -ffree-form
FCFLAGS += -ffree-line-length-none
FCFLAGS += -fno-omit-frame-pointer
FCFLAGS += -std=f2008

ifneq ($(CUDA_HOME),)
   CUDA_LIB := $(CUDA_HOME)/lib64
   LDFLAGS := $(FCFLAGS) -L$(CUDA_LIB) -Wl,-rpath=$(CUDA_LIB)
else
   LDFLAGS := $(FCFLAGS)
endif

LIBS += -lcusolver -lcudart -lnvrtc -lcuda -lcufft -lcublas -lrt
LIBS += -lz -ldl -lpthread -lstdc++

# End
############### END ARCHITECTURE FILE ################
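For reference, a batch job following the instructions printed by the arch file above could look as sketched below. This is only a minimal sketch for the 8-node / 2-ranks-per-node / 6-threads-per-rank layout of the first test in this report; the SLURM directives (job name, time limit, GPU constraint), the input/output file names, and the binary path exe/CRAY-XC50-gnu/cp2k.psmp (assuming the arch file is named CRAY-XC50-gnu.psmp) are placeholders to be adapted to the actual setup.

#!/bin/bash -l
#SBATCH --job-name=cp2k_perf_test   # placeholder
#SBATCH --nodes=8
#SBATCH --ntasks-per-node=2
#SBATCH --cpus-per-task=6
#SBATCH --time=00:15:00
#SBATCH --constraint=gpu            # placeholder: GPU partition constraint

# Load the module collection saved while sourcing the arch file
module restore cp2k_gpu_gnu_psmp

# Use the cores allocated to each rank as OpenMP threads
export OMP_NUM_THREADS=${SLURM_CPUS_PER_TASK}

# Binary produced by "make -j ARCH=CRAY-XC50-gnu VERSION=psmp"
srun ./exe/CRAY-XC50-gnu/cp2k.psmp -i RI-RPA.inp -o result.log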
===== TESTS (description) =====

~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-32 RI-RPA/RI-MP2 correlation energy
input file: benchmarks/QS_mp2_rpa/32-H2O/RI-RPA.inp
required files: ['benchmarks/QS_mp2_rpa/32-H2O/BASIS_H2O', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32.xyz', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-PBE-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-RI-dRPA-TZ.inp']
output file: result.log
# nodes = 8
# ranks/node = 2
# threads/rank = 6
nrepeat = 1
time[min] = 15
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/01
job id: 43310478
--- Point ---
name: 10
plot: h2o_32_ri_rpa_mp2
regex: Total RI-RPA Time=
label: RI-RPA (8n/2r/6t)
--- Point ---
name: 11
plot: h2o_32_ri_rpa_mp2_mem
regex: Estimated peak process memory
label: RI-RPA (8n/2r/6t)
~~~~~~~ END TEST ~~~~~~~

~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-32 RI-RPA/RI-MP2 correlation energy
input file: benchmarks/QS_mp2_rpa/32-H2O/RI-MP2.inp
required files: ['benchmarks/QS_mp2_rpa/32-H2O/BASIS_H2O', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32.xyz', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-PBE-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-HF-TZ.inp', 'benchmarks/QS_mp2_rpa/32-H2O/H2O-32-RI-MP2-TZ.inp']
output file: result.log
# nodes = 8
# ranks/node = 6
# threads/rank = 2
nrepeat = 1
time[min] = 15
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/02
job id: 43310480
--- Point ---
name: 20
plot: h2o_32_ri_rpa_mp2
regex: Total MP2 Time=
label: RI-MP2 (8n/6r/2t)
--- Point ---
name: 21
plot: h2o_32_ri_rpa_mp2_mem
regex: Estimated peak process memory
label: RI-MP2 (8n/6r/2t)
~~~~~~~ END TEST ~~~~~~~

~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 12
# threads/rank = 1
nrepeat = 1
time[min] = 5
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/03
job id: 43310482
--- Point ---
name: 100
plot: h2o_64_md
regex: CP2K
label: (8n/12r/1t)
--- Point ---
name: 101
plot: h2o_64_md_mem
regex: Estimated peak process memory
label: (8n/12r/1t)
~~~~~~~ END TEST ~~~~~~~

~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 6
# threads/rank = 2
nrepeat = 1
time[min] = 5
run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/04
job id: 43310483
--- Point ---
name: 102
plot: h2o_64_md
regex: CP2K
label: (8n/6r/2t)
--- Point ---
name: 103
plot: h2o_64_md_mem
regex: Estimated peak process memory
label: (8n/6r/2t)
~~~~~~~ END TEST ~~~~~~~

~~~~~~~~~ TEST ~~~~~~~~~
description: H2O-64 test - DBCSR dominated (MPI/OMP)
input file: benchmarks/QS/H2O-64.inp
required files: []
output file: result.log
# nodes = 8
# ranks/node = 4
# threads/rank = 3
nrepeat = 1
time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/05 job id: 43310484 --- Point --- name: 104 plot: h2o_64_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 105 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/06 job id: 43310485 --- Point --- name: 106 plot: h2o_64_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 107 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/07 job id: 43310486 --- Point --- name: 108 plot: h2o_64_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 109 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-64 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-64.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 5 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/08 job id: 43310487 --- Point --- name: 110 plot: h2o_64_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 111 plot: h2o_64_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/09 job id: 43310488 --- Point --- name: 200 plot: h2o_128_md regex: CP2K label: (8n/12r/1t) --- Point --- name: 201 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/10 job id: 43310489 --- Point --- name: 202 plot: h2o_128_md regex: CP2K label: (8n/6r/2t) --- Point --- name: 203 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/11 job id: 43310490 --- Point --- name: 
204 plot: h2o_128_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 205 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/12 job id: 43310492 --- Point --- name: 206 plot: h2o_128_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 207 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/13 job id: 43310493 --- Point --- name: 208 plot: h2o_128_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 209 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-128 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-128.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/14 job id: 43310494 --- Point --- name: 210 plot: h2o_128_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 211 plot: h2o_128_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/15 job id: 43310495 --- Point --- name: 400 plot: h2o_256_md regex: CP2K label: (8n/12r/1t) --- Point --- name: 401 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/16 job id: 43310497 --- Point --- name: 402 plot: h2o_256_md regex: CP2K label: (8n/6r/2t) --- Point --- name: 403 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/17 job id: 43310498 --- Point --- name: 404 plot: h2o_256_md regex: CP2K label: (8n/4r/3t) --- Point --- name: 405 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/4r/3t) 
~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/18 job id: 43310499 --- Point --- name: 406 plot: h2o_256_md regex: CP2K label: (8n/3r/4t) --- Point --- name: 407 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/19 job id: 43310500 --- Point --- name: 408 plot: h2o_256_md regex: CP2K label: (8n/2r/6t) --- Point --- name: 409 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-256 test - DBCSR dominated (MPI/OMP) input file: benchmarks/QS/H2O-256.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 30 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/20 job id: 43310501 --- Point --- name: 410 plot: h2o_256_md regex: CP2K label: (8n/1r/12t) --- Point --- name: 411 plot: h2o_256_md_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 12 # threads/rank = 1 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/21 job id: 43310503 --- Point --- name: 500 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/12r/1t) --- Point --- name: 501 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/12r/1t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 6 # threads/rank = 2 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/22 job id: 43310510 --- Point --- name: 502 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/6r/2t) --- Point --- name: 503 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/6r/2t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 4 # threads/rank = 3 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/23 job id: 43310514 --- Point --- name: 504 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/4r/3t) --- Point --- name: 505 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/4r/3t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) 
linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 3 # threads/rank = 4 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/24 job id: 43310515 --- Point --- name: 506 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/3r/4t) --- Point --- name: 507 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/3r/4t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 2 # threads/rank = 6 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/25 job id: 43310516 --- Point --- name: 508 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/2r/6t) --- Point --- name: 509 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/2r/6t) ~~~~~~~ END TEST ~~~~~~~ ~~~~~~~~~ TEST ~~~~~~~~~ description: H2O-32 (NREP 3) linear scaling test (864 H2O) input file: benchmarks/QS_DM_LS/H2O-dft-ls.inp required files: [] output file: result.log # nodes = 8 # ranks/node = 1 # threads/rank = 12 nrepeat = 1 time[min] = 10 run dir: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/26 job id: 43310517 --- Point --- name: 510 plot: h2o_32_nrep3_ls regex: CP2K label: (8n/1r/12t) --- Point --- name: 511 plot: h2o_32_nrep3_ls_mem regex: Estimated peak process memory label: (8n/1r/12t) ~~~~~~~ END TEST ~~~~~~~ === END TESTS (description) === ===== PLOTS (description) ===== ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_ri_rpa_mp2", title="32 H2O molecules (RI-MP2, RI-RPA)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_ri_rpa_mp2_mem", title="32 H2O molecules (RI-MP2, RI-RPA)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_64_md", title="64 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_64_md_mem", title="64 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_128_md", title="128 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_128_md_mem", title="128 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_256_md", title="256 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_256_md_mem", title="256 H2O molecules (10 MD steps)", xlabel="Revision", ylabel="Est. peak process memory [MiB]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_nrep3_ls", title="864 H2O molecules (LS SCF)", xlabel="Revision", ylabel="Time [s]" ~~~~~~~~~ PLOT ~~~~~~~~~ Plot: name="h2o_32_nrep3_ls_mem", title="864 H2O molecules (LS SCF)", xlabel="Revision", ylabel="Est. 
peak process memory [MiB]" === END PLOTS (description) === ============ RESULTS ============ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/01/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 0.000000E+00 0.0% 0.0% 0.0% flops max/rank 0.000000E+00 0.0% 0.0% 0.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 0 0.0% 0.0% 0.0% number of processed stacks 0 0.0% 0.0% 0.0% average stack size 0.0 0.0 0.0 marketing flops 0.000000E+00 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 1 12. MP_Allreduce 19 21. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 15 177869. MP_Allreduce 344 9. MP_Sync 3 MP_comm_split 1 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.020 0.035 133.135 133.136 farming_run 1 2.0 132.610 132.612 133.107 133.111 ------------------------------------------------------------------------------- @@@@@@@@@@ Run number: 2 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 4194304 0.0% 0.0% 100.0% flops 14 x 32 x 32 154140672 0.0% 0.0% 100.0% flops 29 x 32 x 32 159645696 0.0% 0.0% 100.0% flops 14 x 14 x 32 208732160 0.0% 0.0% 100.0% flops 29 x 14 x 32 212860928 0.0% 0.0% 100.0% flops 14 x 29 x 32 212860928 0.0% 0.0% 100.0% flops 29 x 29 x 32 227352576 0.0% 0.0% 100.0% flops 14 x 32 x 14 896801644032 0.0% 0.0% 100.0% flops 29 x 32 x 14 928925089792 0.0% 0.0% 100.0% flops 14 x 32 x 29 928925089792 0.0% 0.0% 100.0% flops 29 x 32 x 29 962100985856 0.0% 0.0% 100.0% flops 32 x 32 x 14 1693169221632 0.0% 0.0% 100.0% flops 32 x 32 x 29 1753639550976 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 7.164741E+12 0.0% 0.0% 100.0% flops max/rank 447.801317E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 249492158 0.0% 0.0% 100.0% number of processed stacks 164328 0.0% 0.0% 100.0% average stack size 0.0 0.0 1518.3 marketing flops 7.165779E+12 ------------------------------------------------------------------------------- # multiplications 1160 max memory usage/rank 1.457414E+09 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 2592 MPI messages size (bytes): total size 1.140326E+09 min size 0.000000E+00 max size 1.663488E+06 average size 439.940750E+03 MPI breakdown and total messages size (bytes): size <= 128 132 0 128 < size <= 8192 348 2850816 8192 < size <= 32768 0 0 32768 < size <= 131072 1536 179306496 131072 < size <= 4194304 576 958169088 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 2308 54. MP_Alltoall 4670 822215. MP_ISend 2604 90577. MP_IRecv 2604 90574. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 12 MP_Bcast 228 1113141. MP_Allreduce 485 2282278. MP_Sync 27 MP_Alltoall 38 9316958. MP_SendRecv 30 829726. MP_ISendRecv 135 235435. MP_Wait 281 MP_comm_split 8 MP_ISend 127 3867574. MP_IRecv 127 3866554. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.011 0.028 115.322 115.323 qs_energies 1 2.0 0.000 0.000 115.066 115.068 mp2_main 1 3.0 0.000 0.000 113.192 113.194 mp2_gpw_main 1 4.0 0.034 0.045 112.308 112.310 mp2_ri_gpw_compute_in 1 5.0 0.187 0.350 93.241 93.736 mp2_ri_gpw_compute_in_loop 1 6.0 0.004 0.005 55.438 55.930 mp2_eri_3c_integrate_gpw 272 7.0 0.154 0.176 41.759 47.214 get_2c_integrals 1 6.0 0.000 0.000 36.781 37.449 integrate_v_rspace 273 8.0 0.436 0.450 25.141 30.352 pw_transfer 6555 10.6 0.379 0.387 27.448 28.015 fft_wrap_pw1pw2 5465 11.4 0.045 0.047 26.147 26.697 grid_integrate_task_list 273 9.0 20.941 26.650 20.941 26.650 fft_wrap_pw1pw2_100 2178 12.4 1.182 1.262 23.709 24.263 compute_2c_integrals 1 7.0 0.003 0.003 19.331 19.332 rpa_ri_compute_en 1 5.0 0.000 0.000 18.944 19.117 compute_2c_integrals_loop_lm 1 8.0 0.003 0.004 18.897 19.050 mp2_eri_2c_integrate_gpw 1 9.0 2.375 2.415 18.894 19.046 cp_fm_cholesky_decompose 12 8.2 17.403 18.044 17.403 18.044 cholesky_decomp 1 7.0 0.000 0.000 16.293 16.939 fft3d_s 5443 13.4 16.210 16.723 16.233 16.744 ao_to_mo_and_store_B_mult_1 272 7.0 10.842 15.548 10.842 15.548 calculate_wavefunction 272 8.0 5.387 5.546 12.538 13.166 rpa_num_int 1 6.0 0.001 0.009 10.627 10.636 rpa_num_int_RPA_matrix_operati 8 7.0 0.000 0.000 10.491 10.521 calc_mat_Q 8 8.0 0.000 0.000 9.328 9.451 contract_S_to_Q 8 9.0 0.000 0.000 8.748 8.872 calc_potential_gpw 544 9.5 0.005 0.006 8.289 8.615 mp2_eri_2c_integrate_gpw_pot_l 272 10.0 0.001 0.002 8.227 8.446 parallel_gemm_fm 14 9.1 0.000 0.000 8.321 8.433 parallel_gemm_fm_cosma 14 10.1 8.321 8.433 8.321 8.433 
potential_pw2rs 545 10.0 0.107 0.109 7.675 8.315 create_integ_mat 1 6.0 0.014 0.027 7.731 7.740 collocate_single_gaussian 272 10.0 0.040 0.043 7.465 7.683 array2fm 1 7.0 0.000 0.000 6.760 7.207 pw_scatter_s 2720 13.7 4.483 4.693 4.483 4.693 pw_gather_s 2722 13.2 3.902 4.276 3.902 4.276 array2fm_buffer_send 1 8.0 3.016 3.232 3.016 3.232 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="10", plot="h2o_32_ri_rpa_mp2", label="RI-RPA (8n/2r/6t)", y=112.308326, yerr=0.000000 PlotPoint: name="11", plot="h2o_32_ri_rpa_mp2_mem", label="RI-RPA (8n/2r/6t)", y=2729.000000, yerr=0.000000 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/02/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 0.000000E+00 0.0% 0.0% 0.0% flops max/rank 0.000000E+00 0.0% 0.0% 0.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 0 0.0% 0.0% 0.0% number of processed stacks 0 0.0% 0.0% 0.0% average stack size 0.0 0.0 0.0 marketing flops 0.000000E+00 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 1 12. MP_Allreduce 19 21. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 22 205321. MP_Allreduce 344 10. MP_Sync 4 MP_comm_split 1 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.027 0.039 400.245 400.247 farming_run 1 2.0 399.035 399.042 400.206 400.209 ------------------------------------------------------------------------------- @@@@@@@@@@ Run number: 2 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 16777216 0.0% 0.0% 100.0% flops 14 x 32 x 32 565182464 0.0% 0.0% 100.0% flops 29 x 32 x 32 585367552 0.0% 0.0% 100.0% flops 14 x 14 x 32 626196480 0.0% 0.0% 100.0% flops 29 x 14 x 32 638582784 0.0% 0.0% 100.0% flops 14 x 29 x 32 638582784 0.0% 0.0% 100.0% flops 29 x 29 x 32 682057728 0.0% 0.0% 100.0% flops 14 x 32 x 14 897827128576 0.0% 0.0% 100.0% flops 29 x 32 x 14 929989394432 0.0% 0.0% 100.0% flops 14 x 32 x 29 929989394432 0.0% 0.0% 100.0% flops 29 x 32 x 29 963203301376 0.0% 0.0% 100.0% flops 32 x 32 x 14 1693481172992 0.0% 0.0% 100.0% flops 32 x 32 x 29 1753962643456 0.0% 0.0% 100.0% flops inhomo. 
stacks 0 0.0% 0.0% 0.0% flops total 7.172206E+12 0.0% 0.0% 100.0% flops max/rank 150.696064E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 249788821 0.0% 0.0% 100.0% number of processed stacks 98736 0.0% 0.0% 100.0% average stack size 0.0 0.0 2529.9 marketing flops 7.174951E+12 ------------------------------------------------------------------------------- # multiplications 1140 max memory usage/rank 1.223655E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 61440 MPI messages size (bytes): total size 6.073508E+09 min size 0.000000E+00 max size 642.960000E+03 average size 98.852664E+03 MPI breakdown and total messages size (bytes): size <= 128 32004 0 128 < size <= 8192 1820 14909440 8192 < size <= 32768 0 0 32768 < size <= 131072 18640 1081442304 131072 < size <= 4194304 8976 4977156096 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 1003 44. MP_Alltoall 1797 713538. MP_ISend 3686 54943. MP_IRecv 3622 54292. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 12 MP_Bcast 703 408373. MP_Allreduce 1821 23730. MP_Sync 38 MP_Alltoall 77 MP_SendRecv 2171 2843495. MP_ISendRecv 1739 144022. MP_Wait 2051 MP_comm_split 7 MP_ISend 264 362227. MP_IRecv 264 362718. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.013 0.049 211.382 211.383 qs_energies 1 2.0 0.000 0.000 210.612 210.622 scf_env_do_scf 1 3.0 0.000 0.000 107.146 107.146 qs_ks_update_qs_env 5 5.0 0.000 0.000 106.269 106.278 rebuild_ks_matrix 4 6.0 0.000 0.000 106.268 106.277 qs_ks_build_kohn_sham_matrix 4 7.0 0.058 0.066 106.268 106.277 hfx_ks_matrix 4 8.0 0.001 0.001 105.894 105.897 integrate_four_center 4 9.0 0.143 0.453 105.893 105.897 mp2_main 1 3.0 0.000 0.000 103.180 103.190 mp2_gpw_main 1 4.0 0.034 0.055 102.295 102.308 integrate_four_center_main 4 10.0 0.057 0.605 97.138 100.077 integrate_four_center_bin 262 11.0 97.081 99.772 97.081 99.772 init_scf_loop 1 4.0 0.000 0.000 92.729 92.729 mp2_ri_gpw_compute_in 1 5.0 0.066 0.073 75.356 76.463 mp2_ri_gpw_compute_in_loop 1 6.0 0.002 0.002 54.523 55.627 mp2_eri_3c_integrate_gpw 91 7.0 0.142 0.154 42.180 47.180 integrate_v_rspace 95 8.0 0.397 0.569 28.562 33.441 pw_transfer 2240 10.6 0.147 0.167 29.876 30.366 ao_to_mo_and_store_B_mult_1 91 7.0 10.650 29.618 10.650 29.618 fft_wrap_pw1pw2 1868 11.4 0.018 0.022 28.897 29.385 grid_integrate_task_list 95 9.0 23.871 28.952 23.871 28.952 mp2_ri_gpw_compute_en 1 5.0 0.073 0.111 26.763 28.528 fft_wrap_pw1pw2_100 730 12.4 1.271 1.463 26.626 27.166 mp2_ri_gpw_compute_en_RI_loop 1 6.0 1.882 2.188 24.979 24.989 get_2c_integrals 1 6.0 0.019 0.171 20.646 20.751 compute_2c_integrals 1 7.0 0.032 0.216 19.604 19.612 compute_2c_integrals_loop_lm 1 8.0 0.004 0.026 18.837 
19.205 mp2_eri_2c_integrate_gpw 1 9.0 1.738 1.912 18.833 19.204 fft3d_s 1823 13.4 18.405 18.776 18.419 18.789 scf_env_do_scf_inner_loop 4 4.0 0.000 0.000 14.415 14.415 calculate_wavefunction 91 8.0 2.011 2.049 9.731 9.958 mp2_ri_gpw_compute_en_expansio 172 7.0 0.557 0.591 8.721 9.204 potential_pw2rs 186 10.0 0.034 0.035 8.599 9.180 mp2_eri_2c_integrate_gpw_pot_l 91 10.0 0.001 0.001 8.226 8.649 local_gemm 172 8.0 8.164 8.640 8.164 8.640 mp2_ri_gpw_compute_en_comm 22 7.0 0.498 0.520 7.986 8.425 collocate_single_gaussian 91 10.0 0.017 0.040 7.867 8.184 calc_potential_gpw 182 9.5 0.002 0.002 7.916 8.072 mp_sync 38 10.4 3.517 6.870 3.517 6.870 mp_sendrecv_dm3 2068 8.0 6.022 6.453 6.022 6.453 mp2_ri_gpw_compute_en_ener 172 7.0 6.345 6.439 6.345 6.439 pw_gather_s 912 13.2 4.903 5.463 4.903 5.463 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="20", plot="h2o_32_ri_rpa_mp2", label="RI-MP2 (8n/6r/2t)", y=102.305782, yerr=0.000000 PlotPoint: name="21", plot="h2o_32_ri_rpa_mp2_mem", label="RI-MP2 (8n/6r/2t)", y=1511.000000, yerr=0.000000 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/03/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 29.277748E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 5055360 0.0% 0.0% 100.0% average stack size 0.0 0.0 29.1 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 451.399680E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 9436608 MPI messages size (bytes): total size 333.233553E+09 min size 0.000000E+00 max size 315.840000E+03 average size 35.312852E+03 MPI breakdown and total messages size (bytes): size <= 128 4913240 0 128 < size <= 8192 1155432 9465298944 8192 < size <= 32768 1984512 54190407680 32768 < size <= 131072 551296 42776657920 131072 < size <= 4194304 832128 226802306368 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3683 62385. MP_Allreduce 10249 271. 
MP_Sync 580 MP_Alltoall 2083 MP_ISendRecv 45220 5520. MP_Wait 60486 MP_comm_split 50 MP_ISend 20771 42672. MP_IRecv 20771 42672. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.043 0.171 51.632 51.653 qs_mol_dyn_low 1 2.0 0.008 0.026 51.120 51.128 qs_forces 11 3.9 0.002 0.003 51.020 51.022 qs_energies 11 4.9 0.001 0.003 49.558 49.572 scf_env_do_scf 11 5.9 0.001 0.001 43.325 43.325 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 41.315 41.315 dbcsr_multiply_generic 2286 12.5 0.094 0.096 33.212 33.722 qs_scf_new_mos 108 7.5 0.000 0.000 31.587 31.888 qs_scf_loop_do_ot 108 8.5 0.000 0.001 31.587 31.888 ot_scf_mini 108 9.5 0.002 0.002 29.931 30.123 multiply_cannon 2286 13.5 0.184 0.194 25.953 27.469 multiply_cannon_loop 2286 14.5 1.503 1.583 25.275 26.831 velocity_verlet 10 3.0 0.003 0.012 25.397 25.399 ot_mini 108 10.5 0.001 0.001 19.242 19.483 qs_ot_get_derivative 108 11.5 0.022 0.172 16.249 16.450 mp_waitall_1 267858 16.1 8.494 14.904 8.494 14.904 multiply_cannon_metrocomm3 54864 15.5 0.067 0.072 5.971 13.300 multiply_cannon_multrec 54864 15.5 4.235 6.567 7.690 11.216 rebuild_ks_matrix 119 8.3 0.000 0.000 7.697 7.818 qs_ks_build_kohn_sham_matrix 119 9.3 0.010 0.011 7.697 7.818 multiply_cannon_sync_h2d 54864 15.5 5.909 7.732 5.909 7.732 mp_sum_l 7207 12.9 5.572 7.225 5.572 7.225 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.782 6.892 qs_ot_get_p 119 10.4 0.001 0.002 6.109 6.404 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 5.537 5.984 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 5.321 5.446 init_scf_run 11 5.9 0.000 0.001 4.963 4.963 scf_env_initial_rho_setup 11 6.9 0.001 0.001 4.963 4.963 dbcsr_mm_accdrv_process 76910 16.1 1.144 1.801 3.377 4.803 sum_up_and_integrate 119 10.3 0.012 0.014 4.379 4.386 integrate_v_rspace 119 11.3 0.002 0.002 4.367 4.376 qs_rho_update_rho_low 119 7.7 0.000 0.001 3.903 4.041 calculate_rho_elec 119 8.7 0.011 0.016 3.902 4.040 qs_ot_p2m_diag 50 11.0 0.004 0.007 3.388 3.437 multiply_cannon_metrocomm1 54864 15.5 0.050 0.056 1.765 3.259 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.866 3.051 apply_single 119 13.6 0.000 0.000 2.866 3.051 calculate_dm_sparse 119 9.5 0.000 0.000 2.874 3.009 jit_kernel_multiply 13 15.8 2.173 2.915 2.173 2.915 calculate_first_density_matrix 1 7.0 0.000 0.001 2.853 2.857 rs_pw_transfer 974 11.9 0.011 0.013 2.655 2.784 ot_diis_step 108 11.5 0.006 0.006 2.693 2.693 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.637 2.639 cp_dbcsr_syevd 50 12.0 0.003 0.003 2.601 2.601 qs_ot_get_orbitals 108 10.5 0.000 0.000 2.397 2.449 cp_fm_diag_elpa 50 13.0 0.000 0.000 2.405 2.406 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.334 2.401 cp_fm_redistribute_end 50 14.0 2.183 2.383 2.188 2.385 cp_fm_diag_elpa_base 50 14.0 0.196 2.350 0.197 2.359 acc_transpose_blocks 54864 15.5 0.228 0.248 1.728 2.172 density_rs2pw 119 9.7 0.004 0.004 2.029 2.120 grid_integrate_task_list 119 12.3 2.024 2.118 2.024 2.118 wfi_extrapolate 11 7.9 0.001 0.001 2.053 2.053 init_scf_loop 11 6.9 0.000 0.001 1.994 1.994 mp_sum_d 4125 12.0 1.352 1.977 1.352 1.977 potential_pw2rs 119 12.3 0.004 0.004 1.764 1.775 make_m2s 4572 13.5 0.054 0.056 1.573 1.611 pw_transfer 1439 11.6 0.051 0.054 1.516 1.582 make_images 4572 14.5 0.134 0.140 1.490 1.527 fft_wrap_pw1pw2 1201 12.6 0.007 0.007 
1.442 1.508 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.381 1.405 mp_alltoall_d11v 2130 13.8 1.239 1.393 1.239 1.393 grid_collocate_task_list 119 9.7 1.288 1.353 1.288 1.353 mp_waitany 12084 13.8 1.232 1.344 1.232 1.344 fft3d_ps 1201 14.6 0.354 0.454 1.220 1.281 dbcsr_dot_sd 1205 11.9 0.047 0.057 0.760 1.174 fft_wrap_pw1pw2_140 487 13.2 0.080 0.093 1.096 1.165 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="100", plot="h2o_64_md", label="(8n/12r/1t)", y=51.653000, yerr=0.000000 PlotPoint: name="101", plot="h2o_64_md_mem", label="(8n/12r/1t)", y=430.636364, yerr=0.481046 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/04/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 57.173320E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3066240 0.0% 0.0% 100.0% average stack size 0.0 0.0 47.9 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 486.825984E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 2194560 MPI messages size (bytes): total size 310.646604E+09 min size 0.000000E+00 max size 1.145520E+06 average size 141.553031E+03 MPI breakdown and total messages size (bytes): size <= 128 724648 0 128 < size <= 8192 253512 2076770304 8192 < size <= 32768 281952 4619501568 32768 < size <= 131072 494448 39143342080 131072 < size <= 4194304 440000 264807943488 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3672 62664. MP_Allreduce 10226 305. MP_Sync 104 MP_Alltoall 2060 1624790. MP_ISendRecv 33558 37093. MP_Wait 40318 MP_comm_split 50 MP_ISend 5720 128509. MP_IRecv 5720 128509. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.016 0.031 39.377 39.378 qs_mol_dyn_low 1 2.0 0.005 0.017 39.140 39.148 qs_forces 11 3.9 0.005 0.032 39.035 39.039 qs_energies 11 4.9 0.007 0.019 37.330 37.338 scf_env_do_scf 11 5.9 0.000 0.001 31.123 31.124 scf_env_do_scf_inner_loop 108 6.5 0.002 0.007 28.630 28.630 dbcsr_multiply_generic 2286 12.5 0.100 0.103 21.943 22.282 qs_scf_new_mos 108 7.5 0.001 0.001 19.736 19.980 qs_scf_loop_do_ot 108 8.5 0.001 0.001 19.735 19.979 ot_scf_mini 108 9.5 0.003 0.003 18.846 19.020 multiply_cannon 2286 13.5 0.209 0.218 17.000 18.610 velocity_verlet 10 3.0 0.002 0.012 18.105 18.109 multiply_cannon_loop 2286 14.5 0.908 0.983 15.869 17.250 ot_mini 108 10.5 0.001 0.001 11.749 11.986 mp_waitall_1 217478 16.2 6.058 11.541 6.058 11.541 multiply_cannon_metrocomm3 27432 15.5 0.067 0.070 4.342 10.058 qs_ot_get_derivative 108 11.5 0.001 0.001 9.297 9.472 multiply_cannon_multrec 27432 15.5 1.970 4.482 6.328 9.322 rebuild_ks_matrix 119 8.3 0.000 0.000 7.091 7.224 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.015 7.090 7.224 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.260 6.382 dbcsr_mm_accdrv_process 47894 16.0 3.296 5.554 4.289 6.340 init_scf_run 11 5.9 0.000 0.001 4.830 4.831 scf_env_initial_rho_setup 11 6.9 0.001 0.001 4.830 4.831 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 3.627 4.469 qs_ot_get_p 119 10.4 0.001 0.002 4.173 4.408 sum_up_and_integrate 119 10.3 0.025 0.028 4.129 4.136 integrate_v_rspace 119 11.3 0.002 0.003 4.105 4.113 apply_preconditioner_dbcsr 119 12.6 0.000 0.001 3.039 4.102 apply_single 119 13.6 0.000 0.000 3.039 4.102 mp_sum_l 7207 12.9 2.027 3.954 2.027 3.954 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.570 3.613 calculate_rho_elec 119 8.7 0.021 0.024 3.570 3.612 calculate_first_density_matrix 1 7.0 0.000 0.002 3.327 3.329 multiply_cannon_sync_h2d 27432 15.5 2.207 2.885 2.207 2.885 qs_ot_p2m_diag 50 11.0 0.009 0.014 2.728 2.748 make_m2s 4572 13.5 0.053 0.055 2.467 2.680 make_images 4572 14.5 0.201 0.238 2.378 2.588 rs_pw_transfer 974 11.9 0.010 0.011 2.475 2.574 init_scf_loop 11 6.9 0.001 0.003 2.473 2.474 ot_diis_step 108 11.5 0.011 0.013 2.403 2.404 jit_kernel_multiply 9 16.2 0.941 2.354 0.941 2.354 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.328 2.330 calculate_dm_sparse 119 9.5 0.000 0.000 2.229 2.300 cp_dbcsr_syevd 50 12.0 0.003 0.003 2.288 2.289 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 2.125 2.217 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.124 2.163 density_rs2pw 119 9.7 0.004 0.004 1.886 1.996 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.963 1.963 grid_integrate_task_list 119 12.3 1.838 1.942 1.838 1.942 cp_fm_redistribute_end 50 14.0 1.614 1.937 1.618 1.938 cp_fm_diag_elpa_base 50 14.0 0.311 1.884 0.319 1.915 potential_pw2rs 119 12.3 0.006 0.006 1.871 1.879 pw_transfer 1439 11.6 0.063 0.067 1.737 1.767 fft_wrap_pw1pw2 1201 12.6 0.007 0.008 1.647 1.679 acc_transpose_blocks 27432 15.5 0.109 0.114 1.277 1.611 prepare_preconditioner 11 7.9 0.000 0.000 1.577 1.604 make_preconditioner 11 8.9 0.000 0.001 1.577 1.604 make_images_data 4572 15.5 0.045 0.051 1.136 1.554 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.477 1.533 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.449 1.456 wfi_extrapolate 11 7.9 0.001 0.003 1.455 1.455 
hybrid_alltoall_any 4725 16.4 0.051 0.112 0.994 1.448 fft3d_ps 1201 14.6 0.497 0.551 1.355 1.384 mp_alltoall_d11v 2130 13.8 1.242 1.359 1.242 1.359 fft_wrap_pw1pw2_140 487 13.2 0.076 0.082 1.287 1.317 mp_allgather_i34 2286 14.5 0.574 1.317 0.574 1.317 grid_collocate_task_list 119 9.7 1.240 1.300 1.240 1.300 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.192 1.242 mp_sum_d 4125 12.0 0.618 1.072 0.618 1.072 acc_transpose_blocks_kernels 27432 16.5 0.180 0.269 0.746 0.987 qs_energies_init_hamiltonians 11 5.9 0.002 0.005 0.952 0.954 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.887 0.900 parallel_gemm_fm 81 9.0 0.000 0.000 0.844 0.848 parallel_gemm_fm_cosma 81 10.0 0.844 0.848 0.844 0.848 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="102", plot="h2o_64_md", label="(8n/6r/2t)", y=39.378000, yerr=0.000000 PlotPoint: name="103", plot="h2o_64_md_mem", label="(8n/6r/2t)", y=464.090909, yerr=1.975051 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/05/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 59.051995E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3143552 0.0% 0.0% 100.0% average stack size 0.0 0.0 46.8 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 522.936320E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 950976 MPI messages size (bytes): total size 203.844256E+09 min size 0.000000E+00 max size 1.638400E+06 average size 214.352688E+03 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 253512 2076770304 8192 < size <= 32768 179424 2939682816 32768 < size <= 131072 181440 14863564800 131072 < size <= 4194304 330176 183964913216 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3672 62660. MP_Allreduce 10225 303. MP_Sync 104 MP_Alltoall 1821 1607811. MP_ISendRecv 22134 57667. MP_Wait 33054 MP_comm_split 50 MP_ISend 9880 92618. MP_IRecv 9880 92618. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.015 0.030 32.600 32.600 qs_mol_dyn_low 1 2.0 0.003 0.003 32.352 32.359 qs_forces 11 3.9 0.002 0.002 32.292 32.293 qs_energies 11 4.9 0.002 0.005 30.741 30.744 scf_env_do_scf 11 5.9 0.001 0.001 25.125 25.125 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 22.551 22.551 dbcsr_multiply_generic 2286 12.5 0.095 0.097 17.217 17.326 velocity_verlet 10 3.0 0.001 0.002 15.127 15.128 qs_scf_new_mos 108 7.5 0.001 0.001 14.708 14.725 qs_scf_loop_do_ot 108 8.5 0.001 0.001 14.707 14.725 multiply_cannon 2286 13.5 0.201 0.213 13.949 14.690 multiply_cannon_loop 2286 14.5 0.640 0.671 13.161 13.997 ot_scf_mini 108 9.5 0.002 0.003 13.982 13.995 ot_mini 108 10.5 0.001 0.001 8.710 8.723 multiply_cannon_multrec 18288 15.5 1.975 3.116 7.457 7.823 qs_ot_get_derivative 108 11.5 0.001 0.001 7.210 7.220 dbcsr_mm_accdrv_process 38222 16.0 4.440 5.811 5.399 6.414 rebuild_ks_matrix 119 8.3 0.000 0.000 6.259 6.282 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.015 6.259 6.281 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.538 5.559 init_scf_run 11 5.9 0.000 0.001 4.411 4.411 scf_env_initial_rho_setup 11 6.9 0.001 0.001 4.411 4.411 mp_waitall_1 169478 16.3 2.779 4.009 2.779 4.009 sum_up_and_integrate 119 10.3 0.030 0.031 3.888 3.892 integrate_v_rspace 119 11.3 0.002 0.002 3.858 3.865 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.732 3.325 calculate_first_density_matrix 1 7.0 0.000 0.002 3.202 3.203 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.179 3.184 calculate_rho_elec 119 8.7 0.030 0.031 3.179 3.183 qs_ot_get_p 119 10.4 0.001 0.001 3.102 3.127 calculate_dm_sparse 119 9.5 0.000 0.000 2.641 2.655 multiply_cannon_metrocomm3 18288 15.5 0.044 0.046 1.472 2.633 init_scf_loop 11 6.9 0.001 0.004 2.557 2.558 jit_kernel_multiply 11 16.0 0.908 2.533 0.908 2.533 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.007 2.347 apply_single 119 13.6 0.000 0.000 2.007 2.347 rs_pw_transfer 974 11.9 0.009 0.010 2.190 2.344 qs_ot_p2m_diag 50 11.0 0.012 0.013 2.055 2.061 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.001 2.009 2.014 make_m2s 4572 13.5 0.045 0.046 1.786 1.931 density_rs2pw 119 9.7 0.004 0.004 1.776 1.927 grid_integrate_task_list 119 12.3 1.801 1.883 1.801 1.883 make_images 4572 14.5 0.191 0.204 1.699 1.842 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.797 1.799 cp_dbcsr_syevd 50 12.0 0.004 0.004 1.787 1.787 prepare_preconditioner 11 7.9 0.000 0.000 1.764 1.771 make_preconditioner 11 8.9 0.000 0.002 1.764 1.771 pw_transfer 1439 11.6 0.064 0.067 1.703 1.713 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.614 1.703 potential_pw2rs 119 12.3 0.007 0.008 1.678 1.685 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.626 1.632 acc_transpose_blocks 18288 15.5 0.075 0.078 1.549 1.625 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.612 1.623 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.560 1.561 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.553 1.561 multiply_cannon_sync_h2d 18288 15.5 1.375 1.542 1.375 1.542 cp_fm_redistribute_end 50 14.0 1.157 1.540 1.158 1.540 cp_fm_diag_elpa_base 50 14.0 0.366 1.485 0.381 1.523 ot_diis_step 108 11.5 0.011 0.011 1.477 1.477 mp_sum_l 7207 12.9 1.095 1.470 1.095 1.470 fft3d_ps 1201 14.6 0.506 0.523 1.299 1.310 fft_wrap_pw1pw2_140 487 13.2 0.086 0.089 1.275 1.285 
grid_collocate_task_list 119 9.7 1.205 1.285 1.205 1.285 acc_transpose_blocks_kernels 18288 16.5 0.208 0.218 1.109 1.173 wfi_extrapolate 11 7.9 0.001 0.001 1.162 1.162 jit_kernel_transpose 5 15.6 0.900 0.963 0.900 0.963 make_images_data 4572 15.5 0.045 0.049 0.796 0.955 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 0.946 0.947 qs_ot_get_orbitals 108 10.5 0.000 0.000 0.885 0.908 hybrid_alltoall_any 4725 16.4 0.055 0.114 0.686 0.884 mp_alltoall_d11v 2130 13.8 0.699 0.874 0.699 0.874 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.791 0.793 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.653 0.719 cp_fm_cholesky_invert 11 10.9 0.709 0.713 0.709 0.713 mp_alltoall_z22v 1201 16.6 0.639 0.706 0.639 0.706 rs_pw_transfer_RS2PW_140 130 11.5 0.118 0.120 0.518 0.671 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="104", plot="h2o_64_md", label="(8n/4r/3t)", y=32.600000, yerr=0.000000 PlotPoint: name="105", plot="h2o_64_md_mem", label="(8n/4r/3t)", y=497.454545, yerr=2.310594 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/06/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 114.044384E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 3805952 0.0% 0.0% 100.0% average stack size 0.0 0.0 38.6 marketing flops 2.107592E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 552.562688E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1042416 MPI messages size (bytes): total size 150.443262E+09 min size 0.000000E+00 max size 1.188816E+06 average size 144.321719E+03 MPI breakdown and total messages size (bytes): size <= 128 228256 0 128 < size <= 8192 126888 1039466496 8192 < size <= 32768 191472 3137077248 32768 < size <= 131072 295800 25899827200 131072 < size <= 4194304 200000 120367247040 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3672 62659. MP_Allreduce 10224 344. MP_Sync 104 MP_Alltoall 1582 2412273. MP_ISendRecv 16422 74133. 
MP_Wait 24482 MP_comm_split 50 MP_ISend 7280 135929. MP_IRecv 7280 135929. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.058 0.106 34.973 34.974 qs_mol_dyn_low 1 2.0 0.003 0.003 34.713 34.720 qs_forces 11 3.9 0.002 0.002 34.643 34.643 qs_energies 11 4.9 0.001 0.003 32.957 32.961 scf_env_do_scf 11 5.9 0.001 0.001 27.370 27.371 scf_env_do_scf_inner_loop 108 6.5 0.044 0.337 24.030 24.031 dbcsr_multiply_generic 2286 12.5 0.101 0.102 17.949 18.049 velocity_verlet 10 3.0 0.002 0.002 17.278 17.281 qs_scf_new_mos 108 7.5 0.001 0.001 15.754 15.803 qs_scf_loop_do_ot 108 8.5 0.001 0.001 15.754 15.802 multiply_cannon 2286 13.5 0.227 0.259 14.483 14.900 ot_scf_mini 108 9.5 0.002 0.003 14.830 14.884 multiply_cannon_loop 2286 14.5 0.945 0.973 13.567 14.020 ot_mini 108 10.5 0.001 0.001 9.111 9.179 multiply_cannon_multrec 27432 15.5 2.323 3.019 8.598 8.914 dbcsr_mm_accdrv_process 47916 15.9 5.351 6.740 6.182 7.585 qs_ot_get_derivative 108 11.5 0.001 0.001 7.313 7.368 rebuild_ks_matrix 119 8.3 0.000 0.000 6.347 6.405 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.013 6.346 6.404 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.637 5.687 init_scf_run 11 5.9 0.000 0.001 4.162 4.162 scf_env_initial_rho_setup 11 6.9 0.001 0.001 4.161 4.162 sum_up_and_integrate 119 10.3 0.035 0.038 3.651 3.659 integrate_v_rspace 119 11.3 0.002 0.002 3.616 3.624 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.474 3.544 calculate_rho_elec 119 8.7 0.040 0.046 3.474 3.544 init_scf_loop 11 6.9 0.000 0.002 3.321 3.322 qs_ot_get_p 119 10.4 0.001 0.001 3.138 3.214 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.782 3.189 calculate_first_density_matrix 1 7.0 0.000 0.001 2.835 2.837 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.034 2.604 apply_single 119 13.6 0.000 0.000 2.034 2.604 prepare_preconditioner 11 7.9 0.000 0.000 2.498 2.504 make_preconditioner 11 8.9 0.000 0.001 2.498 2.504 mp_waitall_1 145218 16.4 1.966 2.453 1.966 2.453 make_full_inverse_cholesky 11 9.9 0.000 0.000 2.111 2.431 make_m2s 4572 13.5 0.054 0.056 2.100 2.204 calculate_dm_sparse 119 9.5 0.000 0.000 2.098 2.152 make_images 4572 14.5 0.273 0.335 1.992 2.093 rs_pw_transfer 974 11.9 0.009 0.009 1.935 2.027 qs_ot_p2m_diag 50 11.0 0.015 0.023 1.910 1.919 grid_integrate_task_list 119 12.3 1.835 1.910 1.835 1.910 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.869 1.897 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.804 1.805 ot_diis_step 108 11.5 0.012 0.012 1.757 1.758 density_rs2pw 119 9.7 0.003 0.004 1.673 1.745 pw_transfer 1439 11.6 0.063 0.067 1.702 1.732 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.641 1.655 mp_sum_l 7207 12.9 0.951 1.647 0.951 1.647 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.612 1.645 acc_transpose_blocks 27432 15.5 0.111 0.112 1.488 1.619 cp_dbcsr_syevd 50 12.0 0.007 0.021 1.599 1.599 jit_kernel_multiply 8 16.0 0.772 1.485 0.772 1.485 potential_pw2rs 119 12.3 0.008 0.009 1.436 1.441 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.377 1.381 fft_wrap_pw1pw2_140 487 13.2 0.084 0.091 1.330 1.363 cp_fm_redistribute_end 50 14.0 0.898 1.343 0.899 1.343 cp_fm_diag_elpa_base 50 14.0 0.423 1.287 0.442 1.328 fft3d_ps 1201 14.6 0.533 0.584 1.294 1.321 grid_collocate_task_list 119 9.7 1.221 1.310 1.221 1.310 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.004 1.291 1.299 
wfi_extrapolate 11 7.9 0.001 0.001 1.280 1.280 multiply_cannon_metrocomm3 27432 15.5 0.038 0.040 0.757 1.202 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 1.179 1.179 mp_alltoall_d11v 2130 13.8 0.990 1.116 0.990 1.116 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.088 1.108 cp_fm_upper_to_full 72 13.5 0.792 1.096 0.792 1.096 multiply_cannon_sync_h2d 27432 15.5 1.006 1.083 1.006 1.083 dbcsr_complete_redistribute 329 12.2 0.119 0.145 0.745 1.015 acc_transpose_blocks_kernels 27432 16.5 0.265 0.273 0.866 0.999 make_images_data 4572 15.5 0.045 0.048 0.814 0.928 cp_fm_cholesky_decompose 22 10.9 0.556 0.889 0.556 0.889 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.793 0.871 hybrid_alltoall_any 4725 16.4 0.062 0.150 0.689 0.865 copy_fm_to_dbcsr 176 11.2 0.001 0.001 0.547 0.814 parallel_gemm_fm 81 9.0 0.000 0.000 0.492 0.810 parallel_gemm_fm_cosma 81 10.0 0.492 0.809 0.492 0.809 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.798 0.804 multiply_cannon_metrocomm1 27432 15.5 0.033 0.034 0.258 0.765 make_basis_sm 11 9.8 0.227 0.419 0.764 0.765 jit_kernel_transpose 5 15.6 0.601 0.737 0.601 0.737 cp_fm_cholesky_invert 11 10.9 0.727 0.730 0.727 0.730 qs_env_update_s_mstruct 11 6.9 0.000 0.001 0.687 0.718 mp_alltoall_i22 627 13.8 0.417 0.714 0.417 0.714 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="106", plot="h2o_64_md", label="(8n/3r/4t)", y=34.974000, yerr=0.000000 PlotPoint: name="107", plot="h2o_64_md_mem", label="(8n/3r/4t)", y=524.727273, yerr=3.816017 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/07/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 117.977176E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 1384136 0.0% 0.0% 100.0% average stack size 0.0 0.0 106.2 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 594.391040E+06 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 219456 MPI messages size (bytes): total size 97.042514E+09 min size 0.000000E+00 max size 3.276800E+06 average size 442.195750E+03 MPI breakdown and total messages size (bytes): size <= 128 1452 0 128 < size <= 8192 0 0 8192 < size <= 32768 101892 3336634368 32768 < size <= 131072 0 0 131072 < size <= 4194304 116112 93705670464 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 8156 20. MP_Alltoall 8655 64935. MP_ISend 36532 168375. MP_IRecv 36532 168349. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3672 62658. MP_Allreduce 10224 344. MP_Sync 104 MP_Alltoall 1582 3682667. MP_ISendRecv 10710 94533. MP_Wait 16690 MP_comm_split 50 MP_ISend 5200 225425. MP_IRecv 5200 225425. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.013 0.031 27.012 27.012 qs_mol_dyn_low 1 2.0 0.003 0.003 26.815 26.823 qs_forces 11 3.9 0.002 0.002 26.760 26.760 qs_energies 11 4.9 0.002 0.002 25.091 25.094 scf_env_do_scf 11 5.9 0.000 0.001 20.432 20.432 scf_env_do_scf_inner_loop 108 6.5 0.002 0.006 18.000 18.001 velocity_verlet 10 3.0 0.002 0.002 13.790 13.793 dbcsr_multiply_generic 2286 12.5 0.094 0.097 12.040 12.117 qs_scf_new_mos 108 7.5 0.001 0.001 10.645 10.671 qs_scf_loop_do_ot 108 8.5 0.001 0.001 10.645 10.670 multiply_cannon 2286 13.5 0.235 0.243 9.631 10.067 ot_scf_mini 108 9.5 0.002 0.002 9.985 10.013 multiply_cannon_loop 2286 14.5 0.332 0.342 8.739 8.918 multiply_cannon_multrec 9144 15.5 1.701 1.982 5.940 6.157 rebuild_ks_matrix 119 8.3 0.000 0.000 5.680 5.702 qs_ks_build_kohn_sham_matrix 119 9.3 0.012 0.013 5.680 5.701 ot_mini 108 10.5 0.001 0.001 5.643 5.675 qs_ks_update_qs_env 119 7.6 0.001 0.001 5.067 5.086 qs_ot_get_derivative 108 11.5 0.001 0.001 4.380 4.407 dbcsr_mm_accdrv_process 12550 15.8 3.489 4.176 4.139 4.232 sum_up_and_integrate 119 10.3 0.037 0.041 3.450 3.454 integrate_v_rspace 119 11.3 0.002 0.003 3.412 3.417 init_scf_run 11 5.9 0.000 0.001 3.250 3.250 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.249 3.250 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.113 3.121 calculate_rho_elec 119 8.7 0.059 0.061 3.113 3.120 qs_ot_get_p 119 10.4 0.001 0.001 2.605 2.642 init_scf_loop 11 6.9 0.000 0.000 2.413 2.414 calculate_first_density_matrix 1 7.0 0.000 0.000 2.162 2.163 mp_waitall_1 121218 16.5 1.422 1.937 1.422 1.937 grid_integrate_task_list 119 12.3 1.839 1.932 1.839 1.932 
make_m2s 4572 13.5 0.035 0.036 1.663 1.830 calculate_dm_sparse 119 9.5 0.000 0.000 1.757 1.775 make_images 4572 14.5 0.268 0.302 1.572 1.737 qs_ot_p2m_diag 50 11.0 0.022 0.023 1.720 1.722 prepare_preconditioner 11 7.9 0.000 0.000 1.680 1.684 make_preconditioner 11 8.9 0.000 0.000 1.680 1.684 pw_transfer 1439 11.6 0.063 0.066 1.656 1.664 rs_pw_transfer 974 11.9 0.008 0.008 1.531 1.618 density_rs2pw 119 9.7 0.003 0.003 1.529 1.614 make_full_inverse_cholesky 11 9.9 0.000 0.000 1.572 1.598 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.565 1.574 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.549 1.550 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 1.529 1.544 cp_dbcsr_syevd 50 12.0 0.004 0.004 1.531 1.531 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.366 1.375 grid_collocate_task_list 119 9.7 1.270 1.343 1.270 1.343 jit_kernel_multiply 7 15.7 0.612 1.306 0.612 1.306 fft_wrap_pw1pw2_140 487 13.2 0.082 0.085 1.286 1.295 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.266 1.266 potential_pw2rs 119 12.3 0.010 0.010 1.252 1.255 ot_diis_step 108 11.5 0.012 0.013 1.251 1.251 cp_fm_redistribute_end 50 14.0 0.629 1.249 0.629 1.249 fft3d_ps 1201 14.6 0.537 0.550 1.232 1.239 cp_fm_diag_elpa_base 50 14.0 0.576 1.181 0.619 1.238 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 1.206 1.207 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.188 1.193 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 1.126 1.146 apply_single 119 13.6 0.000 0.000 1.125 1.146 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.098 1.111 wfi_extrapolate 11 7.9 0.001 0.001 1.043 1.043 hybrid_alltoall_any 4725 16.4 0.062 0.176 0.755 1.006 make_images_data 4572 15.5 0.039 0.042 0.771 0.970 acc_transpose_blocks 9144 15.5 0.038 0.040 0.930 0.940 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.867 0.918 mp_alltoall_d11v 2130 13.8 0.774 0.890 0.774 0.890 cp_fm_cholesky_invert 11 10.9 0.829 0.831 0.829 0.831 multiply_cannon_sync_h2d 9144 15.5 0.707 0.796 0.707 0.796 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.733 0.736 qs_env_update_s_mstruct 11 6.9 0.000 0.000 0.662 0.711 qs_ot_get_orbitals 108 10.5 0.000 0.000 0.700 0.708 acc_transpose_blocks_kernels 9144 16.5 0.116 0.119 0.688 0.693 multiply_cannon_metrocomm3 9144 15.5 0.019 0.019 0.317 0.671 mp_allgather_i34 2286 14.5 0.222 0.617 0.222 0.617 mp_alltoall_z22v 1201 16.6 0.570 0.601 0.570 0.601 jit_kernel_transpose 5 15.6 0.572 0.577 0.572 0.577 qs_create_task_list 11 7.9 0.000 0.000 0.543 0.567 generate_qs_task_list 11 8.9 0.190 0.213 0.543 0.567 dbcsr_complete_redistribute 329 12.2 0.186 0.207 0.517 0.544 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="108", plot="h2o_64_md", label="(8n/2r/6t)", y=27.012000, yerr=0.000000 PlotPoint: name="109", plot="h2o_64_md_mem", label="(8n/2r/6t)", y=565.909091, yerr=1.781447 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/08/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 26877100032 0.0% 0.0% 100.0% flops 9 x 9 x 32 44168260608 0.0% 0.0% 100.0% flops 22 x 9 x 32 53835724800 0.0% 0.0% 100.0% flops 9 x 22 x 32 53885500416 0.0% 0.0% 100.0% flops 32 x 32 x 9 63568871424 0.0% 0.0% 100.0% flops 22 x 22 x 
32 67007283200 0.0% 0.0% 100.0% flops 32 x 32 x 22 77695287296 0.0% 0.0% 100.0% flops 9 x 32 x 32 78422999040 0.0% 0.0% 100.0% flops 22 x 32 x 32 95850332160 0.0% 0.0% 100.0% flops 9 x 32 x 9 266263676928 0.0% 0.0% 100.0% flops 22 x 32 x 9 326697440256 0.0% 0.0% 100.0% flops 9 x 32 x 22 326697440256 0.0% 0.0% 100.0% flops 22 x 32 x 22 399918497792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 1.880888E+12 0.0% 0.0% 100.0% flops max/rank 235.585836E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 146984760 0.0% 0.0% 100.0% number of processed stacks 1388964 0.0% 0.0% 100.0% average stack size 0.0 0.0 105.8 marketing flops 2.107587E+12 ------------------------------------------------------------------------------- # multiplications 2286 max memory usage/rank 765.112320E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 91440 MPI messages size (bytes): total size 85.748679E+09 min size 0.000000E+00 max size 6.553600E+06 average size 937.758938E+03 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 21148 692256768 32768 < size <= 131072 19224 1259864064 131072 < size <= 4194304 41040 21941452800 4194304 < size <= 16777216 9456 61855174464 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3622 63729. MP_Allreduce 10074 433. MP_Sync 54 MP_Alltoall 1582 7383731. MP_ISendRecv 4998 189067. MP_Wait 8898 MP_ISend 3120 546875. MP_IRecv 3120 546875. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.016 0.038 41.155 41.156 qs_mol_dyn_low 1 2.0 0.003 0.003 40.765 40.772 qs_forces 11 3.9 0.001 0.002 40.707 40.708 qs_energies 11 4.9 0.001 0.001 38.767 38.771 scf_env_do_scf 11 5.9 0.000 0.001 32.415 32.415 scf_env_do_scf_inner_loop 108 6.5 0.003 0.006 24.789 24.790 velocity_verlet 10 3.0 0.002 0.002 22.551 22.557 dbcsr_multiply_generic 2286 12.5 0.100 0.102 18.028 18.291 qs_scf_new_mos 108 7.5 0.001 0.001 15.998 16.097 qs_scf_loop_do_ot 108 8.5 0.001 0.001 15.997 16.097 multiply_cannon 2286 13.5 0.304 0.318 14.269 15.241 ot_scf_mini 108 9.5 0.002 0.002 14.918 15.019 multiply_cannon_loop 2286 14.5 0.345 0.351 13.025 14.043 ot_mini 108 10.5 0.001 0.001 9.084 9.204 multiply_cannon_multrec 9144 15.5 3.419 4.803 8.820 9.012 init_scf_loop 11 6.9 0.000 0.000 7.599 7.603 qs_ot_get_derivative 108 11.5 0.001 0.001 7.074 7.176 rebuild_ks_matrix 119 8.3 0.000 0.000 6.766 6.909 qs_ks_build_kohn_sham_matrix 119 9.3 0.013 0.013 6.766 6.909 prepare_preconditioner 11 7.9 0.000 0.000 6.692 6.705 make_preconditioner 11 8.9 0.000 0.000 6.692 6.705 dbcsr_mm_accdrv_process 12550 15.8 4.191 6.123 5.278 6.695 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.290 6.583 qs_ks_update_qs_env 119 7.6 0.001 0.001 6.120 6.249 cp_fm_upper_to_full 72 14.2 3.147 4.491 3.147 4.491 mp_waitall_1 97218 16.6 2.704 3.963 2.704 3.963 init_scf_run 11 5.9 0.000 0.001 3.908 3.908 scf_env_initial_rho_setup 11 6.9 0.001 0.001 3.908 3.908 qs_rho_update_rho_low 119 7.7 0.001 0.001 3.631 3.666 calculate_rho_elec 119 8.7 0.118 0.121 3.630 3.665 sum_up_and_integrate 119 10.3 0.064 0.066 3.639 3.644 integrate_v_rspace 119 11.3 0.003 0.003 3.574 3.581 qs_ot_get_p 119 10.4 0.001 0.001 3.123 3.263 qs_ot_get_derivative_taylor 59 13.0 0.001 0.001 2.814 3.228 multiply_cannon_metrocomm3 9144 15.5 0.020 0.020 1.515 2.765 dbcsr_complete_redistribute 329 12.2 0.288 0.293 1.936 2.736 calculate_first_density_matrix 1 7.0 0.000 0.000 2.510 2.516 calculate_dm_sparse 119 9.5 0.000 0.000 2.433 2.456 copy_fm_to_dbcsr 176 11.2 0.001 0.001 1.631 2.430 make_m2s 4572 13.5 0.039 0.039 2.211 2.377 apply_preconditioner_dbcsr 119 12.6 0.000 0.000 2.161 2.346 apply_single 119 13.6 0.000 0.000 2.161 2.346 make_images 4572 14.5 0.353 0.382 2.089 2.254 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 1.397 2.189 qs_energies_init_hamiltonians 11 5.9 0.001 0.001 2.182 2.182 mp_alltoall_i22 627 13.8 1.379 2.170 1.379 2.170 jit_kernel_multiply 10 15.6 1.061 2.118 1.061 2.118 grid_integrate_task_list 119 12.3 2.004 2.028 2.004 2.028 pw_transfer 1439 11.6 0.066 0.067 2.008 2.011 ot_diis_step 108 11.5 0.014 0.015 1.985 1.985 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 1.942 1.942 fft_wrap_pw1pw2 1201 12.6 0.008 0.008 1.912 1.915 qs_ot_p2m_diag 50 11.0 0.043 0.043 1.866 1.868 density_rs2pw 119 9.7 0.003 0.003 1.740 1.758 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 1.687 1.737 qs_ot_get_derivative_diag 49 12.0 0.001 0.001 1.664 1.716 mp_sum_l 7207 12.9 1.009 1.649 1.009 1.649 fft_wrap_pw1pw2_140 487 13.2 0.088 0.090 1.599 1.604 cp_dbcsr_syevd 50 12.0 0.004 0.004 1.591 1.591 fft3d_ps 1201 14.6 0.567 0.581 1.546 1.548 qs_env_update_s_mstruct 11 6.9 0.000 0.000 1.461 1.529 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.000 0.000 1.500 1.513 
grid_collocate_task_list 119 9.7 1.446 1.457 1.446 1.457 cp_fm_cholesky_invert 11 10.9 1.411 1.414 1.411 1.414 rs_pw_transfer 974 11.9 0.009 0.009 1.356 1.384 wfi_extrapolate 11 7.9 0.001 0.001 1.334 1.334 cp_fm_diag_elpa 50 13.0 0.000 0.000 1.309 1.309 cp_fm_diag_elpa_base 50 14.0 1.166 1.218 1.307 1.307 acc_transpose_blocks 9144 15.5 0.038 0.038 1.099 1.281 hybrid_alltoall_any 4725 16.4 0.086 0.147 1.067 1.261 make_images_data 4572 15.5 0.042 0.045 1.018 1.226 potential_pw2rs 119 12.3 0.014 0.014 1.190 1.192 mp_alltoall_d11v 2130 13.8 1.117 1.188 1.117 1.188 qs_ot_get_orbitals 108 10.5 0.000 0.000 1.133 1.153 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.000 0.985 1.049 multiply_cannon_sync_h2d 9144 15.5 1.039 1.041 1.039 1.041 acc_transpose_blocks_kernels 9144 16.5 0.117 0.118 0.849 1.031 build_core_hamiltonian_matrix 11 6.9 0.001 0.001 0.591 0.965 qs_create_task_list 11 7.9 0.000 0.000 0.942 0.952 generate_qs_task_list 11 8.9 0.372 0.393 0.942 0.952 jit_kernel_transpose 5 15.6 0.733 0.914 0.733 0.914 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 0.858 0.872 mp_alltoall_z22v 1201 16.6 0.844 0.864 0.844 0.864 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="110", plot="h2o_64_md", label="(8n/1r/12t)", y=41.156000, yerr=0.000000 PlotPoint: name="111", plot="h2o_64_md_mem", label="(8n/1r/12t)", y=719.090909, yerr=14.462834 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/09/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 198.287135E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 8410880 0.0% 0.0% 100.0% average stack size 0.0 0.0 117.0 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 502.632448E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 8483040 MPI messages size (bytes): total size 1.160510E+12 min size 0.000000E+00 max size 1.161504E+06 average size 136.803609E+03 MPI breakdown and total messages size (bytes): size <= 128 1836752 0 128 < size <= 8192 1040592 8524529664 8192 < size <= 32768 1486976 24362614784 32768 < size <= 131072 2491776 216971345920 131072 < size <= 4194304 1626944 910632720448 4194304 < size <= 16777216 0 0 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3521 65372. MP_Allreduce 9840 486. MP_Sync 100 MP_Alltoall 1938 291232. MP_ISendRecv 41800 9096. MP_Wait 58168 MP_comm_split 48 MP_ISend 14300 82312. MP_IRecv 14300 82312. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.012 0.029 83.564 83.565 qs_mol_dyn_low 1 2.0 0.003 0.003 83.308 83.318 qs_forces 11 3.9 0.002 0.002 82.757 82.758 qs_energies 11 4.9 0.002 0.002 79.924 79.943 scf_env_do_scf 11 5.9 0.000 0.001 70.699 70.701 scf_env_do_scf_inner_loop 99 6.5 0.002 0.006 65.182 65.183 dbcsr_multiply_generic 2055 12.4 0.107 0.111 53.129 53.399 qs_scf_new_mos 99 7.5 0.000 0.001 48.528 48.645 qs_scf_loop_do_ot 99 8.5 0.000 0.001 48.528 48.645 ot_scf_mini 99 9.5 0.002 0.002 46.149 46.235 multiply_cannon 2055 13.4 0.176 0.183 43.419 44.275 multiply_cannon_loop 2055 14.4 1.562 1.594 42.419 43.362 velocity_verlet 10 3.0 0.001 0.002 42.328 42.334 ot_mini 99 10.5 0.001 0.001 28.207 28.290 qs_ot_get_derivative 99 11.5 0.001 0.001 21.402 21.513 multiply_cannon_multrec 49320 15.4 12.106 13.011 17.790 18.784 rebuild_ks_matrix 110 8.3 0.000 0.000 14.273 14.454 qs_ks_build_kohn_sham_matrix 110 9.3 0.011 0.011 14.273 14.454 mp_waitall_1 241148 16.1 11.879 12.825 11.879 12.825 qs_ks_update_qs_env 110 7.6 0.001 0.001 12.551 12.713 multiply_cannon_sync_h2d 49320 15.4 9.933 10.601 9.933 10.601 qs_ot_get_p 110 10.4 0.001 0.001 9.351 9.444 multiply_cannon_metrocomm3 49320 15.4 0.079 0.084 7.173 8.375 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 7.437 8.104 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 7.303 7.669 apply_single 110 13.6 0.000 0.000 7.303 7.669 init_scf_run 11 5.9 0.000 0.001 7.221 7.222 scf_env_initial_rho_setup 11 6.9 0.001 0.001 7.221 7.222 sum_up_and_integrate 110 10.3 0.036 0.043 6.891 6.905 integrate_v_rspace 110 11.3 0.002 0.003 6.855 6.877 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 6.820 6.874 ot_diis_step 99 11.5 0.006 0.006 6.545 6.545 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.049 6.165 calculate_rho_elec 110 8.6 0.020 0.024 6.048 6.165 dbcsr_mm_accdrv_process 87628 16.1 2.072 2.173 5.565 6.105 qs_ot_p2m_diag 48 11.0 0.012 0.019 6.042 6.076 mp_sum_l 6514 
12.8 5.429 6.029 5.429 6.029 init_scf_loop 11 6.9 0.000 0.000 5.489 5.489 cp_dbcsr_syevd 48 12.0 0.016 0.031 5.073 5.074 cp_fm_diag_elpa 48 13.0 0.000 0.000 4.591 4.606 cp_fm_redistribute_end 48 14.0 3.983 4.553 3.986 4.554 cp_fm_diag_elpa_base 48 14.0 0.561 4.495 0.565 4.519 calculate_dm_sparse 110 9.5 0.000 0.001 3.897 3.991 wfi_extrapolate 11 7.9 0.001 0.001 3.968 3.968 make_m2s 4110 13.4 0.061 0.066 3.818 3.967 multiply_cannon_metrocomm1 49320 15.4 0.056 0.059 2.698 3.943 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 3.924 3.928 make_images 4110 14.4 0.178 0.193 3.723 3.875 rs_pw_transfer 902 11.9 0.011 0.013 3.636 3.826 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 3.578 3.608 jit_kernel_multiply 13 15.9 3.209 3.538 3.209 3.538 grid_integrate_task_list 110 12.3 3.254 3.414 3.254 3.414 prepare_preconditioner 11 7.9 0.000 0.000 3.361 3.385 make_preconditioner 11 8.9 0.000 0.000 3.361 3.384 qs_ot_get_orbitals 99 10.5 0.000 0.001 3.341 3.383 density_rs2pw 110 9.6 0.004 0.004 3.158 3.363 make_full_inverse_cholesky 11 9.9 0.000 0.000 3.142 3.179 calculate_first_density_matrix 1 7.0 0.000 0.000 3.164 3.173 pw_transfer 1331 11.6 0.054 0.063 2.740 2.795 fft_wrap_pw1pw2 1111 12.6 0.007 0.008 2.653 2.710 potential_pw2rs 110 12.3 0.006 0.007 2.506 2.527 mp_alltoall_d11v 2046 13.8 2.043 2.421 2.043 2.421 fft_wrap_pw1pw2_140 451 13.1 0.169 0.189 2.247 2.306 acc_transpose_blocks 49320 15.4 0.222 0.232 2.184 2.263 fft3d_ps 1111 14.6 0.754 0.839 2.191 2.237 grid_collocate_task_list 110 9.6 2.088 2.189 2.088 2.189 mp_waitany 14300 13.8 1.803 2.094 1.803 2.094 mp_sum_d 3879 11.9 1.529 2.024 1.529 2.024 make_images_data 4110 15.4 0.042 0.045 1.758 1.917 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.862 1.878 hybrid_alltoall_any 4261 16.3 0.082 0.480 1.528 1.824 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.791 1.813 cp_fm_cholesky_invert 11 10.9 1.808 1.811 1.808 1.811 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="200", plot="h2o_128_md", label="(8n/12r/1t)", y=83.565000, yerr=0.000000 PlotPoint: name="201", plot="h2o_128_md_mem", label="(8n/12r/1t)", y=476.727273, yerr=3.164890 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/10/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 390.715586E+09 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 5019072 0.0% 0.0% 100.0% average stack size 0.0 0.0 196.1 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 584.855552E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1972800 MPI messages size (bytes): total size 1.077520E+12 min size 0.000000E+00 max size 4.537280E+06 average size 546.188250E+03 MPI breakdown and total messages size (bytes): size <= 128 14916 0 128 < size <= 8192 222984 1826684928 8192 < size <= 32768 520356 13399818240 32768 < size <= 131072 372336 35386294272 131072 < size <= 4194304 787758 788321309808 4194304 < size <= 16777216 54450 238588003280 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3521 65587. MP_Allreduce 9839 562. MP_Sync 100 MP_Alltoall 1717 2775622. MP_ISendRecv 20680 26400. MP_Wait 32692 MP_comm_split 48 MP_ISend 10164 155761. MP_IRecv 10164 155761. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.011 0.027 68.950 68.951 qs_mol_dyn_low 1 2.0 0.003 0.003 68.496 68.762 qs_forces 11 3.9 0.002 0.002 68.424 68.425 qs_energies 11 4.9 0.001 0.001 65.132 65.136 scf_env_do_scf 11 5.9 0.001 0.001 55.999 56.002 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 48.366 48.367 dbcsr_multiply_generic 2055 12.4 0.114 0.119 38.488 38.658 velocity_verlet 10 3.0 0.001 0.002 35.863 35.864 multiply_cannon 2055 13.4 0.222 0.243 31.843 33.008 qs_scf_new_mos 99 7.5 0.001 0.001 32.616 32.748 qs_scf_loop_do_ot 99 8.5 0.001 0.001 32.615 32.747 multiply_cannon_loop 2055 14.4 0.932 0.954 30.546 31.532 ot_scf_mini 99 9.5 0.003 0.003 30.961 31.080 ot_mini 99 10.5 0.001 0.001 18.427 18.554 multiply_cannon_multrec 24660 15.4 7.635 9.196 14.467 16.147 rebuild_ks_matrix 110 8.3 0.000 0.001 13.590 13.715 qs_ks_build_kohn_sham_matrix 110 9.3 0.012 0.014 13.589 13.714 qs_ot_get_derivative 99 11.5 0.001 0.001 12.559 12.680 qs_ks_update_qs_env 110 7.6 0.001 0.001 11.982 12.093 mp_waitall_1 186928 16.3 7.973 10.534 7.973 10.534 multiply_cannon_sync_h2d 24660 15.4 7.071 7.964 7.071 7.964 multiply_cannon_metrocomm3 24660 15.4 0.069 0.070 5.272 7.851 init_scf_loop 11 6.9 0.000 0.000 7.598 7.598 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 6.641 7.436 apply_single 110 13.6 0.000 0.001 6.641 7.435 dbcsr_mm_accdrv_process 52282 16.1 5.180 6.063 6.672 7.132 init_scf_run 11 5.9 0.000 0.001 6.767 6.768 scf_env_initial_rho_setup 11 6.9 0.001 0.001 6.767 6.768 sum_up_and_integrate 110 10.3 0.053 0.060 6.477 6.493 integrate_v_rspace 110 11.3 0.002 0.002 6.424 6.443 qs_ot_get_p 110 10.4 0.001 0.001 5.915 6.049 ot_diis_step 99 11.5 0.010 0.010 5.819 5.819 qs_rho_update_rho_low 110 7.6 0.001 0.001 5.688 5.697 calculate_rho_elec 110 8.6 0.039 0.047 5.687 5.697 prepare_preconditioner 11 7.9 0.000 0.000 5.590 5.608 make_preconditioner 11 8.9 0.000 0.000 5.590 5.608 
qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 4.702 5.494 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.136 5.290 make_m2s 4110 13.4 0.057 0.061 4.193 4.680 make_images 4110 14.4 0.401 0.449 4.084 4.566 qs_ot_p2m_diag 48 11.0 0.029 0.044 4.078 4.098 cp_dbcsr_syevd 48 12.0 0.003 0.004 3.652 3.653 calculate_dm_sparse 110 9.5 0.001 0.001 3.565 3.598 wfi_extrapolate 11 7.9 0.001 0.001 3.479 3.479 pw_transfer 1331 11.6 0.066 0.072 3.251 3.394 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 3.332 3.334 grid_integrate_task_list 110 12.3 3.160 3.326 3.160 3.326 fft_wrap_pw1pw2 1111 12.6 0.008 0.008 3.144 3.282 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 3.156 3.217 calculate_first_density_matrix 1 7.0 0.000 0.000 3.203 3.206 density_rs2pw 110 9.6 0.004 0.004 2.981 3.154 cp_fm_diag_elpa 48 13.0 0.000 0.000 3.127 3.128 cp_fm_redistribute_end 48 14.0 2.328 3.102 2.330 3.102 cp_fm_diag_elpa_base 48 14.0 0.737 2.991 0.769 3.077 rs_pw_transfer 902 11.9 0.012 0.014 2.863 3.063 jit_kernel_multiply 12 16.3 1.145 3.061 1.145 3.061 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.946 2.987 hybrid_alltoall_any 4261 16.3 0.102 0.445 2.117 2.878 make_images_data 4110 15.4 0.047 0.050 2.371 2.847 fft_wrap_pw1pw2_140 451 13.1 0.200 0.217 2.664 2.801 fft3d_ps 1111 14.6 1.072 1.273 2.501 2.626 cp_fm_cholesky_invert 11 10.9 2.560 2.567 2.560 2.567 mp_sum_l 6514 12.8 1.822 2.407 1.822 2.407 potential_pw2rs 110 12.3 0.008 0.009 2.332 2.350 grid_collocate_task_list 110 9.6 2.120 2.288 2.120 2.288 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 2.143 2.152 mp_alltoall_d11v 2046 13.8 1.827 2.063 1.827 2.063 qs_ot_get_orbitals 99 10.5 0.000 0.001 1.939 1.958 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 1.860 1.861 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.771 1.789 mp_allgather_i34 2055 14.4 0.626 1.689 0.626 1.689 multiply_cannon_metrocomm4 22605 15.4 0.075 0.078 0.780 1.657 acc_transpose_blocks 24660 15.4 0.109 0.114 1.521 1.550 mp_irecv_dv 57340 16.2 0.655 1.541 0.655 1.541 dbcsr_complete_redistribute 325 12.2 0.257 0.353 1.218 1.480 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.001 1.370 1.476 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="202", plot="h2o_128_md", label="(8n/6r/2t)", y=68.951000, yerr=0.000000 PlotPoint: name="203", plot="h2o_128_md_mem", label="(8n/6r/2t)", y=553.454545, yerr=6.485840 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/11/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. 
stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 404.681598E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 3346752 0.0% 0.0% 100.0% average stack size 0.0 0.0 294.1 marketing flops 15.646297E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 662.089728E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 854880 MPI messages size (bytes): total size 708.322787E+09 min size 0.000000E+00 max size 6.553600E+06 average size 828.564000E+03 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 0 0 8192 < size <= 32768 222984 7302414336 32768 < size <= 131072 153888 10085203968 131072 < size <= 4194304 389376 200257044480 4194304 < size <= 16777216 82208 490679162176 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3521 65578. MP_Allreduce 9838 559. MP_Sync 100 MP_Alltoall 1496 4511006. MP_ISendRecv 13640 27424. MP_Wait 32318 MP_comm_split 48 MP_ISend 17072 115022. MP_IRecv 17072 115022. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.032 0.049 60.338 60.339 qs_mol_dyn_low 1 2.0 0.003 0.003 60.064 60.073 qs_forces 11 3.9 0.005 0.008 59.552 59.554 qs_energies 11 4.9 0.002 0.007 56.381 56.387 scf_env_do_scf 11 5.9 0.001 0.001 48.466 48.467 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 39.747 39.749 velocity_verlet 10 3.0 0.162 0.324 32.712 32.718 dbcsr_multiply_generic 2055 12.4 0.109 0.112 28.751 29.024 qs_scf_new_mos 99 7.5 0.001 0.001 25.292 25.386 qs_scf_loop_do_ot 99 8.5 0.001 0.001 25.292 25.385 ot_scf_mini 99 9.5 0.002 0.003 24.073 24.183 multiply_cannon 2055 13.4 0.213 0.222 22.389 23.497 multiply_cannon_loop 2055 14.4 0.620 0.636 21.218 22.305 ot_mini 99 10.5 0.001 0.001 13.847 13.959 rebuild_ks_matrix 110 8.3 0.000 0.000 12.139 12.278 qs_ks_build_kohn_sham_matrix 110 9.3 0.012 0.014 12.139 12.278 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.702 10.827 multiply_cannon_multrec 16440 15.4 3.934 4.836 9.979 10.766 mp_waitall_1 146766 16.3 7.149 10.082 7.149 10.082 qs_ot_get_derivative 99 11.5 0.001 0.001 9.429 9.537 init_scf_loop 11 6.9 0.013 0.104 8.682 8.687 multiply_cannon_metrocomm3 16440 15.4 0.043 0.045 4.260 7.110 prepare_preconditioner 11 7.9 0.000 0.000 6.821 6.842 make_preconditioner 11 8.9 0.000 0.000 6.821 6.842 make_full_inverse_cholesky 11 9.9 0.000 0.000 6.175 6.531 sum_up_and_integrate 110 10.3 0.061 0.062 6.380 6.394 integrate_v_rspace 110 11.3 0.002 0.002 6.319 6.334 dbcsr_mm_accdrv_process 34862 16.1 4.728 5.697 5.899 6.085 qs_ot_get_p 110 10.4 0.001 0.001 5.428 5.575 qs_rho_update_rho_low 110 7.6 0.001 0.001 5.520 5.531 calculate_rho_elec 110 8.6 0.058 0.058 5.519 5.530 init_scf_run 11 5.9 0.000 0.001 5.499 5.500 scf_env_initial_rho_setup 11 6.9 0.001 0.001 5.499 5.499 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 5.012 5.474 
apply_single 110 13.6 0.000 0.000 5.012 5.474 make_m2s 4110 13.4 0.050 0.052 4.198 4.554 make_images 4110 14.4 0.394 0.513 4.081 4.434 ot_diis_step 99 11.5 0.010 0.011 4.390 4.390 multiply_cannon_sync_h2d 16440 15.4 3.688 4.221 3.688 4.221 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 3.116 3.847 qs_ot_p2m_diag 48 11.0 0.042 0.044 3.806 3.810 cp_dbcsr_syevd 48 12.0 0.003 0.004 3.474 3.474 grid_integrate_task_list 110 12.3 3.202 3.379 3.202 3.379 pw_transfer 1331 11.6 0.065 0.071 3.174 3.180 fft_wrap_pw1pw2 1111 12.6 0.008 0.008 3.068 3.076 density_rs2pw 110 9.6 0.004 0.004 2.800 2.969 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.959 2.960 wfi_extrapolate 11 7.9 0.001 0.001 2.945 2.945 cp_fm_redistribute_end 48 14.0 1.838 2.934 1.840 2.934 make_images_data 4110 15.4 0.043 0.047 2.466 2.931 cp_fm_diag_elpa_base 48 14.0 1.030 2.805 1.091 2.912 hybrid_alltoall_any 4261 16.3 0.105 0.376 2.171 2.877 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.829 2.831 rs_pw_transfer 902 11.9 0.010 0.011 2.650 2.829 cp_fm_cholesky_invert 11 10.9 2.679 2.685 2.679 2.685 fft_wrap_pw1pw2_140 451 13.1 0.211 0.214 2.626 2.632 calculate_dm_sparse 110 9.5 0.001 0.001 2.526 2.554 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.484 2.533 calculate_first_density_matrix 1 7.0 0.000 0.001 2.473 2.475 multiply_cannon_metrocomm4 14385 15.4 0.044 0.048 0.879 2.423 fft3d_ps 1111 14.6 1.064 1.075 2.382 2.392 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.330 2.386 mp_irecv_dv 48980 15.7 0.810 2.301 0.810 2.301 grid_collocate_task_list 110 9.6 2.180 2.294 2.180 2.294 mp_sum_l 6514 12.8 1.544 2.222 1.544 2.222 potential_pw2rs 110 12.3 0.010 0.010 2.200 2.209 dbcsr_complete_redistribute 325 12.2 0.336 0.373 1.554 2.032 mp_alltoall_d11v 2046 13.8 1.744 2.011 1.744 2.011 qs_energies_init_hamiltonians 11 5.9 0.001 0.005 1.982 1.986 cp_fm_upper_to_full 70 13.6 1.410 1.870 1.410 1.870 jit_kernel_multiply 8 16.6 0.787 1.781 0.787 1.781 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.636 1.654 cp_fm_cholesky_decompose 22 10.9 1.587 1.609 1.587 1.609 mp_allgather_i34 2055 14.4 0.497 1.579 0.497 1.579 copy_fm_to_dbcsr 174 11.2 0.001 0.001 1.053 1.517 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.488 1.499 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.001 1.357 1.462 mp_waitany 17072 13.8 1.150 1.317 1.150 1.317 acc_transpose_blocks 16440 15.4 0.072 0.074 1.263 1.314 qs_ot_get_orbitals 99 10.5 0.000 0.001 1.249 1.255 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="204", plot="h2o_128_md", label="(8n/4r/3t)", y=60.339000, yerr=0.000000 PlotPoint: name="205", plot="h2o_128_md_mem", label="(8n/4r/3t)", y=626.090909, yerr=8.938726 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/12/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 
100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 601.317074E+09 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 4916280 0.0% 0.0% 100.0% average stack size 0.0 0.0 200.2 marketing flops 15.646302E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 726.368256E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 937080 MPI messages size (bytes): total size 523.723932E+09 min size 0.000000E+00 max size 4.537280E+06 average size 558.889250E+03 MPI breakdown and total messages size (bytes): size <= 128 6996 0 128 < size <= 8192 264 2162688 8192 < size <= 32768 304932 8165326848 32768 < size <= 131072 110640 6338641920 131072 < size <= 4194304 489498 400769458320 4194304 < size <= 16777216 24750 108449092400 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3521 65576. MP_Allreduce 9838 600. MP_Sync 100 MP_Alltoall 1496 5863162. MP_ISendRecv 10120 43184. MP_Wait 25102 MP_comm_split 48 MP_ISend 13376 163145. MP_IRecv 13376 163145. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.056 0.124 65.764 65.776 qs_mol_dyn_low 1 2.0 0.003 0.003 65.250 65.258 qs_forces 11 3.9 0.002 0.002 64.890 64.891 qs_energies 11 4.9 0.002 0.007 61.540 61.544 scf_env_do_scf 11 5.9 0.001 0.001 52.131 52.134 scf_env_do_scf_inner_loop 99 6.5 0.002 0.006 40.595 40.596 velocity_verlet 10 3.0 0.229 0.305 36.643 36.645 dbcsr_multiply_generic 2055 12.4 0.116 0.118 29.952 30.203 qs_scf_new_mos 99 7.5 0.001 0.001 26.287 26.405 qs_scf_loop_do_ot 99 8.5 0.001 0.001 26.286 26.404 ot_scf_mini 99 9.5 0.003 0.003 24.669 24.773 multiply_cannon 2055 13.4 0.243 0.258 22.944 23.784 multiply_cannon_loop 2055 14.4 0.892 0.911 21.605 22.260 multiply_cannon_multrec 24660 15.4 4.227 6.831 13.167 14.386 ot_mini 99 10.5 0.001 0.001 14.135 14.263 rebuild_ks_matrix 110 8.3 0.000 0.000 11.874 11.987 qs_ks_build_kohn_sham_matrix 110 9.3 0.012 0.013 11.874 11.987 init_scf_loop 11 6.9 0.000 0.001 11.493 11.494 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.522 10.622 qs_ot_get_derivative 99 11.5 0.001 0.001 10.008 10.119 dbcsr_mm_accdrv_process 52304 16.0 7.295 8.523 8.795 9.813 prepare_preconditioner 11 7.9 0.000 0.000 9.778 9.797 make_preconditioner 11 8.9 0.000 0.001 9.778 9.797 make_full_inverse_cholesky 11 9.9 0.000 0.000 8.033 9.479 init_scf_run 11 5.9 0.000 0.001 6.499 6.500 scf_env_initial_rho_setup 11 6.9 0.001 0.001 6.499 6.500 mp_waitall_1 126806 16.4 4.516 6.440 4.516 6.440 sum_up_and_integrate 110 10.3 0.068 0.071 6.285 6.298 integrate_v_rspace 110 11.3 0.002 0.003 6.217 6.233 
make_m2s 4110 13.4 0.060 0.062 5.437 5.702 qs_ot_get_p 110 10.4 0.001 0.001 5.537 5.686 qs_rho_update_rho_low 110 7.6 0.001 0.001 5.550 5.560 calculate_rho_elec 110 8.6 0.077 0.081 5.550 5.559 make_images 4110 14.4 0.577 0.700 5.296 5.556 cp_fm_upper_to_full 70 13.8 3.243 4.611 3.243 4.611 ot_diis_step 99 11.5 0.011 0.011 4.092 4.092 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 4.002 4.076 apply_single 110 13.6 0.000 0.000 4.002 4.075 qs_ot_p2m_diag 48 11.0 0.055 0.064 3.752 3.767 dbcsr_complete_redistribute 325 12.2 0.416 0.454 2.611 3.737 grid_integrate_task_list 110 12.3 3.278 3.508 3.278 3.508 calculate_first_density_matrix 1 7.0 0.000 0.002 3.468 3.471 multiply_cannon_sync_h2d 24660 15.4 3.199 3.358 3.199 3.358 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 3.267 3.321 cp_dbcsr_syevd 48 12.0 0.003 0.004 3.279 3.279 pw_transfer 1331 11.6 0.064 0.072 3.226 3.250 calculate_dm_sparse 110 9.5 0.001 0.001 3.185 3.213 copy_fm_to_dbcsr 174 11.2 0.001 0.001 2.086 3.193 fft_wrap_pw1pw2 1111 12.6 0.008 0.008 3.120 3.146 multiply_cannon_metrocomm3 24660 15.4 0.036 0.037 1.472 3.099 make_images_data 4110 15.4 0.046 0.050 2.771 3.081 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.989 3.050 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 3.030 3.032 hybrid_alltoall_any 4261 16.3 0.120 0.459 2.341 3.011 density_rs2pw 110 9.6 0.004 0.004 2.723 2.950 wfi_extrapolate 11 7.9 0.001 0.001 2.947 2.947 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 1.736 2.830 mp_alltoall_i22 605 13.7 1.628 2.783 1.628 2.783 rs_pw_transfer 902 11.9 0.010 0.011 2.473 2.755 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.744 2.747 jit_kernel_multiply 12 15.7 1.173 2.743 1.173 2.743 cp_fm_redistribute_end 48 14.0 1.367 2.720 1.368 2.721 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.667 2.704 cp_fm_diag_elpa_base 48 14.0 1.271 2.592 1.350 2.702 cp_fm_cholesky_invert 11 10.9 2.680 2.688 2.680 2.688 fft_wrap_pw1pw2_140 451 13.1 0.202 0.212 2.652 2.681 fft3d_ps 1111 14.6 1.063 1.094 2.423 2.438 qs_energies_init_hamiltonians 11 5.9 0.018 0.031 2.406 2.407 grid_collocate_task_list 110 9.6 2.220 2.398 2.220 2.398 potential_pw2rs 110 12.3 0.012 0.013 2.060 2.069 mp_alltoall_d11v 2046 13.8 1.745 1.951 1.745 1.951 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.808 1.824 qs_ot_get_orbitals 99 10.5 0.001 0.001 1.693 1.725 build_core_hamiltonian_matrix_ 11 4.9 0.000 0.001 1.600 1.703 cp_fm_cholesky_decompose 22 10.9 1.659 1.696 1.659 1.696 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.598 1.612 multiply_cannon_metrocomm4 20550 15.4 0.057 0.060 0.843 1.565 acc_transpose_blocks 24660 15.4 0.103 0.105 1.527 1.556 mp_sum_l 6514 12.8 0.954 1.517 0.954 1.517 mp_irecv_dv 62702 16.1 0.744 1.490 0.744 1.490 mp_waitany 13376 13.8 1.157 1.460 1.157 1.460 mp_sum_d 3877 11.9 1.084 1.427 1.084 1.427 qs_env_update_s_mstruct 11 6.9 0.000 0.001 1.322 1.403 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="206", plot="h2o_128_md", label="(8n/3r/4t)", y=65.776000, yerr=0.000000 PlotPoint: name="207", plot="h2o_128_md_mem", label="(8n/3r/4t)", y=690.454545, yerr=7.152414 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/13/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - 
 -------------------------------------------------------------------------------
 COUNTER                                    TOTAL       BLAS       SMM       ACC
 flops    32 x    32 x    32           184415158272       0.0%      0.0%    100.0%
 flops     9 x     9 x    32           269180485632       0.0%      0.0%    100.0%
 flops     9 x    22 x    32           349395425280       0.0%      0.0%    100.0%
 flops    22 x     9 x    32           350042406912       0.0%      0.0%    100.0%
 flops    22 x    22 x    32           453581815808       0.0%      0.0%    100.0%
 flops    32 x    32 x     9           465064427520       0.0%      0.0%    100.0%
 flops    32 x    32 x    22           568412078080       0.0%      0.0%    100.0%
 flops     9 x    32 x    32           572195340288       0.0%      0.0%    100.0%
 flops    22 x    32 x    32           699349860352       0.0%      0.0%    100.0%
 flops     9 x    32 x     9          1735942275072       0.0%      0.0%    100.0%
 flops    22 x    32 x     9          2216407818240       0.0%      0.0%    100.0%
 flops     9 x    32 x    22          2216407818240       0.0%      0.0%    100.0%
 flops    22 x    32 x    22          2803661053952       0.0%      0.0%    100.0%
 flops inhomo. stacks                              0       0.0%      0.0%      0.0%
 flops total                          12.884056E+12       0.0%      0.0%    100.0%
 flops max/rank                      807.299199E+09       0.0%      0.0%    100.0%
 matmuls inhomo. stacks                            0       0.0%      0.0%      0.0%
 matmuls total                             984178160       0.0%      0.0%    100.0%
 number of processed stacks              1438408           0.0%      0.0%    100.0%
 average stack size                                        0.0       0.0     684.2
 marketing flops                      15.646297E+12
 -------------------------------------------------------------------------------
 # multiplications                              2055
 max memory usage/rank                822.185984E+06
 # max total images/rank                           1
 # max 3D layers                                   1
 # MPI messages exchanged                     197280
 MPI messages size (bytes):
  total size                         339.125567E+09
  min size                             0.000000E+00
  max size                            13.107200E+06
  average size                         1.719006E+06
 MPI breakdown and total messages size (bytes):
             size <= 128                1452                        0
       128 < size <= 8192                  0                        0
      8192 < size <= 32768               132                  4325376
     32768 < size <= 131072            88656              11620319232
    131072 < size <= 4194304           89424             117209825280
   4194304 < size <= 16777216          17616             210291069504
  16777216 < size                          0                        0
 -------------------------------------------------------------------------------
 - - -              DBCSR MESSAGE PASSING PERFORMANCE               - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Bcast               14                     12.
 MP_Allreduce         7346                     33.
 MP_Alltoall          8043                 263767.
 MP_ISend            32836                 654203.
 MP_IRecv            32836                 654587.
 -------------------------------------------------------------------------------
 -------------------------------------------------------------------------------
 - - -                  MESSAGE PASSING PERFORMANCE                 - - -
 -------------------------------------------------------------------------------
 ROUTINE             CALLS      AVE VOLUME [Bytes]
 MP_Group                4
 MP_Bcast             3521                  65574.
 MP_Allreduce         9838                    640.
 MP_Sync               100
 MP_Alltoall          1496                8504061.
 MP_ISendRecv         6600                  54848.
 MP_Wait             17226
 MP_comm_split          48
 MP_ISend             9240                 278857.
 MP_IRecv             9240                 278857.
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.016 0.073 54.849 54.850 qs_mol_dyn_low 1 2.0 0.003 0.003 54.525 54.534 qs_forces 11 3.9 0.002 0.002 54.363 54.363 qs_energies 11 4.9 0.001 0.001 50.769 50.774 scf_env_do_scf 11 5.9 0.000 0.001 41.972 41.972 scf_env_do_scf_inner_loop 99 6.5 0.002 0.006 34.275 34.275 velocity_verlet 10 3.0 0.002 0.002 30.435 30.444 dbcsr_multiply_generic 2055 12.4 0.105 0.107 23.343 23.452 qs_scf_new_mos 99 7.5 0.001 0.001 20.357 20.415 qs_scf_loop_do_ot 99 8.5 0.001 0.001 20.357 20.414 multiply_cannon 2055 13.4 0.246 0.260 17.954 19.172 ot_scf_mini 99 9.5 0.002 0.002 19.114 19.136 multiply_cannon_loop 2055 14.4 0.324 0.337 16.617 16.817 rebuild_ks_matrix 110 8.3 0.000 0.000 11.342 11.372 qs_ks_build_kohn_sham_matrix 110 9.3 0.012 0.013 11.341 11.372 ot_mini 99 10.5 0.001 0.001 10.298 10.313 qs_ks_update_qs_env 110 7.6 0.001 0.001 10.098 10.123 multiply_cannon_multrec 8220 15.4 3.233 4.575 8.096 9.124 mp_waitall_1 106626 16.5 6.101 7.866 6.101 7.866 init_scf_loop 11 6.9 0.000 0.000 7.651 7.652 qs_ot_get_derivative 99 11.5 0.001 0.001 6.514 6.534 sum_up_and_integrate 110 10.3 0.080 0.082 6.121 6.133 integrate_v_rspace 110 11.3 0.003 0.003 6.041 6.052 prepare_preconditioner 11 7.9 0.000 0.000 6.044 6.049 make_preconditioner 11 8.9 0.000 0.000 6.044 6.049 make_full_inverse_cholesky 11 9.9 0.000 0.000 5.636 5.713 init_scf_run 11 5.9 0.000 0.001 5.689 5.689 scf_env_initial_rho_setup 11 6.9 0.001 0.001 5.688 5.689 dbcsr_mm_accdrv_process 17442 15.9 2.783 3.656 4.733 5.649 qs_rho_update_rho_low 110 7.6 0.001 0.001 5.517 5.529 calculate_rho_elec 110 8.6 0.115 0.116 5.516 5.529 qs_ot_get_p 110 10.4 0.001 0.001 4.905 4.927 make_m2s 4110 13.4 0.039 0.041 4.153 4.418 multiply_cannon_metrocomm3 8220 15.4 0.018 0.018 3.028 4.334 make_images 4110 14.4 0.636 0.690 4.022 4.284 ot_diis_step 99 11.5 0.012 0.012 3.764 3.765 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 3.701 3.735 apply_single 110 13.6 0.000 0.000 3.700 3.735 qs_ot_p2m_diag 48 11.0 0.081 0.084 3.581 3.584 grid_integrate_task_list 110 12.3 3.363 3.461 3.363 3.461 cp_dbcsr_syevd 48 12.0 0.004 0.004 3.277 3.277 pw_transfer 1331 11.6 0.065 0.071 3.184 3.207 fft_wrap_pw1pw2 1111 12.6 0.008 0.008 3.077 3.103 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 3.100 3.101 multiply_cannon_sync_h2d 8220 15.4 2.922 3.037 2.922 3.037 cp_fm_cholesky_invert 11 10.9 2.941 2.945 2.941 2.945 calculate_first_density_matrix 1 7.0 0.000 0.000 2.914 2.915 make_images_data 4110 15.4 0.038 0.043 2.367 2.753 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.750 2.751 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.730 2.741 density_rs2pw 110 9.6 0.004 0.004 2.597 2.734 cp_fm_redistribute_end 48 14.0 0.693 2.721 0.697 2.722 cp_fm_diag_elpa_base 48 14.0 1.845 2.533 2.017 2.694 wfi_extrapolate 11 7.9 0.001 0.001 2.689 2.689 hybrid_alltoall_any 4261 16.3 0.199 0.858 2.293 2.676 fft_wrap_pw1pw2_140 451 13.1 0.211 0.214 2.635 2.663 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 2.648 2.649 calculate_dm_sparse 110 9.5 0.001 0.001 2.552 2.594 grid_collocate_task_list 110 9.6 2.332 2.457 2.332 2.457 jit_kernel_multiply 10 15.8 1.644 2.371 1.644 2.371 fft3d_ps 1111 14.6 1.115 1.161 2.337 2.363 rs_pw_transfer 902 11.9 0.010 0.010 2.145 2.310 
build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 1.769 1.989 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 1.917 1.937 potential_pw2rs 110 12.3 0.015 0.015 1.898 1.903 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 1.778 1.791 cp_fm_cholesky_decompose 22 10.9 1.674 1.686 1.674 1.686 mp_alltoall_d11v 2046 13.8 1.554 1.663 1.554 1.663 qs_env_update_s_mstruct 11 6.9 0.000 0.000 1.493 1.608 mp_allgather_i34 2055 14.4 0.526 1.600 0.526 1.600 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.575 1.580 dbcsr_complete_redistribute 325 12.2 0.570 0.602 1.447 1.525 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 1.512 1.522 qs_create_task_list 11 7.9 0.000 0.000 1.223 1.322 generate_qs_task_list 11 8.9 0.379 0.447 1.223 1.322 mp_waitany 9240 13.8 1.059 1.227 1.059 1.227 multiply_cannon_metrocomm4 6165 15.4 0.017 0.018 0.479 1.150 copy_dbcsr_to_fm 151 11.3 0.003 0.003 1.120 1.136 mp_irecv_dv 24056 15.7 0.455 1.106 0.455 1.106 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="208", plot="h2o_128_md", label="(8n/2r/6t)", y=54.850000, yerr=0.000000 PlotPoint: name="209", plot="h2o_128_md_mem", label="(8n/2r/6t)", y=779.818182, yerr=8.536746 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/14/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 32 x 32 x 32 184415158272 0.0% 0.0% 100.0% flops 9 x 9 x 32 269180485632 0.0% 0.0% 100.0% flops 9 x 22 x 32 349395425280 0.0% 0.0% 100.0% flops 22 x 9 x 32 350042406912 0.0% 0.0% 100.0% flops 22 x 22 x 32 453581815808 0.0% 0.0% 100.0% flops 32 x 32 x 9 465064427520 0.0% 0.0% 100.0% flops 32 x 32 x 22 568412078080 0.0% 0.0% 100.0% flops 9 x 32 x 32 572195340288 0.0% 0.0% 100.0% flops 22 x 32 x 32 699349860352 0.0% 0.0% 100.0% flops 9 x 32 x 9 1735942275072 0.0% 0.0% 100.0% flops 22 x 32 x 9 2216407818240 0.0% 0.0% 100.0% flops 9 x 32 x 22 2216407818240 0.0% 0.0% 100.0% flops 22 x 32 x 22 2803661053952 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 12.884056E+12 0.0% 0.0% 100.0% flops max/rank 1.612391E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 984178160 0.0% 0.0% 100.0% number of processed stacks 1464624 0.0% 0.0% 100.0% average stack size 0.0 0.0 672.0 marketing flops 15.646297E+12 ------------------------------------------------------------------------------- # multiplications 2055 max memory usage/rank 1.306309E+09 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 82200 MPI messages size (bytes): total size 297.640985E+09 min size 0.000000E+00 max size 26.214400E+06 average size 3.620936E+06 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 44 1441792 32768 < size <= 131072 18560 2432696320 131072 < size <= 4194304 54216 84915781632 4194304 < size <= 16777216 0 0 16777216 < size 8808 210291069504 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 3462 67104. MP_Allreduce 9672 819. MP_Sync 52 MP_Alltoall 1474 16505187. MP_ISendRecv 4620 360267. MP_Wait 7524 MP_ISend 2420 1187840. MP_IRecv 2420 1187840. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.018 0.051 85.389 85.390 qs_mol_dyn_low 1 2.0 0.003 0.003 85.082 85.091 qs_forces 11 3.9 0.002 0.002 84.576 84.577 qs_energies 11 4.9 0.001 0.001 80.556 80.558 scf_env_do_scf 11 5.9 0.000 0.001 70.207 70.207 velocity_verlet 10 3.0 0.002 0.002 54.253 54.314 scf_env_do_scf_inner_loop 99 6.5 0.002 0.007 42.328 42.330 dbcsr_multiply_generic 2055 12.4 0.129 0.142 29.290 29.367 init_scf_loop 11 6.9 0.000 0.000 27.808 27.810 qs_scf_new_mos 99 7.5 0.001 0.001 26.135 26.173 qs_scf_loop_do_ot 99 8.5 0.001 0.001 26.135 26.172 prepare_preconditioner 11 7.9 0.000 0.000 25.894 25.899 make_preconditioner 11 8.9 0.000 0.000 25.894 25.899 make_full_inverse_cholesky 11 9.9 0.000 0.000 20.282 25.380 ot_scf_mini 99 9.5 0.002 0.002 24.404 24.427 multiply_cannon 2055 13.4 0.342 0.369 22.264 23.028 multiply_cannon_loop 2055 14.4 0.343 0.347 20.506 21.026 cp_fm_upper_to_full 70 14.2 12.639 17.974 12.639 17.974 ot_mini 99 10.5 0.001 0.001 13.907 13.931 rebuild_ks_matrix 110 8.3 0.001 0.001 12.995 13.024 qs_ks_build_kohn_sham_matrix 110 9.3 0.013 0.014 12.994 13.023 qs_ks_update_qs_env 110 7.6 0.001 0.001 11.820 11.846 dbcsr_complete_redistribute 325 12.2 1.022 1.043 7.171 10.347 multiply_cannon_multrec 8220 15.4 4.369 4.550 9.738 9.868 qs_ot_get_derivative 99 11.5 0.001 0.001 9.456 9.478 copy_fm_to_dbcsr 174 11.2 0.001 0.001 6.152 9.326 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 5.596 8.743 mp_waitall_1 87304 16.6 7.675 8.599 7.675 8.599 mp_alltoall_i22 605 13.7 5.215 8.377 5.215 8.377 sum_up_and_integrate 110 10.3 0.151 0.151 6.561 6.576 qs_rho_update_rho_low 110 7.6 0.001 0.001 6.392 6.428 calculate_rho_elec 110 8.6 0.227 0.227 6.391 6.428 integrate_v_rspace 110 11.3 0.003 0.003 6.410 6.424 init_scf_run 11 5.9 0.000 0.001 6.134 6.134 scf_env_initial_rho_setup 11 6.9 0.001 0.002 6.134 6.134 make_m2s 4110 13.4 0.044 0.045 5.157 5.636 dbcsr_mm_accdrv_process 11614 15.7 3.408 3.652 5.227 5.455 make_images 4110 14.4 
0.881 0.939 4.968 5.446 qs_ot_get_p 110 10.4 0.001 0.001 5.385 5.413 apply_preconditioner_dbcsr 110 12.6 0.000 0.000 4.785 5.376 apply_single 110 13.6 0.000 0.000 4.785 5.376 multiply_cannon_metrocomm3 8220 15.4 0.018 0.019 4.787 5.337 cp_fm_cholesky_invert 11 10.9 5.165 5.169 5.165 5.169 ot_diis_step 99 11.5 0.015 0.015 4.435 4.435 multiply_cannon_sync_h2d 8220 15.4 3.948 3.954 3.948 3.954 qs_ot_get_derivative_taylor 52 13.0 0.001 0.001 3.292 3.947 qs_ot_p2m_diag 48 11.0 0.151 0.156 3.900 3.907 grid_integrate_task_list 110 12.3 3.665 3.715 3.665 3.715 pw_transfer 1331 11.6 0.074 0.074 3.686 3.691 qs_energies_init_hamiltonians 11 5.9 0.002 0.002 3.624 3.625 fft_wrap_pw1pw2 1111 12.6 0.009 0.009 3.570 3.575 calculate_dm_sparse 110 9.5 0.001 0.001 3.539 3.558 hybrid_alltoall_any 4261 16.3 0.255 0.552 2.826 3.551 cp_dbcsr_syevd 48 12.0 0.004 0.004 3.466 3.466 make_images_data 4110 15.4 0.041 0.045 2.801 3.406 wfi_extrapolate 11 7.9 0.001 0.001 3.177 3.177 fft_wrap_pw1pw2_140 451 13.1 0.215 0.217 3.113 3.118 cp_dbcsr_sm_fm_multiply 37 9.5 0.001 0.001 2.994 2.996 density_rs2pw 110 9.6 0.004 0.004 2.895 2.917 cp_fm_diag_elpa 48 13.0 0.000 0.000 2.875 2.875 cp_fm_diag_elpa_base 48 14.0 2.341 2.521 2.873 2.873 calculate_first_density_matrix 1 7.0 0.000 0.000 2.852 2.853 fft3d_ps 1111 14.6 1.263 1.272 2.780 2.790 grid_collocate_task_list 110 9.6 2.628 2.657 2.628 2.657 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 2.501 2.510 qs_ot_get_derivative_diag 47 12.0 0.001 0.001 2.333 2.345 qs_env_update_s_mstruct 11 6.9 0.000 0.000 2.166 2.224 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 2.088 2.217 cp_dbcsr_plus_fm_fm_t_native 22 8.9 0.001 0.001 2.100 2.115 rs_pw_transfer 902 11.9 0.010 0.011 2.056 2.100 mp_alltoall_d11v 2046 13.8 1.994 2.034 1.994 2.034 potential_pw2rs 110 12.3 0.021 0.022 2.001 2.003 cp_fm_cholesky_decompose 22 10.9 1.950 1.974 1.950 1.974 qs_create_task_list 11 7.9 0.000 0.000 1.896 1.940 generate_qs_task_list 11 8.9 0.738 0.792 1.896 1.940 jit_kernel_multiply 10 15.1 1.618 1.873 1.618 1.873 qs_ks_update_qs_env_forces 11 4.9 0.000 0.000 1.777 1.779 copy_dbcsr_to_fm 151 11.3 0.003 0.003 1.743 1.771 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="210", plot="h2o_128_md", label="(8n/1r/12t)", y=85.390000, yerr=0.000000 PlotPoint: name="211", plot="h2o_128_md_mem", label="(8n/1r/12t)", y=1199.909091, yerr=39.148794 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/15/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1410022121472 0.0% 0.0% 100.0% flops 32 x 32 x 32 1924145348608 0.0% 0.0% 100.0% flops 22 x 9 x 32 1957871443968 0.0% 0.0% 100.0% flops 9 x 22 x 32 1963544850432 0.0% 0.0% 100.0% flops 22 x 22 x 32 2714615709696 0.0% 0.0% 100.0% flops 32 x 32 x 9 4377645416448 0.0% 0.0% 100.0% flops 32 x 32 x 22 5350455508992 0.0% 0.0% 100.0% flops 9 x 32 x 32 5395653328896 0.0% 0.0% 100.0% flops 22 x 32 x 32 6594687401984 0.0% 0.0% 100.0% flops 9 x 32 x 9 11444702699520 0.0% 0.0% 100.0% flops 22 x 32 x 9 15019188129792 0.0% 0.0% 100.0% flops 9 x 32 x 22 15019188129792 0.0% 0.0% 100.0% flops 22 x 32 x 22 19624853225472 0.0% 0.0% 
100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 92.796573E+12 0.0% 0.0% 100.0% flops max/rank 1.086553E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6705499744 0.0% 0.0% 100.0% number of processed stacks 11851392 0.0% 0.0% 100.0% average stack size 0.0 0.0 565.8 marketing flops 143.508480E+12 ------------------------------------------------------------------------------- # multiplications 2485 max memory usage/rank 627.179520E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 10258080 MPI messages size (bytes): total size 4.456715E+12 min size 0.000000E+00 max size 4.537280E+06 average size 434.459031E+03 MPI breakdown and total messages size (bytes): size <= 128 65736 0 128 < size <= 8192 1232 10092544 8192 < size <= 32768 3542056 94711185408 32768 < size <= 131072 1282176 73356279808 131072 < size <= 4194304 5107038 3151762421624 4194304 < size <= 16777216 259842 1136842803272 16777216 < size 0 0 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4089 56701. MP_Allreduce 11263 785. MP_Sync 168 MP_Alltoall 2210 1969309. MP_ISendRecv 48260 18752. MP_Wait 66280 MP_comm_split 82 MP_ISend 15900 108037. MP_IRecv 15900 108037. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.036 0.098 203.680 203.687 qs_mol_dyn_low 1 2.0 0.009 0.031 202.863 202.888 qs_forces 11 3.9 0.005 0.015 202.421 202.423 qs_energies 11 4.9 0.003 0.013 196.931 196.943 scf_env_do_scf 11 5.9 0.001 0.002 179.247 179.251 scf_env_do_scf_inner_loop 116 6.6 0.003 0.009 158.516 158.517 dbcsr_multiply_generic 2485 12.5 0.176 0.183 123.727 124.632 velocity_verlet 10 3.0 0.004 0.015 121.218 121.225 qs_scf_new_mos 116 7.6 0.001 0.001 120.409 120.697 qs_scf_loop_do_ot 116 8.6 0.001 0.001 120.408 120.697 ot_scf_mini 116 9.6 0.003 0.004 113.920 114.200 multiply_cannon 2485 13.5 0.233 0.241 100.784 103.526 multiply_cannon_loop 2485 14.5 2.122 2.193 98.586 101.202 ot_mini 116 10.6 0.001 0.002 65.050 65.380 multiply_cannon_multrec 59640 15.5 32.780 35.019 41.832 43.921 qs_ot_get_derivative 116 11.6 0.001 0.002 40.464 40.762 rebuild_ks_matrix 127 8.3 0.001 0.001 32.830 33.303 qs_ks_build_kohn_sham_matrix 127 9.3 0.016 0.019 32.829 33.302 mp_waitall_1 288940 16.2 28.328 31.013 28.328 31.013 qs_ks_update_qs_env 127 7.6 0.001 0.001 29.521 29.958 multiply_cannon_sync_h2d 59640 15.5 27.007 28.799 27.007 28.799 qs_ot_get_p 127 10.4 0.001 0.001 27.188 27.474 apply_preconditioner_dbcsr 127 12.6 0.000 0.001 24.056 24.645 apply_single 127 13.6 0.001 0.001 24.055 24.644 ot_diis_step 116 11.6 0.008 0.011 24.199 24.201 init_scf_loop 11 6.9 0.000 0.000 20.656 20.657 qs_ot_p2m_diag 82 11.4 0.076 0.091 20.528 20.616 qs_ot_get_derivative_diag 76 12.4 0.002 0.002 18.529 18.787 multiply_cannon_metrocomm3 59640 15.5 0.115 0.120 15.451 18.037 cp_dbcsr_syevd 82 12.4 0.005 0.006 17.951 17.952 prepare_preconditioner 11 7.9 0.000 0.000 16.088 16.127 make_preconditioner 11 8.9 0.000 0.000 16.088 16.127 make_full_inverse_cholesky 
11 9.9 0.000 0.000 15.317 15.518 cp_fm_diag_elpa 82 13.4 0.000 0.001 15.145 15.151 cp_fm_redistribute_end 82 14.4 11.936 15.076 11.950 15.081 cp_fm_diag_elpa_base 82 14.4 3.086 14.856 3.118 14.979 make_m2s 4970 13.5 0.103 0.111 13.712 14.105 make_images 4970 14.5 0.398 0.417 13.532 13.938 sum_up_and_integrate 127 10.3 0.088 0.106 13.749 13.765 integrate_v_rspace 127 11.3 0.003 0.004 13.660 13.679 init_scf_run 11 5.9 0.000 0.001 13.017 13.017 scf_env_initial_rho_setup 11 6.9 0.011 0.021 13.017 13.017 qs_rho_update_rho_low 127 7.7 0.001 0.002 12.419 12.491 calculate_rho_elec 127 8.7 0.044 0.063 12.418 12.490 mp_sum_l 7804 13.0 8.024 9.411 8.024 9.411 cp_fm_cholesky_invert 11 10.9 9.266 9.274 9.266 9.274 dbcsr_mm_accdrv_process 123452 16.2 3.266 3.407 8.621 9.178 calculate_dm_sparse 127 9.5 0.001 0.001 8.954 9.035 wfi_extrapolate 11 7.9 0.003 0.018 9.021 9.021 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 8.176 8.311 multiply_cannon_metrocomm1 59640 15.5 0.082 0.085 6.096 8.163 qs_ot_get_orbitals 116 10.6 0.001 0.001 7.735 7.829 make_images_data 4970 15.5 0.066 0.072 6.736 7.698 grid_integrate_task_list 127 12.3 7.078 7.424 7.078 7.424 hybrid_alltoall_any 5155 16.4 0.289 2.248 5.903 7.296 density_rs2pw 127 9.7 0.006 0.007 6.311 7.012 pw_transfer 1535 11.6 0.074 0.106 6.728 6.988 cp_dbcsr_sm_fm_multiply 37 9.5 0.003 0.003 6.767 6.775 fft_wrap_pw1pw2 1281 12.7 0.010 0.013 6.525 6.761 rs_pw_transfer 1038 11.9 0.016 0.018 5.601 6.497 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 5.903 6.041 fft_wrap_pw1pw2_140 519 13.2 0.441 0.505 5.620 5.809 fft3d_ps 1281 14.7 2.076 2.549 5.329 5.516 mp_alltoall_d11v 2401 14.1 4.093 5.138 4.093 5.138 grid_collocate_task_list 127 9.7 4.689 5.036 4.689 5.036 cp_fm_cholesky_decompose 22 10.9 4.621 4.633 4.621 4.633 potential_pw2rs 127 12.3 0.009 0.010 4.328 4.353 mp_sum_d 4444 12.2 3.600 4.285 3.600 4.285 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="400", plot="h2o_256_md", label="(8n/12r/1t)", y=203.687000, yerr=0.000000 PlotPoint: name="401", plot="h2o_256_md_mem", label="(8n/12r/1t)", y=594.090909, yerr=6.612035 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/16/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1430458527744 0.0% 0.0% 100.0% flops 32 x 32 x 32 1958505086976 0.0% 0.0% 100.0% flops 22 x 9 x 32 1986244964352 0.0% 0.0% 100.0% flops 9 x 22 x 32 1992000282624 0.0% 0.0% 100.0% flops 22 x 22 x 32 2753956716544 0.0% 0.0% 100.0% flops 32 x 32 x 9 4454954827776 0.0% 0.0% 100.0% flops 32 x 32 x 22 5444944789504 0.0% 0.0% 100.0% flops 9 x 32 x 32 5492290093056 0.0% 0.0% 100.0% flops 22 x 32 x 32 6712799002624 0.0% 0.0% 100.0% flops 9 x 32 x 9 11613083000832 0.0% 0.0% 100.0% flops 22 x 32 x 9 15239146475520 0.0% 0.0% 100.0% flops 9 x 32 x 22 15239146475520 0.0% 0.0% 100.0% flops 22 x 32 x 22 19911124992000 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 94.228655E+12 0.0% 0.0% 100.0% flops max/rank 2.199914E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 6806314816 0.0% 0.0% 100.0% number of processed stacks 6022464 0.0% 0.0% 100.0% average stack size 0.0 0.0 1130.2 marketing flops 145.647559E+12 ------------------------------------------------------------------------------- # multiplications 2527 max memory usage/rank 826.347520E+06 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 2425920 MPI messages size (bytes): total size 4.132350E+12 min size 0.000000E+00 max size 17.653760E+06 average size 1.703416E+06 MPI breakdown and total messages size (bytes): size <= 128 14916 0 128 < size <= 8192 0 0 8192 < size <= 32768 71436 2336489472 32768 < size <= 131072 728832 55956209664 131072 < size <= 4194304 1386864 1409906900992 4194304 < size <= 16777216 155760 1473826487232 16777216 < size 68112 1190343475200 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4113 56823. MP_Allreduce 11322 944. MP_Sync 170 MP_Alltoall 1983 6108152. MP_ISendRecv 24252 47072. MP_Wait 38240 MP_comm_split 83 MP_ISend 11836 212447. MP_IRecv 11836 212447. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.014 0.030 190.686 190.687 qs_mol_dyn_low 1 2.0 0.003 0.003 190.360 190.375 qs_forces 11 3.9 0.003 0.003 190.087 190.087 qs_energies 11 4.9 0.001 0.002 183.400 183.411 scf_env_do_scf 11 5.9 0.001 0.001 166.764 166.774 scf_env_do_scf_inner_loop 118 6.6 0.003 0.008 133.848 133.851 velocity_verlet 10 3.0 0.100 0.200 120.018 120.020 dbcsr_multiply_generic 2527 12.6 0.188 0.194 97.915 99.081 qs_scf_new_mos 118 7.6 0.001 0.001 95.263 95.957 qs_scf_loop_do_ot 118 8.6 0.001 0.001 95.263 95.956 ot_scf_mini 118 9.6 0.004 0.004 90.410 91.196 multiply_cannon 2527 13.6 0.482 0.535 77.826 82.186 multiply_cannon_loop 2527 14.6 1.269 1.308 74.856 77.333 ot_mini 118 10.6 0.001 0.001 49.940 50.630 mp_waitall_1 228564 16.4 25.136 38.680 25.136 38.680 multiply_cannon_multrec 30324 15.6 22.246 26.654 32.003 37.050 rebuild_ks_matrix 129 8.3 0.001 0.001 32.729 33.245 qs_ks_build_kohn_sham_matrix 129 9.3 0.017 0.019 32.729 33.245 init_scf_loop 11 6.9 0.000 0.000 32.829 32.830 qs_ks_update_qs_env 129 7.6 0.001 0.001 29.521 29.985 multiply_cannon_metrocomm3 30324 15.6 0.093 0.099 15.858 29.512 qs_ot_get_derivative 118 11.6 0.001 0.002 27.915 28.697 prepare_preconditioner 11 7.9 0.000 0.000 28.491 28.573 make_preconditioner 11 8.9 0.000 0.000 28.491 28.573 make_full_inverse_cholesky 11 9.9 0.000 0.000 27.131 27.701 apply_preconditioner_dbcsr 129 12.6 0.000 0.000 22.124 23.249 apply_single 129 13.6 0.001 0.001 22.124 23.248 qs_ot_get_p 129 10.4 0.001 0.001 22.068 22.853 multiply_cannon_sync_h2d 30324 15.6 19.493 21.979 19.493 21.979 ot_diis_step 118 11.6 0.014 0.015 21.852 21.854 qs_ot_p2m_diag 83 11.4 0.187 0.216 17.309 17.344 cp_fm_cholesky_invert 11 10.9 16.616 16.628 16.616 16.628 cp_dbcsr_syevd 83 12.4 0.006 0.006 16.164 16.165 make_m2s 5054 13.6 0.091 0.097 14.477 15.636 make_images 5054 14.6 1.169 1.354 14.263 15.422 sum_up_and_integrate 129 10.3 0.117 
0.134 14.240 14.271 integrate_v_rspace 129 11.3 0.003 0.003 14.122 14.156 cp_fm_diag_elpa 83 13.4 0.000 0.001 13.064 13.071 cp_fm_redistribute_end 83 14.4 7.596 12.998 7.610 13.002 cp_fm_diag_elpa_base 83 14.4 5.157 12.546 5.377 12.914 qs_rho_update_rho_low 129 7.7 0.001 0.001 12.870 12.900 calculate_rho_elec 129 8.7 0.088 0.106 12.869 12.899 init_scf_run 11 5.9 0.000 0.001 11.916 11.917 scf_env_initial_rho_setup 11 6.9 0.001 0.001 11.915 11.917 qs_ot_get_derivative_diag 77 12.4 0.002 0.002 10.844 11.419 multiply_cannon_metrocomm4 27797 15.6 0.099 0.113 3.781 10.808 make_images_data 5054 15.6 0.065 0.072 8.881 10.438 mp_irecv_dv 70031 16.3 3.583 10.420 3.583 10.420 dbcsr_mm_accdrv_process 62734 16.2 4.547 5.290 9.213 9.870 hybrid_alltoall_any 5240 16.5 0.346 1.526 7.496 9.640 wfi_extrapolate 11 7.9 0.001 0.001 8.273 8.273 pw_transfer 1559 11.6 0.087 0.108 7.651 7.719 grid_integrate_task_list 129 12.3 7.255 7.593 7.255 7.593 fft_wrap_pw1pw2 1301 12.7 0.010 0.012 7.424 7.494 density_rs2pw 129 9.7 0.006 0.006 6.644 7.187 qs_ot_get_derivative_taylor 41 13.0 0.001 0.001 6.436 7.174 cp_fm_cholesky_decompose 22 10.9 6.904 6.988 6.904 6.988 calculate_dm_sparse 129 9.5 0.001 0.001 6.502 6.685 fft_wrap_pw1pw2_140 527 13.2 0.473 0.522 6.505 6.584 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 6.190 6.201 rs_pw_transfer 1054 12.0 0.014 0.016 5.443 6.054 mp_sum_l 7930 13.1 3.814 6.005 3.814 6.005 fft3d_ps 1301 14.7 2.793 2.938 5.785 5.818 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 5.351 5.494 qs_ot_get_orbitals 118 10.6 0.001 0.001 5.367 5.432 grid_collocate_task_list 129 9.7 4.955 5.392 4.955 5.392 mp_allgather_i34 2527 14.6 1.540 4.794 1.540 4.794 mp_alltoall_d11v 2423 14.1 4.066 4.726 4.066 4.726 potential_pw2rs 129 12.3 0.014 0.016 4.666 4.680 dbcsr_complete_redistribute 395 12.7 0.771 0.870 3.196 4.046 mp_sum_d 4492 12.2 2.570 4.001 2.570 4.001 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="402", plot="h2o_256_md", label="(8n/6r/2t)", y=190.687000, yerr=0.000000 PlotPoint: name="403", plot="h2o_256_md_mem", label="(8n/6r/2t)", y=787.909091, yerr=2.274545 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/17/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1410022121472 0.0% 0.0% 100.0% flops 32 x 32 x 32 1924145348608 0.0% 0.0% 100.0% flops 22 x 9 x 32 1957871443968 0.0% 0.0% 100.0% flops 9 x 22 x 32 1963544850432 0.0% 0.0% 100.0% flops 22 x 22 x 32 2714615709696 0.0% 0.0% 100.0% flops 32 x 32 x 9 4377645416448 0.0% 0.0% 100.0% flops 32 x 32 x 22 5350455508992 0.0% 0.0% 100.0% flops 9 x 32 x 32 5395653328896 0.0% 0.0% 100.0% flops 22 x 32 x 32 6594687401984 0.0% 0.0% 100.0% flops 9 x 32 x 9 11444702699520 0.0% 0.0% 100.0% flops 22 x 32 x 9 15019188129792 0.0% 0.0% 100.0% flops 9 x 32 x 22 15019188129792 0.0% 0.0% 100.0% flops 22 x 32 x 22 19624853225472 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 92.796573E+12 0.0% 0.0% 100.0% flops max/rank 2.906045E+12 0.0% 0.0% 100.0% matmuls inhomo. 
stacks 0 0.0% 0.0% 0.0% matmuls total 6705499744 0.0% 0.0% 100.0% number of processed stacks 3951168 0.0% 0.0% 100.0% average stack size 0.0 0.0 1697.1 marketing flops 143.507742E+12 ------------------------------------------------------------------------------- # multiplications 2485 max memory usage/rank 946.724864E+06 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 1033760 MPI messages size (bytes): total size 2.695213E+12 min size 0.000000E+00 max size 26.214400E+06 average size 2.607194E+06 MPI breakdown and total messages size (bytes): size <= 128 6424 0 128 < size <= 8192 0 0 8192 < size <= 32768 264 8650752 32768 < size <= 131072 279168 36591108096 131072 < size <= 4194304 654272 987691483136 4194304 < size <= 16777216 65184 925172769472 16777216 < size 28448 745747251200 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4089 57139. MP_Allreduce 11261 985. MP_Sync 168 MP_Alltoall 1700 9383497. MP_ISendRecv 15748 75008. MP_Wait 29528 MP_comm_split 82 MP_ISend 11660 275234. MP_IRecv 11660 275234. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.014 0.045 171.729 171.731 qs_mol_dyn_low 1 2.0 0.003 0.003 171.361 171.374 qs_forces 11 3.9 0.003 0.003 171.253 171.259 qs_energies 11 4.9 0.001 0.002 164.817 164.829 scf_env_do_scf 11 5.9 0.001 0.001 149.446 149.449 scf_env_do_scf_inner_loop 116 6.6 0.002 0.008 114.442 114.444 velocity_verlet 10 3.0 0.013 0.014 109.816 109.820 dbcsr_multiply_generic 2485 12.5 0.180 0.185 80.235 81.441 qs_scf_new_mos 116 7.6 0.001 0.001 79.249 79.605 qs_scf_loop_do_ot 116 8.6 0.001 0.001 79.248 79.604 ot_scf_mini 116 9.6 0.003 0.004 75.167 75.603 multiply_cannon 2485 13.5 0.498 0.523 61.004 64.226 multiply_cannon_loop 2485 14.5 0.854 0.883 57.953 60.809 ot_mini 116 10.6 0.001 0.001 41.665 42.104 init_scf_loop 11 6.9 0.000 0.000 34.909 34.911 mp_waitall_1 176908 16.5 23.907 33.494 23.907 33.494 prepare_preconditioner 11 7.9 0.000 0.000 30.807 30.869 make_preconditioner 11 8.9 0.000 0.000 30.807 30.869 make_full_inverse_cholesky 11 9.9 0.000 0.000 28.425 29.845 rebuild_ks_matrix 127 8.3 0.001 0.001 29.133 29.634 qs_ks_build_kohn_sham_matrix 127 9.3 0.015 0.017 29.133 29.633 qs_ks_update_qs_env 127 7.6 0.001 0.001 26.289 26.752 multiply_cannon_multrec 19880 15.5 13.690 16.550 22.570 25.388 multiply_cannon_metrocomm3 19880 15.5 0.059 0.063 14.382 23.631 qs_ot_get_derivative 116 11.6 0.001 0.002 22.162 22.582 apply_preconditioner_dbcsr 127 12.6 0.000 0.000 19.633 20.585 apply_single 127 13.6 0.001 0.001 19.633 20.584 qs_ot_get_p 127 10.4 0.001 0.001 19.375 19.888 ot_diis_step 116 11.6 0.018 0.029 19.399 19.400 multiply_cannon_sync_h2d 19880 15.5 14.229 16.414 14.229 16.414 make_m2s 4970 13.5 0.080 0.087 14.272 15.218 qs_ot_p2m_diag 82 11.4 0.262 0.269 15.090 15.096 make_images 4970 14.5 1.183 1.262 14.040 14.987 cp_fm_cholesky_invert 11 10.9 14.289 14.298 14.289 14.298 cp_dbcsr_syevd 82 12.4 0.006 0.006 14.117 14.118 sum_up_and_integrate 127 10.3 0.132 0.144 
13.647 13.670 integrate_v_rspace 127 11.3 0.003 0.004 13.515 13.542 qs_rho_update_rho_low 127 7.7 0.001 0.001 12.463 12.509 calculate_rho_elec 127 8.7 0.130 0.145 12.462 12.508 cp_fm_diag_elpa 82 13.4 0.000 0.001 11.100 11.101 cp_fm_redistribute_end 82 14.4 4.167 11.048 4.180 11.050 cp_fm_diag_elpa_base 82 14.4 6.452 10.514 6.852 10.975 init_scf_run 11 5.9 0.000 0.001 10.445 10.445 scf_env_initial_rho_setup 11 6.9 0.001 0.002 10.445 10.445 make_images_data 4970 15.5 0.059 0.066 8.757 10.062 hybrid_alltoall_any 5155 16.4 0.424 1.939 7.570 9.520 multiply_cannon_metrocomm4 17395 15.5 0.059 0.068 3.336 8.983 qs_ot_get_derivative_diag 76 12.4 0.002 0.002 8.619 8.947 mp_irecv_dv 49801 16.2 3.217 8.741 3.217 8.741 dbcsr_mm_accdrv_process 41158 16.2 4.667 5.421 8.334 8.468 grid_integrate_task_list 127 12.3 7.253 7.617 7.253 7.617 cp_fm_upper_to_full 104 14.5 5.816 7.554 5.816 7.554 cp_fm_cholesky_decompose 22 10.9 7.281 7.317 7.281 7.317 pw_transfer 1535 11.6 0.084 0.104 7.200 7.293 wfi_extrapolate 11 7.9 0.001 0.001 7.204 7.204 fft_wrap_pw1pw2 1281 12.7 0.010 0.011 6.979 7.080 density_rs2pw 127 9.7 0.005 0.006 6.054 6.600 dbcsr_complete_redistribute 393 12.7 1.172 1.213 4.505 6.335 fft_wrap_pw1pw2_140 519 13.2 0.474 0.529 6.180 6.281 calculate_dm_sparse 127 9.5 0.001 0.001 5.678 5.761 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.640 5.646 grid_collocate_task_list 127 9.7 5.031 5.524 5.031 5.524 fft3d_ps 1281 14.7 2.666 2.862 5.261 5.321 rs_pw_transfer 1038 11.9 0.013 0.014 4.738 5.255 copy_fm_to_dbcsr 208 11.6 0.002 0.002 3.385 5.207 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 4.500 5.202 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.790 4.924 mp_sum_l 7804 13.0 3.158 4.683 3.158 4.683 mp_allgather_i34 2485 14.5 1.590 4.491 1.590 4.491 mp_alltoall_d11v 2401 14.1 3.963 4.423 3.963 4.423 potential_pw2rs 127 12.3 0.020 0.022 4.188 4.199 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 2.363 4.114 qs_ot_get_orbitals 116 10.6 0.001 0.001 3.968 3.994 mp_alltoall_i22 712 14.1 1.945 3.825 1.945 3.825 qs_energies_init_hamiltonians 11 5.9 0.001 0.002 3.703 3.703 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="404", plot="h2o_256_md", label="(8n/4r/3t)", y=171.731000, yerr=0.000000 PlotPoint: name="405", plot="h2o_256_md_mem", label="(8n/4r/3t)", y=892.181818, yerr=15.014043 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/18/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1410022950912 0.0% 0.0% 100.0% flops 32 x 32 x 32 1924145348608 0.0% 0.0% 100.0% flops 22 x 9 x 32 1957871443968 0.0% 0.0% 100.0% flops 9 x 22 x 32 1963542011904 0.0% 0.0% 100.0% flops 22 x 22 x 32 2714615709696 0.0% 0.0% 100.0% flops 32 x 32 x 9 4377645416448 0.0% 0.0% 100.0% flops 32 x 32 x 22 5350455508992 0.0% 0.0% 100.0% flops 9 x 32 x 32 5395653328896 0.0% 0.0% 100.0% flops 22 x 32 x 32 6594687401984 0.0% 0.0% 100.0% flops 9 x 32 x 9 11444706349056 0.0% 0.0% 100.0% flops 22 x 32 x 9 15019182452736 0.0% 0.0% 100.0% flops 9 x 32 x 22 15019182452736 0.0% 0.0% 100.0% flops 22 x 32 x 22 19624853225472 0.0% 0.0% 100.0% flops inhomo. 
stacks 0 0.0% 0.0% 0.0% flops total 92.796564E+12 0.0% 0.0% 100.0% flops max/rank 4.320337E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6705499488 0.0% 0.0% 100.0% number of processed stacks 5927808 0.0% 0.0% 100.0% average stack size 0.0 0.0 1131.2 marketing flops 143.508480E+12 ------------------------------------------------------------------------------- # multiplications 2485 max memory usage/rank 1.160495E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 1133160 MPI messages size (bytes): total size 2.008142E+12 min size 0.000000E+00 max size 17.653760E+06 average size 1.772161E+06 MPI breakdown and total messages size (bytes): size <= 128 6996 0 128 < size <= 8192 0 0 8192 < size <= 32768 396 8650752 32768 < size <= 131072 315952 35695099904 131072 < size <= 4194304 709496 778939400192 4194304 < size <= 16777216 69840 660837542000 16777216 < size 30480 532676608000 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4079 57274. MP_Allreduce 11236 1068. MP_Sync 168 MP_Alltoall 1700 12496381. MP_ISendRecv 11684 75008. MP_Wait 28114 MP_comm_split 82 MP_ISend 14840 244848. MP_IRecv 14840 244848. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.252 0.280 185.759 185.760 qs_mol_dyn_low 1 2.0 0.003 0.003 185.026 185.039 qs_forces 11 3.9 0.003 0.003 184.676 184.683 qs_energies 11 4.9 0.005 0.006 177.674 177.691 scf_env_do_scf 11 5.9 0.001 0.002 160.482 160.493 velocity_verlet 10 3.0 0.012 0.014 122.222 122.235 scf_env_do_scf_inner_loop 116 6.6 0.003 0.009 114.296 114.297 qs_scf_new_mos 116 7.6 0.001 0.001 79.373 79.673 qs_scf_loop_do_ot 116 8.6 0.001 0.001 79.373 79.672 dbcsr_multiply_generic 2485 12.5 0.188 0.193 78.938 79.601 ot_scf_mini 116 9.6 0.003 0.004 74.930 75.213 multiply_cannon 2485 13.5 0.553 0.593 54.521 57.558 multiply_cannon_loop 2485 14.5 1.182 1.206 50.896 52.635 init_scf_loop 11 6.9 0.001 0.003 46.065 46.066 ot_mini 116 10.6 0.001 0.001 41.804 42.072 prepare_preconditioner 11 7.9 0.000 0.000 42.013 42.037 make_preconditioner 11 8.9 0.000 0.002 42.013 42.037 make_full_inverse_cholesky 11 9.9 0.000 0.000 35.663 40.599 multiply_cannon_multrec 29820 15.5 14.082 19.750 26.252 31.388 rebuild_ks_matrix 127 8.3 0.001 0.001 28.610 28.914 qs_ks_build_kohn_sham_matrix 127 9.3 0.016 0.018 28.610 28.914 mp_waitall_1 152434 16.5 17.227 26.993 17.227 26.993 qs_ks_update_qs_env 127 7.6 0.001 0.001 25.831 26.099 qs_ot_get_derivative 116 11.6 0.001 0.002 22.446 22.733 make_m2s 4970 13.5 0.095 0.100 20.132 21.129 make_images 4970 14.5 1.949 2.242 19.826 20.824 qs_ot_get_p 127 10.4 0.001 0.001 19.466 19.795 apply_preconditioner_dbcsr 127 12.6 0.000 0.001 18.809 19.288 apply_single 127 13.6 0.001 0.001 18.809 19.288 ot_diis_step 116 11.6 0.017 0.019 19.229 19.230 cp_fm_upper_to_full 104 14.7 10.934 16.184 10.934 16.184 cp_fm_cholesky_invert 11 10.9 16.143 16.152 16.143 16.152 qs_ot_p2m_diag 82 11.4 0.339 0.385 15.450 15.503 multiply_cannon_metrocomm3 29820 15.5 
0.047 0.050 6.301 15.246 cp_dbcsr_syevd 82 12.4 0.006 0.007 14.195 14.198 sum_up_and_integrate 127 10.3 0.139 0.150 13.823 13.847 integrate_v_rspace 127 11.3 0.003 0.003 13.684 13.713 dbcsr_complete_redistribute 393 12.7 1.513 1.642 9.240 13.037 qs_rho_update_rho_low 127 7.7 0.001 0.001 12.884 12.916 calculate_rho_elec 127 8.7 0.174 0.189 12.883 12.915 multiply_cannon_sync_h2d 29820 15.5 11.604 12.472 11.604 12.472 make_images_data 4970 15.5 0.062 0.066 10.846 12.337 dbcsr_mm_accdrv_process 61748 16.2 7.363 8.394 11.749 12.286 copy_fm_to_dbcsr 208 11.6 0.002 0.002 7.886 11.669 hybrid_alltoall_any 5155 16.4 0.521 2.196 9.694 11.517 init_scf_run 11 5.9 0.000 0.001 11.281 11.282 scf_env_initial_rho_setup 11 6.9 0.001 0.001 11.281 11.282 cp_fm_diag_elpa 82 13.4 0.000 0.001 11.152 11.156 cp_fm_redistribute_end 82 14.4 1.897 11.082 1.911 11.087 cp_fm_diag_elpa_base 82 14.4 8.578 10.506 9.143 10.981 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 6.327 9.976 mp_alltoall_i22 712 14.1 5.821 9.660 5.821 9.660 qs_ot_get_derivative_diag 76 12.4 0.002 0.002 9.170 9.383 pw_transfer 1535 11.6 0.085 0.099 7.666 7.753 grid_integrate_task_list 127 12.3 7.490 7.723 7.490 7.723 cp_fm_cholesky_decompose 22 10.9 7.505 7.599 7.505 7.599 wfi_extrapolate 11 7.9 0.001 0.001 7.574 7.574 fft_wrap_pw1pw2 1281 12.7 0.010 0.011 7.442 7.533 multiply_cannon_metrocomm4 24850 15.5 0.075 0.084 2.765 7.145 mp_irecv_dv 75445 16.2 2.621 6.872 2.621 6.872 fft_wrap_pw1pw2_140 519 13.2 0.477 0.485 6.581 6.676 density_rs2pw 127 9.7 0.005 0.006 6.095 6.467 calculate_dm_sparse 127 9.5 0.001 0.001 6.227 6.309 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.932 6.001 fft3d_ps 1281 14.7 2.777 2.853 5.684 5.757 grid_collocate_task_list 127 9.7 5.167 5.434 5.167 5.434 mp_alltoall_d11v 2401 14.1 4.668 4.956 4.668 4.956 rs_pw_transfer 1038 11.9 0.013 0.014 4.466 4.841 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.563 4.619 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 4.430 4.528 qs_energies_init_hamiltonians 11 5.9 0.002 0.006 4.417 4.419 qs_ot_get_orbitals 116 10.6 0.001 0.001 4.135 4.194 potential_pw2rs 127 12.3 0.022 0.023 4.136 4.147 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="406", plot="h2o_256_md", label="(8n/3r/4t)", y=185.760000, yerr=0.000000 PlotPoint: name="407", plot="h2o_256_md_mem", label="(8n/3r/4t)", y=1089.000000, yerr=21.260292 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/19/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1420241154048 0.0% 0.0% 100.0% flops 32 x 32 x 32 1943472701440 0.0% 0.0% 100.0% flops 22 x 9 x 32 1972057190400 0.0% 0.0% 100.0% flops 9 x 22 x 32 1977770336256 0.0% 0.0% 100.0% flops 22 x 22 x 32 2734287699968 0.0% 0.0% 100.0% flops 32 x 32 x 9 4416300122112 0.0% 0.0% 100.0% flops 32 x 32 x 22 5397700149248 0.0% 0.0% 100.0% flops 9 x 32 x 32 5443971710976 0.0% 0.0% 100.0% flops 22 x 32 x 32 6653743202304 0.0% 0.0% 100.0% flops 9 x 32 x 9 11528896499712 0.0% 0.0% 100.0% flops 22 x 32 x 9 15129160814592 0.0% 0.0% 100.0% flops 9 x 32 x 22 15129160814592 0.0% 0.0% 100.0% flops 22 x 32 x 22 19767995056128 0.0% 0.0% 100.0% flops inhomo. 
stacks 0 0.0% 0.0% 0.0% flops total 93.514757E+12 0.0% 0.0% 100.0% flops max/rank 5.865088E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6755939872 0.0% 0.0% 100.0% number of processed stacks 1960712 0.0% 0.0% 100.0% average stack size 0.0 0.0 3445.7 marketing flops 144.579337E+12 ------------------------------------------------------------------------------- # multiplications 2507 max memory usage/rank 1.521484E+09 # max total images/rank 1 # max 3D layers 1 # MPI messages exchanged 240672 MPI messages size (bytes): total size 1.331455E+12 min size 0.000000E+00 max size 52.428800E+06 average size 5.532238E+06 MPI breakdown and total messages size (bytes): size <= 128 1452 0 128 < size <= 8192 0 0 8192 < size <= 32768 0 0 32768 < size <= 131072 132 8650752 131072 < size <= 4194304 113904 59718500352 4194304 < size <= 16777216 104976 550376570880 16777216 < size 20208 721350154512 ------------------------------------------------------------------------------- - - - DBCSR MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Bcast 14 12. MP_Allreduce 8931 51. MP_Alltoall 9654 799394. MP_ISend 40068 2102573. MP_IRecv 40068 2101675. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4002 58209. MP_Allreduce 11002 1175. MP_Sync 87 MP_Alltoall 1712 18838222. MP_ISendRecv 7680 122880. MP_Wait 19962 MP_ISend 10680 423556. MP_IRecv 10680 423556. ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.019 0.052 176.987 176.987 qs_mol_dyn_low 1 2.0 0.003 0.003 176.551 176.564 qs_forces 11 3.9 0.003 0.003 176.454 176.456 qs_energies 11 4.9 0.005 0.005 169.034 169.041 scf_env_do_scf 11 5.9 0.001 0.001 150.374 150.387 velocity_verlet 10 3.0 0.009 0.010 114.940 114.944 scf_env_do_scf_inner_loop 117 6.6 0.003 0.008 113.189 113.190 qs_scf_new_mos 117 7.6 0.001 0.001 77.850 77.927 qs_scf_loop_do_ot 117 8.6 0.001 0.001 77.849 77.926 dbcsr_multiply_generic 2507 12.6 0.184 0.189 75.286 75.662 ot_scf_mini 117 9.6 0.003 0.004 73.517 73.554 multiply_cannon 2507 13.6 0.586 0.617 55.442 59.459 multiply_cannon_loop 2507 14.6 0.450 0.459 50.480 51.174 ot_mini 117 10.6 0.001 0.001 39.829 39.874 init_scf_loop 11 6.9 0.000 0.000 37.032 37.033 mp_waitall_1 129618 16.6 26.043 33.885 26.043 33.885 prepare_preconditioner 11 7.9 0.000 0.000 33.180 33.211 make_preconditioner 11 8.9 0.000 0.000 33.180 33.211 make_full_inverse_cholesky 11 9.9 0.000 0.000 31.019 31.310 rebuild_ks_matrix 128 8.3 0.001 0.001 28.301 28.367 qs_ks_build_kohn_sham_matrix 128 9.3 0.017 0.017 28.300 28.366 qs_ks_update_qs_env 128 7.6 0.001 0.001 25.768 25.829 multiply_cannon_multrec 10028 15.6 10.464 14.258 17.961 20.914 qs_ot_get_p 128 10.4 0.001 0.001 20.682 20.750 qs_ot_get_derivative 117 11.6 0.001 0.002 19.945 19.990 ot_diis_step 117 11.6 0.019 0.021 19.813 19.813 apply_preconditioner_dbcsr 128 12.6 0.000 0.000 19.463 19.677 apply_single 128 13.6 0.001 
0.001 19.462 19.676 cp_fm_cholesky_invert 11 10.9 19.069 19.078 19.069 19.078 multiply_cannon_metrocomm3 10028 15.6 0.023 0.025 12.320 18.895 make_m2s 5014 13.6 0.067 0.071 16.194 18.849 make_images 5014 14.6 2.333 2.827 15.887 18.543 qs_ot_p2m_diag 83 11.4 0.496 0.501 16.908 16.924 cp_dbcsr_syevd 83 12.4 0.371 0.372 15.730 15.731 sum_up_and_integrate 128 10.3 0.182 0.192 14.047 14.094 integrate_v_rspace 128 11.3 0.003 0.004 13.865 13.920 qs_rho_update_rho_low 128 7.7 0.001 0.001 13.144 13.187 calculate_rho_elec 128 8.7 0.258 0.269 13.144 13.186 make_images_data 5014 15.6 0.053 0.060 9.715 12.500 cp_fm_diag_elpa 83 13.4 0.000 0.000 12.108 12.109 cp_fm_diag_elpa_base 83 14.4 11.852 11.933 12.099 12.099 multiply_cannon_sync_h2d 10028 15.6 11.639 12.093 11.639 12.093 init_scf_run 11 5.9 0.000 0.001 11.930 11.930 scf_env_initial_rho_setup 11 6.9 0.001 0.001 11.929 11.930 hybrid_alltoall_any 5200 16.5 0.839 3.791 9.477 11.881 cp_fm_cholesky_decompose 22 10.9 8.419 8.535 8.419 8.535 grid_integrate_task_list 128 12.3 7.766 8.042 7.766 8.042 qs_ot_get_derivative_diag 77 12.4 0.002 0.003 7.995 8.028 dbcsr_mm_accdrv_process 20762 16.1 2.797 4.278 7.128 7.764 pw_transfer 1547 11.6 0.084 0.092 7.523 7.542 wfi_extrapolate 11 7.9 0.001 0.001 7.395 7.395 mp_allgather_i34 2507 14.6 3.173 7.339 3.173 7.339 fft_wrap_pw1pw2 1291 12.7 0.010 0.010 7.301 7.324 calculate_dm_sparse 128 9.5 0.001 0.001 7.000 7.084 multiply_cannon_metrocomm1 10028 15.6 0.028 0.029 4.259 6.872 fft_wrap_pw1pw2_140 523 13.2 0.502 0.524 6.362 6.393 density_rs2pw 128 9.7 0.005 0.005 5.978 6.148 grid_collocate_task_list 128 9.7 5.484 5.709 5.484 5.709 dbcsr_complete_redistribute 395 12.7 2.117 2.264 5.278 5.631 fft3d_ps 1291 14.7 2.721 2.796 5.463 5.500 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 5.345 5.354 qs_energies_init_hamiltonians 11 5.9 0.003 0.004 5.197 5.198 mp_alltoall_d11v 2415 14.1 4.675 5.101 4.675 5.101 rs_pw_transfer 1046 11.9 0.012 0.013 4.363 4.544 calculate_first_density_matrix 1 7.0 0.000 0.000 4.339 4.342 cp_dbcsr_sm_fm_multiply_core 37 10.5 0.000 0.000 4.248 4.263 potential_pw2rs 128 12.3 0.027 0.027 4.150 4.162 multiply_cannon_metrocomm4 7521 15.6 0.023 0.025 1.842 4.022 mp_irecv_dv 28860 15.9 1.807 3.957 1.807 3.957 build_core_hamiltonian_matrix_ 11 4.9 0.001 0.001 3.562 3.880 copy_fm_to_dbcsr 209 11.7 0.002 0.002 3.464 3.799 qs_ot_get_orbitals 117 10.6 0.001 0.001 3.762 3.782 copy_dbcsr_to_fm 186 11.8 0.004 0.004 3.550 3.615 qs_ot_get_derivative_taylor 40 13.0 0.001 0.001 3.579 3.595 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="408", plot="h2o_256_md", label="(8n/2r/6t)", y=176.987000, yerr=0.000000 PlotPoint: name="409", plot="h2o_256_md_mem", label="(8n/2r/6t)", y=1421.363636, yerr=54.759429 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/20/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 9 x 9 x 32 1430454546432 0.0% 0.0% 100.0% flops 32 x 32 x 32 1975684956160 0.0% 0.0% 100.0% flops 22 x 9 x 32 1986255912960 0.0% 0.0% 100.0% flops 9 x 22 x 32 1992006770688 0.0% 0.0% 100.0% flops 22 x 22 x 32 2753958699008 0.0% 0.0% 100.0% flops 32 x 32 x 9 4454954827776 0.0% 0.0% 
100.0% flops 32 x 32 x 22 5444944789504 0.0% 0.0% 100.0% flops 9 x 32 x 32 5492290093056 0.0% 0.0% 100.0% flops 22 x 32 x 32 6712799002624 0.0% 0.0% 100.0% flops 9 x 32 x 9 11613065416704 0.0% 0.0% 100.0% flops 22 x 32 x 9 15239182565376 0.0% 0.0% 100.0% flops 9 x 32 x 22 15239182565376 0.0% 0.0% 100.0% flops 22 x 32 x 22 19911132921856 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 94.245913E+12 0.0% 0.0% 100.0% flops max/rank 11.787674E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 6806580192 0.0% 0.0% 100.0% number of processed stacks 1982496 0.0% 0.0% 100.0% average stack size 0.0 0.0 3433.3 marketing flops 145.663816E+12 ------------------------------------------------------------------------------- # multiplications 2535 max memory usage/rank 3.041288E+09 # max total images/rank 2 # max 3D layers 1 # MPI messages exchanged 101400 MPI messages size (bytes): total size 1.145171E+12 min size 0.000000E+00 max size 104.857600E+06 average size 11.293599E+06 MPI breakdown and total messages size (bytes): size <= 128 572 0 128 < size <= 8192 0 0 8192 < size <= 32768 0 0 32768 < size <= 131072 44 2883584 131072 < size <= 4194304 45888 35634806784 4194304 < size <= 16777216 44720 382939955200 16777216 < size 10176 726592540656 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 4057 58369. MP_Allreduce 11139 1510. MP_Sync 88 MP_Alltoall 1724 36993632. MP_ISendRecv 3612 218624. MP_Wait 11682 MP_ISend 6456 1080169. MP_IRecv 6456 1080169. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.079 0.103 297.450 297.450 qs_mol_dyn_low 1 2.0 0.003 0.003 296.548 296.559 qs_forces 11 3.9 0.003 0.003 296.449 296.452 qs_energies 11 4.9 0.002 0.002 287.775 287.791 scf_env_do_scf 11 5.9 0.001 0.001 264.543 264.560 velocity_verlet 10 3.0 0.008 0.009 214.712 214.721 scf_env_do_scf_inner_loop 118 6.6 0.003 0.008 136.006 136.007 init_scf_loop 11 6.9 0.000 0.000 128.268 128.270 prepare_preconditioner 11 7.9 0.000 0.000 123.576 123.600 make_preconditioner 11 8.9 0.000 0.000 123.576 123.600 make_full_inverse_cholesky 11 9.9 0.000 0.000 98.593 120.723 qs_scf_new_mos 118 7.6 0.001 0.001 93.757 93.873 qs_scf_loop_do_ot 118 8.6 0.001 0.001 93.756 93.872 ot_scf_mini 118 9.6 0.004 0.004 88.999 89.019 dbcsr_multiply_generic 2535 12.6 0.215 0.223 83.657 84.302 cp_fm_upper_to_full 106 14.8 53.041 77.035 53.041 77.035 multiply_cannon 2535 13.6 0.714 0.786 58.847 59.418 multiply_cannon_loop 2535 14.6 0.481 0.491 55.129 56.576 ot_mini 118 10.6 0.001 0.001 44.725 44.748 dbcsr_complete_redistribute 397 12.7 4.007 4.067 30.865 44.106 copy_fm_to_dbcsr 210 11.7 0.002 0.002 27.374 40.663 transfer_fm_to_dbcsr 11 9.9 0.000 0.000 24.936 38.097 mp_alltoall_i22 720 14.1 22.832 36.309 22.832 36.309 cp_fm_cholesky_invert 11 10.9 35.832 35.841 35.832 35.841 rebuild_ks_matrix 129 8.3 0.001 0.001 33.485 33.517 qs_ks_build_kohn_sham_matrix 129 9.3 0.017 0.018 33.484 33.516 mp_waitall_1 106626 16.7 27.598 31.711 27.598 31.711 qs_ks_update_qs_env 129 7.6 0.001 0.001 31.255 31.293 qs_ot_get_p 129 10.4 0.001 0.001 28.923 28.950 qs_ot_p2m_diag 84 11.4 0.889 0.894 24.824 24.857 qs_ot_get_derivative 118 11.6 0.002 0.002 24.574 24.596 cp_dbcsr_syevd 84 12.4 0.014 0.015 23.018 23.018 make_m2s 5070 13.6 0.077 0.078 20.007 20.920 make_images 5070 14.6 3.801 3.939 19.522 20.440 ot_diis_step 118 11.6 0.022 0.022 20.104 20.105 cp_fm_diag_elpa 84 13.4 0.000 0.000 19.595 19.596 cp_fm_diag_elpa_base 84 14.4 15.142 16.775 19.590 19.590 apply_preconditioner_dbcsr 129 12.6 0.000 0.000 19.254 19.475 apply_single 129 13.6 0.001 0.001 19.254 19.475 multiply_cannon_metrocomm3 10140 15.6 0.024 0.025 17.896 19.200 multiply_cannon_multrec 10140 15.6 10.547 12.363 18.518 18.831 sum_up_and_integrate 129 10.3 0.323 0.326 15.874 15.964 multiply_cannon_sync_h2d 10140 15.6 15.771 15.784 15.771 15.784 integrate_v_rspace 129 11.3 0.004 0.004 15.550 15.639 qs_rho_update_rho_low 129 7.7 0.001 0.001 15.282 15.311 calculate_rho_elec 129 8.7 0.487 0.487 15.281 15.310 init_scf_run 11 5.9 0.000 0.001 13.223 13.223 scf_env_initial_rho_setup 11 6.9 0.014 0.014 13.222 13.223 hybrid_alltoall_any 5257 16.5 1.311 3.067 10.687 12.418 make_images_data 5070 15.6 0.060 0.063 10.609 12.349 dbcsr_mm_accdrv_process 20958 16.1 3.826 5.790 7.735 9.865 qs_ot_get_derivative_diag 78 12.4 0.002 0.002 9.654 9.675 cp_fm_cholesky_decompose 22 10.9 9.547 9.573 9.547 9.573 wfi_extrapolate 11 7.9 0.001 0.001 8.901 8.901 grid_integrate_task_list 129 12.3 8.599 8.796 8.599 8.796 pw_transfer 1559 11.6 0.091 0.092 8.556 8.561 fft_wrap_pw1pw2 1301 12.7 0.011 0.011 8.320 8.325 qs_energies_init_hamiltonians 11 5.9 0.002 0.003 7.961 7.963 cp_dbcsr_sm_fm_multiply 37 9.5 0.002 0.002 7.281 7.346 fft_wrap_pw1pw2_140 527 13.2 0.544 0.554 7.336 
7.346 mp_alltoall_d11v 2429 14.1 7.160 7.317 7.160 7.317 calculate_dm_sparse 129 9.5 0.001 0.001 6.752 6.872 copy_dbcsr_to_fm 187 11.8 0.004 0.004 6.227 6.503 grid_collocate_task_list 129 9.7 6.383 6.421 6.383 6.421 fft3d_ps 1301 14.7 2.785 2.826 6.349 6.358 density_rs2pw 129 9.7 0.005 0.005 6.252 6.287 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="410", plot="h2o_256_md", label="(8n/1r/12t)", y=297.450000, yerr=0.000000 PlotPoint: name="411", plot="h2o_256_md_mem", label="(8n/1r/12t)", y=2720.363636, yerr=166.069686 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/21/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0% flops inhomo. stacks 0 0.0% 0.0% 0.0% flops total 234.439236E+12 0.0% 0.0% 100.0% flops max/rank 2.766000E+12 0.0% 0.0% 100.0% matmuls inhomo. stacks 0 0.0% 0.0% 0.0% matmuls total 9634225188 0.0% 0.0% 100.0% number of processed stacks 419739 0.0% 0.0% 100.0% average stack size 0.0 0.0 22952.9 marketing flops 1.742116E+15 ------------------------------------------------------------------------------- # multiplications 111 max memory usage/rank 1.261752E+09 # max total images/rank 3 # max 3D layers 1 # MPI messages exchanged 458208 MPI messages size (bytes): total size 3.456111E+12 min size 0.000000E+00 max size 18.735064E+06 average size 7.542668E+06 MPI breakdown and total messages size (bytes): size <= 128 112896 0 128 < size <= 8192 0 0 8192 < size <= 32768 224 5687808 32768 < size <= 131072 10528 813356544 131072 < size <= 4194304 36422 76284728544 4194304 < size <= 16777216 294266 3312457683808 16777216 < size 3872 66548597808 ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - MESSAGE PASSING PERFORMANCE - - - ------------------------------------------------------------------------------- ROUTINE CALLS AVE VOLUME [Bytes] MP_Group 4 MP_Bcast 1026 255669. MP_Allreduce 3059 6274. MP_Sync 4 MP_Alltoall 54 MP_ISendRecv 570 19200. MP_Wait 1302 MP_ISend 642 197829. MP_IRecv 642 197607. 
------------------------------------------------------------------------------- ------------------------------------------------------------------------------- - - - T I M I N G - - - ------------------------------------------------------------------------------- SUBROUTINE CALLS ASD SELF TIME TOTAL TIME MAXIMUM AVERAGE MAXIMUM AVERAGE MAXIMUM CP2K 1 1.0 0.029 0.061 86.169 86.170 qs_energies 1 2.0 0.000 0.000 85.119 85.133 ls_scf 1 3.0 0.000 0.001 84.128 84.141 dbcsr_multiply_generic 111 6.7 0.015 0.016 72.736 72.972 multiply_cannon 111 7.7 0.018 0.020 55.903 57.063 multiply_cannon_loop 111 8.7 0.211 0.228 52.468 53.762 ls_scf_main 1 4.0 0.000 0.000 52.304 52.305 density_matrix_trs4 2 5.0 0.002 0.003 46.795 46.887 ls_scf_init_scf 1 4.0 0.000 0.001 28.744 28.744 ls_scf_init_matrix_S 1 5.0 0.000 0.000 27.527 27.578 matrix_sqrt_Newton_Schulz 2 6.5 0.047 0.372 25.414 25.449 mp_waitall_1 11316 10.9 22.535 25.089 22.535 25.089 multiply_cannon_multrec 2664 9.7 8.183 8.947 15.502 17.267 multiply_cannon_sync_h2d 2664 9.7 13.624 15.761 13.624 15.761 make_m2s 222 7.7 0.008 0.011 13.098 13.590 make_images 222 8.7 0.099 0.110 13.076 13.570 multiply_cannon_metrocomm1 2664 9.7 0.009 0.010 9.678 12.794 make_images_data 222 9.7 0.004 0.005 7.706 8.327 dbcsr_mm_accdrv_process 4760 10.4 0.507 0.610 6.937 7.978 multiply_cannon_metrocomm3 2664 9.7 0.009 0.010 5.477 7.874 hybrid_alltoall_any 227 10.6 0.215 1.840 6.564 7.858 dbcsr_mm_accdrv_process_sort 4760 11.4 6.228 7.164 6.228 7.164 calculate_norms 4752 9.8 5.532 6.202 5.532 6.202 apply_matrix_preconditioner 6 5.3 0.000 0.000 5.093 5.321 mp_sum_l 807 5.4 3.222 4.663 3.222 4.663 multiply_cannon_metrocomm4 2442 9.7 0.012 0.014 2.053 3.481 mp_irecv_dv 6231 10.9 2.036 3.457 2.036 3.457 dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 2.397 3.444 make_images_sizes 222 9.7 0.000 0.000 0.759 3.354 mp_alltoall_i44 222 10.7 0.759 3.353 0.759 3.353 arnoldi_extremal 4 6.8 0.000 0.000 3.172 3.207 arnoldi_normal_ev 4 7.8 0.001 0.003 3.172 3.207 ls_scf_post 1 4.0 0.000 0.000 3.080 3.093 build_subspace 16 8.4 0.009 0.012 3.076 3.078 ls_scf_store_result 1 5.0 0.000 0.000 2.882 2.939 dbcsr_special_finalize 555 9.7 0.005 0.006 2.279 2.756 dbcsr_merge_single_wm 555 10.7 0.456 0.598 2.272 2.748 make_images_pack 222 9.7 2.208 2.639 2.210 2.641 dbcsr_matrix_vector_mult 304 9.0 0.003 0.010 2.299 2.537 dbcsr_sort_data 658 11.4 2.075 2.471 2.075 2.471 dbcsr_matrix_vector_mult_local 304 10.0 2.064 2.451 2.066 2.453 ls_scf_dm_to_ks 2 5.0 0.000 0.000 2.232 2.323 buffer_matrices_ensure_size 222 8.7 1.758 2.130 1.758 2.130 qs_ks_update_qs_env 3 6.3 0.000 0.000 1.872 1.874 rebuild_ks_matrix 3 7.3 0.000 0.000 1.863 1.864 qs_ks_build_kohn_sham_matrix 3 8.3 0.004 0.018 1.863 1.864 ------------------------------------------------------------------------------- ~ ~ ~ ~ DATA POINTS ~ ~ ~ ~ PlotPoint: name="500", plot="h2o_32_nrep3_ls", label="(8n/12r/1t)", y=86.170000, yerr=0.000000 PlotPoint: name="501", plot="h2o_32_nrep3_ls_mem", label="(8n/12r/1t)", y=1141.000000, yerr=0.000000 ~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~~~~~~ END RESULT ~~~~~~~~ ~~~~~~~~~ RESULT ~~~~~~~~~ RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/22/result.log @@@@@@@@@@ Run number: 1 @@@@@@@@@@ ------------------------------------------------------------------------------- - - - DBCSR STATISTICS - - - ------------------------------------------------------------------------------- COUNTER TOTAL BLAS SMM ACC flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0% 
flops inhomo. stacks 0 0.0% 0.0% 0.0%
flops total 234.439236E+12 0.0% 0.0% 100.0%
flops max/rank 5.588524E+12 0.0% 0.0% 100.0%
matmuls inhomo. stacks 0 0.0% 0.0% 0.0%
matmuls total 9634225188 0.0% 0.0% 100.0%
number of processed stacks 368848 0.0% 0.0% 100.0%
average stack size 0.0 0.0 26119.8
marketing flops 1.742116E+15
-------------------------------------------------------------------------------
# multiplications 111
max memory usage/rank 2.094154E+09
# max total images/rank 3
# max 3D layers 1
# MPI messages exchanged 106560
MPI messages size (bytes):
 total size 2.699093E+12
 min size 0.000000E+00
 max size 72.286792E+06
 average size 25.329324E+06
MPI breakdown and total messages size (bytes):
 size <= 128 23040 0
 128 < size <= 8192 0 0
 8192 < size <= 32768 0 0
 32768 < size <= 131072 3264 325830144
 131072 < size <= 4194304 5280 3328561104
 4194304 < size <= 16777216 12709 156766962056
 16777216 < size 62267 2538670978840
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                      MESSAGE PASSING PERFORMANCE                           -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Group 4
MP_Bcast 1026 266696.
MP_Allreduce 3058 10339.
MP_Sync 4
MP_Alltoall 47 15335933.
MP_ISendRecv 282 57600.
MP_Wait 828
MP_ISend 462 414589.
MP_IRecv 462 413870.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                                T I M I N G                                 -
-------------------------------------------------------------------------------
SUBROUTINE  CALLS (MAXIMUM)  ASD  SELF TIME (AVERAGE, MAXIMUM)  TOTAL TIME (AVERAGE, MAXIMUM)
CP2K 1 1.0 0.035 0.052 91.279 91.280
qs_energies 1 2.0 0.000 0.000 90.699 90.713
ls_scf 1 3.0 0.000 0.001 89.389 89.402
dbcsr_multiply_generic 111 6.7 0.015 0.016 75.032 75.391
multiply_cannon 111 7.7 0.028 0.043 53.250 56.553
ls_scf_main 1 4.0 0.000 0.011 54.786 54.786
multiply_cannon_loop 111 8.7 0.117 0.124 49.957 52.837
density_matrix_trs4 2 5.0 0.002 0.017 49.106 49.270
ls_scf_init_scf 1 4.0 0.000 0.002 31.089 31.090
mp_waitall_1 9246 10.9 21.271 30.486 21.271 30.486
ls_scf_init_matrix_S 1 5.0 0.000 0.002 29.938 30.040
multiply_cannon_multrec 1332 9.7 13.036 17.514 22.234 27.880
matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 27.150 27.164
multiply_cannon_metrocomm3 1332 9.7 0.007 0.008 11.873 21.054
make_m2s 222 7.7 0.006 0.008 15.259 15.924
make_images 222 8.7 1.571 1.936 15.229 15.894
dbcsr_mm_accdrv_process 4041 10.4 0.264 0.445 8.805 10.363
dbcsr_mm_accdrv_process_sort 4041 11.4 8.401 9.918 8.401 9.918
make_images_data 222 9.7 0.004 0.004 8.760 9.672
hybrid_alltoall_any 227 10.6 0.519 2.444 8.231 9.046
mp_sum_l 807 5.4 5.220 8.221 5.220 8.221
multiply_cannon_metrocomm4 1221 9.7 0.006 0.008 3.198 7.766
mp_irecv_dv 3311 11.0 3.178 7.710 3.178 7.710
calculate_norms 2376 9.8 5.996 6.833 5.996 6.833
dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 4.027 6.505
multiply_cannon_sync_h2d 1332 9.7 4.819 6.010 4.819 6.010
apply_matrix_preconditioner 6 5.3 0.000 0.000 5.039 5.248
arnoldi_extremal 4 6.8 0.000 0.000 4.668 4.690
arnoldi_normal_ev 4 7.8 0.001 0.005 4.668 4.690
build_subspace 16 8.4 0.014 0.021 4.411 4.414
ls_scf_post 1 4.0 0.004 0.015 3.514 3.529
dbcsr_matrix_vector_mult 304 9.0 0.005 0.017 3.127 3.366
ls_scf_store_result 1 5.0 0.000 0.000 3.213 3.334
dbcsr_matrix_vector_mult_local 304 10.0 2.733 3.213 2.735 3.215
ls_scf_dm_to_ks 2 5.0 0.000 0.000 2.533 2.654
multiply_cannon_metrocomm1 1332 9.7 0.003 0.004 1.262 2.603
make_images_pack 222 9.7 2.024 2.403 2.027 2.405
mp_allgather_i34 111 8.7 0.991 2.308 0.991 2.308
dbcsr_sort_data 436 11.2 1.818 2.057 1.818 2.057
dbcsr_data_new 4174 10.1 1.618 1.852 1.618 1.852
-------------------------------------------------------------------------------
~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
PlotPoint: name="502", plot="h2o_32_nrep3_ls", label="(8n/6r/2t)", y=91.280000, yerr=0.000000
PlotPoint: name="503", plot="h2o_32_nrep3_ls_mem", label="(8n/6r/2t)", y=1697.000000, yerr=0.000000
~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~
~~~~~~~ END RESULT ~~~~~~~
~~~~~~~~~ RESULT ~~~~~~~~~
RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/23/result.log
@@@@@@@@@@ Run number: 1 @@@@@@@@@@
-------------------------------------------------------------------------------
 -                            DBCSR STATISTICS                                -
-------------------------------------------------------------------------------
COUNTER  TOTAL  BLAS  SMM  ACC
flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0%
flops inhomo. stacks 0 0.0% 0.0% 0.0%
flops total 234.439236E+12 0.0% 0.0% 100.0%
flops max/rank 8.404608E+12 0.0% 0.0% 100.0%
matmuls inhomo. stacks 0 0.0% 0.0% 0.0%
matmuls total 9634225188 0.0% 0.0% 100.0%
number of processed stacks 353133 0.0% 0.0% 100.0%
average stack size 0.0 0.0 27282.1
marketing flops 1.742118E+15
-------------------------------------------------------------------------------
# multiplications 111
max memory usage/rank 2.702230E+09
# max total images/rank 2
# max 3D layers 1
# MPI messages exchanged 46176
MPI messages size (bytes):
 total size 1.924064E+12
 min size 0.000000E+00
 max size 108.059888E+06
 average size 41.668048E+06
MPI breakdown and total messages size (bytes):
 size <= 128 9984 0
 128 < size <= 8192 0 0
 8192 < size <= 32768 0 0
 32768 < size <= 131072 0 0
 131072 < size <= 4194304 3328 1170063360
 4194304 < size <= 16777216 1870 19378539600
 16777216 < size 30994 1903514987232
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                      MESSAGE PASSING PERFORMANCE                           -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Group 4
MP_Bcast 1026 265470.
MP_Allreduce 3058 11181.
MP_Sync 4
MP_Alltoall 47 23526250.
MP_ISendRecv 186 57600.
MP_Wait 732
MP_ISend 462 560046.
MP_IRecv 462 560662.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                                T I M I N G                                 -
-------------------------------------------------------------------------------
SUBROUTINE  CALLS (MAXIMUM)  ASD  SELF TIME (AVERAGE, MAXIMUM)  TOTAL TIME (AVERAGE, MAXIMUM)
CP2K 1 1.0 0.034 0.050 93.552 93.553
qs_energies 1 2.0 0.000 0.000 92.939 92.942
ls_scf 1 3.0 0.000 0.001 91.380 91.382
dbcsr_multiply_generic 111 6.7 0.015 0.016 75.956 76.187
ls_scf_main 1 4.0 0.000 0.002 57.247 57.252
multiply_cannon 111 7.7 0.040 0.107 52.408 56.626
multiply_cannon_loop 111 8.7 0.100 0.107 48.865 52.180
density_matrix_trs4 2 5.0 0.002 0.004 51.343 51.543
mp_waitall_1 7374 11.0 23.567 32.788 23.567 32.788
ls_scf_init_scf 1 4.0 0.000 0.001 30.603 30.605
ls_scf_init_matrix_S 1 5.0 0.000 0.001 29.179 29.242
matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 26.787 26.800
multiply_cannon_multrec 888 9.7 12.629 15.128 21.143 24.097
multiply_cannon_metrocomm3 888 9.7 0.004 0.004 10.806 21.827
make_m2s 222 7.7 0.006 0.007 17.128 18.299
make_images 222 8.7 1.970 2.313 17.090 18.262
make_images_data 222 9.7 0.003 0.004 9.815 10.773
hybrid_alltoall_any 227 10.6 0.620 2.857 9.418 10.765
dbcsr_mm_accdrv_process 3754 10.4 0.240 0.413 8.053 9.262
dbcsr_mm_accdrv_process_sort 3754 11.4 7.682 8.849 7.682 8.849
mp_sum_l 807 5.4 4.848 8.287 4.848 8.287
multiply_cannon_sync_h2d 888 9.7 6.037 7.254 6.037 7.254
multiply_cannon_metrocomm1 888 9.7 0.003 0.003 3.563 6.983
multiply_cannon_metrocomm4 777 9.7 0.004 0.005 2.423 6.748
mp_irecv_dv 2335 11.1 2.408 6.686 2.408 6.686
dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 3.692 6.362
apply_matrix_preconditioner 6 5.3 0.000 0.000 4.928 5.154
arnoldi_extremal 4 6.8 0.000 0.000 5.046 5.064
arnoldi_normal_ev 4 7.8 0.001 0.005 5.046 5.064
build_subspace 16 8.4 0.014 0.020 4.737 4.742
calculate_norms 1584 9.8 4.277 4.666 4.277 4.666
mp_allgather_i34 111 8.7 1.387 3.891 1.387 3.891
dbcsr_matrix_vector_mult 304 9.0 0.005 0.016 3.416 3.754
dbcsr_matrix_vector_mult_local 304 10.0 3.013 3.587 3.015 3.589
ls_scf_post 1 4.0 0.000 0.002 3.530 3.534
ls_scf_store_result 1 5.0 0.000 0.000 3.274 3.355
ls_scf_dm_to_ks 2 5.0 0.000 0.000 2.762 2.854
dbcsr_sort_data 325 11.1 1.900 2.143 1.900 2.143
make_images_pack 222 9.7 1.807 2.132 1.810 2.135
make_images_sizes 222 9.7 0.000 0.000 0.984 2.105
mp_alltoall_i44 222 10.7 0.984 2.105 0.984 2.105
qs_ks_update_qs_env 3 6.3 0.000 0.000 2.018 2.020
rebuild_ks_matrix 3 7.3 0.000 0.000 2.000 2.002
qs_ks_build_kohn_sham_matrix 3 8.3 0.000 0.001 2.000 2.002
dbcsr_data_release 9322 10.9 1.305 1.989 1.305 1.989
-------------------------------------------------------------------------------
~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
PlotPoint: name="504", plot="h2o_32_nrep3_ls", label="(8n/4r/3t)", y=93.553000, yerr=0.000000
PlotPoint: name="505", plot="h2o_32_nrep3_ls_mem", label="(8n/4r/3t)", y=2166.000000, yerr=0.000000
~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~
~~~~~~~ END RESULT ~~~~~~~
~~~~~~~~~ RESULT ~~~~~~~~~
RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/24/result.log
@@@@@@@@@@ Run number: 1 @@@@@@@@@@
-------------------------------------------------------------------------------
 -                            DBCSR STATISTICS                                -
-------------------------------------------------------------------------------
COUNTER  TOTAL  BLAS  SMM  ACC
flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0%
flops inhomo. stacks 0 0.0% 0.0% 0.0%
flops total 234.439236E+12 0.0% 0.0% 100.0%
flops max/rank 10.747127E+12 0.0% 0.0% 100.0%
matmuls inhomo. stacks 0 0.0% 0.0% 0.0%
matmuls total 9634225188 0.0% 0.0% 100.0%
number of processed stacks 369794 0.0% 0.0% 100.0%
average stack size 0.0 0.0 26053.0
marketing flops 1.742116E+15
-------------------------------------------------------------------------------
# multiplications 111
max memory usage/rank 3.385225E+09
# max total images/rank 3
# max 3D layers 1
# MPI messages exchanged 50616
MPI messages size (bytes):
 total size 1.536549E+12
 min size 0.000000E+00
 max size 72.286792E+06
 average size 30.356986E+06
MPI breakdown and total messages size (bytes):
 size <= 128 10368 0
 128 < size <= 8192 0 0
 8192 < size <= 32768 0 0
 32768 < size <= 131072 1056 104411904
 131072 < size <= 4194304 3168 831638784
 4194304 < size <= 16777216 3103 33613273640
 16777216 < size 32921 1501999894888
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                      MESSAGE PASSING PERFORMANCE                           -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Group 4
MP_Bcast 1026 266696.
MP_Allreduce 3058 13371.
MP_Sync 4
MP_Alltoall 47 30278988.
MP_ISendRecv 138 86400.
MP_Wait 600
MP_ISend 378 823502.
MP_IRecv 378 823753.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                                T I M I N G                                 -
-------------------------------------------------------------------------------
SUBROUTINE  CALLS (MAXIMUM)  ASD  SELF TIME (AVERAGE, MAXIMUM)  TOTAL TIME (AVERAGE, MAXIMUM)
CP2K 1 1.0 0.105 0.171 98.238 98.240
qs_energies 1 2.0 0.000 0.000 96.995 97.006
ls_scf 1 3.0 0.014 0.025 95.077 95.084
dbcsr_multiply_generic 111 6.7 0.016 0.018 78.493 78.750
ls_scf_main 1 4.0 0.003 0.022 58.325 58.326
multiply_cannon 111 7.7 0.074 0.143 52.032 56.749
density_matrix_trs4 2 5.0 0.003 0.024 52.339 52.447
multiply_cannon_loop 111 8.7 0.116 0.127 46.902 49.563
ls_scf_init_scf 1 4.0 0.011 0.018 33.499 33.524
ls_scf_init_matrix_S 1 5.0 0.000 0.001 31.963 32.053
mp_waitall_1 6438 11.0 22.610 29.616 22.610 29.616
matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 29.397 29.406
multiply_cannon_multrec 1332 9.7 14.148 17.442 22.002 24.432
make_m2s 222 7.7 0.007 0.007 20.881 22.350
make_images 222 8.7 3.140 3.594 20.831 22.302
multiply_cannon_metrocomm3 1332 9.7 0.003 0.003 9.195 16.996
make_images_data 222 9.7 0.004 0.004 11.610 13.235
hybrid_alltoall_any 227 10.6 0.798 3.817 10.993 12.831
dbcsr_mm_accdrv_process 3641 10.4 0.199 0.500 7.488 9.007
dbcsr_mm_accdrv_process_sort 3641 11.4 7.112 8.594 7.112 8.594
mp_sum_l 807 5.4 4.019 7.249 4.019 7.249
multiply_cannon_sync_h2d 1332 9.7 5.511 6.146 5.511 6.146
multiply_cannon_metrocomm4 1110 9.7 0.004 0.006 2.104 6.020
mp_irecv_dv 3229 10.9 2.082 5.939 2.082 5.939
dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 3.100 5.794
arnoldi_extremal 4 6.8 0.000 0.000 5.202 5.214
arnoldi_normal_ev 4 7.8 0.001 0.004 5.202 5.214
multiply_cannon_metrocomm1 1332 9.7 0.003 0.003 2.632 4.986
build_subspace 16 8.4 0.014 0.021 4.864 4.872
mp_allgather_i34 111 8.7 2.238 4.713 2.238 4.713
apply_matrix_preconditioner 6 5.3 0.000 0.000 4.511 4.692
calculate_norms 2376 9.8 4.191 4.492 4.191 4.492
dbcsr_matrix_vector_mult 304 9.0 0.006 0.017 3.570 3.881
dbcsr_matrix_vector_mult_local 304 10.0 3.186 3.694 3.188 3.696
dbcsr_sort_data 658 11.4 3.080 3.394 3.080 3.394
ls_scf_post 1 4.0 0.010 0.017 3.240 3.250
dbcsr_special_finalize 555 9.7 0.006 0.007 2.831 3.157
dbcsr_merge_single_wm 555 10.7 0.538 0.664 2.822 3.149
ls_scf_store_result 1 5.0 0.000 0.000 2.962 3.032
ls_scf_dm_to_ks 2 5.0 0.000 0.000 2.884 2.926
dbcsr_data_release 10477 10.7 1.585 2.391 1.585 2.391
qs_ks_update_qs_env 3 6.3 0.000 0.000 2.222 2.224
rebuild_ks_matrix 3 7.3 0.000 0.000 2.199 2.201
qs_ks_build_kohn_sham_matrix 3 8.3 0.002 0.015 2.199 2.201
dbcsr_finalize 304 7.8 0.049 0.061 1.798 1.987
-------------------------------------------------------------------------------
~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
PlotPoint: name="506", plot="h2o_32_nrep3_ls", label="(8n/3r/4t)", y=98.240000, yerr=0.000000
PlotPoint: name="507", plot="h2o_32_nrep3_ls_mem", label="(8n/3r/4t)", y=2747.000000, yerr=0.000000
~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~
~~~~~~~ END RESULT ~~~~~~~
~~~~~~~~~ RESULT ~~~~~~~~~
RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/25/result.log
@@@@@@@@@@ Run number: 1 @@@@@@@@@@
-------------------------------------------------------------------------------
 -                            DBCSR STATISTICS                                -
-------------------------------------------------------------------------------
COUNTER  TOTAL  BLAS  SMM  ACC
flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0%
flops inhomo. stacks 0 0.0% 0.0% 0.0%
flops total 234.439236E+12 0.0% 0.0% 100.0%
flops max/rank 15.383312E+12 0.0% 0.0% 100.0%
matmuls inhomo. stacks 0 0.0% 0.0% 0.0%
matmuls total 9634225188 0.0% 0.0% 100.0%
number of processed stacks 336818 0.0% 0.0% 100.0%
average stack size 0.0 0.0 28603.7
marketing flops 1.742118E+15
-------------------------------------------------------------------------------
# multiplications 111
max memory usage/rank 4.620763E+09
# max total images/rank 1
# max 3D layers 1
# MPI messages exchanged 10656
MPI messages size (bytes):
 total size 1.149035E+12
 min size 0.000000E+00
 max size 203.538048E+06
 average size 107.829832E+06
MPI breakdown and total messages size (bytes):
 size <= 128 2304 0
 128 < size <= 8192 0 0
 8192 < size <= 32768 0 0
 32768 < size <= 131072 0 0
 131072 < size <= 4194304 768 702038016
 4194304 < size <= 16777216 0 0
 16777216 < size 7584 1148332810224
-------------------------------------------------------------------------------
 -                  DBCSR MESSAGE PASSING PERFORMANCE                         -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Bcast 2 12.
MP_Allreduce 705 128.
MP_Alltoall 310 12920694.
MP_ISend 1776 40180424.
MP_IRecv 1776 40465030.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                      MESSAGE PASSING PERFORMANCE                           -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Group 4
MP_Bcast 1026 265558.
MP_Allreduce 3049 15663.
MP_Sync 4
MP_Alltoall 47 46208988.
MP_ISendRecv 90 115200.
MP_Wait 573
MP_ISend 420 924980.
MP_IRecv 420 924528.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                                T I M I N G                                 -
-------------------------------------------------------------------------------
SUBROUTINE  CALLS (MAXIMUM)  ASD  SELF TIME (AVERAGE, MAXIMUM)  TOTAL TIME (AVERAGE, MAXIMUM)
CP2K 1 1.0 0.264 0.299 99.995 99.996
qs_energies 1 2.0 0.000 0.000 98.632 98.639
ls_scf 1 3.0 0.000 0.000 96.359 96.365
dbcsr_multiply_generic 111 6.7 0.017 0.018 77.766 78.009
ls_scf_main 1 4.0 0.000 0.000 61.739 61.740
multiply_cannon 111 7.7 0.118 0.193 55.407 60.586
density_matrix_trs4 2 5.0 0.004 0.033 54.745 54.852
multiply_cannon_loop 111 8.7 0.069 0.076 50.825 52.358
mp_waitall_1 5481 11.0 26.179 32.205 26.179 32.205
ls_scf_init_scf 1 4.0 0.000 0.000 31.010 31.012
ls_scf_init_matrix_S 1 5.0 0.000 0.000 29.758 29.804
matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 27.483 27.493
multiply_cannon_multrec 444 9.7 14.035 16.583 21.059 22.700
make_m2s 222 7.7 0.005 0.005 17.503 20.044
make_images 222 8.7 3.726 4.434 17.441 19.984
multiply_cannon_metrocomm1 444 9.7 0.002 0.002 11.069 16.561
multiply_cannon_metrocomm3 444 9.7 0.001 0.001 6.190 14.830
make_images_data 222 9.7 0.003 0.004 9.721 12.222
hybrid_alltoall_any 227 10.6 0.792 3.761 9.605 12.189
dbcsr_mm_accdrv_process 3003 10.4 0.176 0.341 6.733 7.859
multiply_cannon_sync_h2d 444 9.7 6.558 7.707 6.558 7.707
dbcsr_mm_accdrv_process_sort 3003 11.4 6.418 7.518 6.418 7.518
mp_allgather_i34 111 8.7 2.793 7.029 2.793 7.029
arnoldi_extremal 4 6.8 0.000 0.000 5.794 5.805
arnoldi_normal_ev 4 7.8 0.002 0.005 5.793 5.805
build_subspace 16 8.4 0.015 0.020 5.408 5.417
apply_matrix_preconditioner 6 5.3 0.000 0.000 4.611 4.739
mp_sum_l 807 5.4 3.002 4.643 3.002 4.643
multiply_cannon_metrocomm4 333 9.7 0.001 0.002 1.690 4.618
mp_irecv_dv 1241 11.2 1.671 4.590 1.671 4.590
dbcsr_matrix_vector_mult 304 9.0 0.007 0.017 4.163 4.366
dbcsr_matrix_vector_mult_local 304 10.0 3.697 4.181 3.700 4.183
calculate_norms 792 9.8 3.540 3.620 3.540 3.620
ls_scf_post 1 4.0 0.000 0.000 3.609 3.616
ls_scf_dm_to_ks 2 5.0 0.000 0.000 3.378 3.485
ls_scf_store_result 1 5.0 0.000 0.000 3.374 3.442
make_images_sizes 222 9.7 0.000 0.000 1.126 3.269
mp_alltoall_i44 222 10.7 1.125 3.268 1.125 3.268
dbcsr_multiply_generic_mpsum_f 86 7.8 0.000 0.000 1.933 3.171
dbcsr_finalize 304 7.8 0.062 0.078 2.200 2.324
qs_energies_init_hamiltonians 1 3.0 0.051 0.100 2.257 2.257
dbcsr_merge_all 275 8.9 0.474 0.538 2.050 2.152
qs_ks_update_qs_env 3 6.3 0.000 0.000 2.022 2.023
-------------------------------------------------------------------------------
~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
PlotPoint: name="508", plot="h2o_32_nrep3_ls", label="(8n/2r/6t)", y=99.996000, yerr=0.000000
PlotPoint: name="509", plot="h2o_32_nrep3_ls_mem", label="(8n/2r/6t)", y=3597.000000, yerr=0.000000
~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~
~~~~~~~ END RESULT ~~~~~~~
~~~~~~~~~ RESULT ~~~~~~~~~
RESULT file: /scratch/snx3000/mkrack/rt/../rt/CRAY-XC50-gnu/8ef42d3b316011b445a73f25f7afd24a63b63184_performance_tests/26/result.log
@@@@@@@@@@ Run number: 1 @@@@@@@@@@
-------------------------------------------------------------------------------
 -                            DBCSR STATISTICS                                -
-------------------------------------------------------------------------------
COUNTER  TOTAL  BLAS  SMM  ACC
flops 23 x 23 x 23 234439235724792 0.0% 0.0% 100.0%
flops inhomo. stacks 0 0.0% 0.0% 0.0%
flops total 234.439236E+12 0.0% 0.0% 100.0%
flops max/rank 30.358840E+12 0.0% 0.0% 100.0%
matmuls inhomo. stacks 0 0.0% 0.0% 0.0%
matmuls total 9634225188 0.0% 0.0% 100.0%
number of processed stacks 339931 0.0% 0.0% 100.0%
average stack size 0.0 0.0 28341.7
marketing flops 1.742118E+15
-------------------------------------------------------------------------------
# multiplications 111
max memory usage/rank 8.738869E+09
# max total images/rank 2
# max 3D layers 1
# MPI messages exchanged 4440
MPI messages size (bytes):
 total size 770.525954E+09
 min size 0.000000E+00
 max size 399.069120E+06
 average size 173.541888E+06
MPI breakdown and total messages size (bytes):
 size <= 128 640 0
 128 < size <= 8192 0 0
 8192 < size <= 32768 0 0
 32768 < size <= 131072 0 0
 131072 < size <= 4194304 640 468025344
 4194304 < size <= 16777216 0 0
 16777216 < size 3160 770057961712
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                      MESSAGE PASSING PERFORMANCE                           -
-------------------------------------------------------------------------------
ROUTINE  CALLS  AVE VOLUME [Bytes]
MP_Group 4
MP_Bcast 1026 284111.
MP_Allreduce 3043 21950.
MP_Sync 4
MP_Alltoall 47 88727262.
MP_ISendRecv 84 732600.
MP_Wait 309
MP_ISend 180 3337386.
MP_IRecv 180 3339494.
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
 -                                T I M I N G                                 -
-------------------------------------------------------------------------------
SUBROUTINE  CALLS (MAXIMUM)  ASD  SELF TIME (AVERAGE, MAXIMUM)  TOTAL TIME (AVERAGE, MAXIMUM)
CP2K 1 1.0 0.147 0.202 108.451 108.452
qs_energies 1 2.0 0.000 0.000 106.392 106.407
ls_scf 1 3.0 0.000 0.000 103.480 103.494
dbcsr_multiply_generic 111 6.7 0.023 0.027 77.514 77.679
ls_scf_main 1 4.0 0.000 0.000 65.183 65.184
density_matrix_trs4 2 5.0 0.005 0.024 56.399 56.461
multiply_cannon 111 7.7 0.179 0.273 49.632 51.324
multiply_cannon_loop 111 8.7 0.067 0.070 46.139 47.631
ls_scf_init_scf 1 4.0 0.000 0.000 34.637 34.638
ls_scf_init_matrix_S 1 5.0 0.000 0.000 33.187 33.206
matrix_sqrt_Newton_Schulz 2 6.5 0.001 0.001 30.349 30.361
mp_waitall_1 4569 11.1 21.694 25.541 21.694 25.541
make_m2s 222 7.7 0.005 0.005 23.849 24.805
make_images 222 8.7 4.590 4.971 23.743 24.696
multiply_cannon_multrec 444 9.7 17.884 18.652 22.544 23.378
hybrid_alltoall_any 227 10.6 1.659 3.619 12.925 15.574
make_images_data 222 9.7 0.003 0.003 13.084 15.479
multiply_cannon_metrocomm3 444 9.7 0.001 0.001 9.972 10.492
multiply_cannon_sync_h2d 444 9.7 8.856 8.892 8.856 8.892
arnoldi_extremal 4 6.8 0.000 0.000 7.457 7.472
arnoldi_normal_ev 4 7.8 0.047 0.058 7.457 7.472
build_subspace 16 8.4 0.026 0.036 6.853 6.868
dbcsr_matrix_vector_mult 304 9.0 0.009 0.025 5.439 5.611
dbcsr_matrix_vector_mult_local 304 10.0 5.045 5.370 5.047 5.372
apply_matrix_preconditioner 6 5.3 0.000 0.000 5.022 5.284
ls_scf_dm_to_ks 2 5.0 0.000 0.000 4.755 4.847
dbcsr_mm_accdrv_process 1814 10.4 0.228 0.477 4.484 4.685
dbcsr_mm_accdrv_process_sort 1814 11.4 4.125 4.258 4.125 4.258
ls_scf_post 1 4.0 0.000 0.000 3.660 3.674
mp_allgather_i34 111 8.7 1.088 3.489 1.088 3.489
make_images_sizes 222 9.7 0.000 0.000 1.430 3.480
mp_alltoall_i44 222 10.7 1.430 3.480 1.430 3.480
ls_scf_store_result 1 5.0 0.000 0.000 3.395 3.413
calculate_norms 792 9.8 3.238 3.280 3.238 3.280
dbcsr_finalize 304 7.8 0.082 0.089 3.088 3.130
dbcsr_merge_all 275 8.9 0.894 0.913 2.873 2.920
dbcsr_complete_redistribute 5 7.6 1.433 1.467 2.764 2.883
qs_energies_init_hamiltonians 1 3.0 0.016 0.026 2.881 2.881
dbcsr_data_release 12724 10.6 2.334 2.849 2.334 2.849
mp_sum_l 807 5.4 1.734 2.615 1.734 2.615
matrix_ls_to_qs 2 6.0 0.000 0.000 2.407 2.539
dbcsr_sort_data 325 11.1 2.439 2.493 2.439 2.493
dbcsr_new_transposed 4 7.5 0.254 0.272 2.289 2.301
dbcsr_frobenius_norm 74 6.6 2.056 2.135 2.204 2.246
qs_ks_update_qs_env 3 6.3 0.000 0.000 2.231 2.232
dbcsr_add_d 103 6.2 0.000 0.000 2.133 2.200
dbcsr_add_anytype 103 7.2 0.859 0.891 2.133 2.199
-------------------------------------------------------------------------------
~ ~ ~ ~ DATA POINTS ~ ~ ~ ~
PlotPoint: name="510", plot="h2o_32_nrep3_ls", label="(8n/1r/12t)", y=108.452000, yerr=0.000000
PlotPoint: name="511", plot="h2o_32_nrep3_ls_mem", label="(8n/1r/12t)", y=6859.000000, yerr=0.000000
~ ~ ~ ~ END DATA POINTS ~ ~ ~ ~
~~~~~~~ END RESULT ~~~~~~~
========= END RESULTS ===========
CommitSHA: 8ef42d3b316011b445a73f25f7afd24a63b63184
Summary: empty
Status: OK
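For quick post-processing of a report like the one above, the PlotPoint lines can be collected per plot to compare the wall-time and memory data points across the tested rank/thread layouts (8n/12r/1t through 8n/1r/12t). Below is a minimal sketch, not part of the test harness itself: it assumes Python 3 is available and that the report text has been saved to a file (the file name "performance_test.log" is only an example).

import re
from collections import defaultdict

# Matches the PlotPoint format used in this report, e.g.
# PlotPoint: name="500", plot="h2o_32_nrep3_ls", label="(8n/12r/1t)", y=86.170000, yerr=0.000000
PLOTPOINT = re.compile(
    r'PlotPoint: name="(\d+)", plot="([^"]+)", label="\(([^)]+)\)", '
    r'y=([0-9.]+), yerr=([0-9.]+)'
)

def collect_plotpoints(path):
    """Return {plot_name: [(label, y), ...]} for every PlotPoint line in the log."""
    points = defaultdict(list)
    with open(path) as fh:
        for line in fh:
            m = PLOTPOINT.search(line)
            if m:
                _, plot, label, y, _ = m.groups()
                points[plot].append((label, float(y)))
    return points

if __name__ == "__main__":
    # Example usage: print each plot with its data points, one layout per line.
    for plot, values in collect_plotpoints("performance_test.log").items():
        print(plot)
        for label, y in values:
            print(f"  {label:10s}  {y:10.3f}")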