[mpiexec@tapir] Launch arguments: /opt/intel/oneapi/mpi/2021.13/bin//hydra_bstrap_proxy --upstream-host tapir.lan --upstream-port 40139 --pgid 0 --launcher ssh --launcher-number 0 --base-path /opt/intel/oneapi/mpi/2021.13/bin/ --topolib hwloc --tree-width 16 --tree-level 1 --time-left -1 --launch-type 2 --debug --proxy-id 0 --node-id 0 --subtree-size 1 --upstream-fd 7 /opt/intel/oneapi/mpi/2021.13/bin//hydra_pmi_proxy --usize -1 --auto-cleanup 1 --abort-signal 9
[mpiexec@tapir] Launch arguments: /usr/bin/ssh -x jaguar /opt/intel/oneapi/mpi/2021.13/bin//hydra_bstrap_proxy --upstream-host tapir.lan --upstream-port 40139 --pgid 0 --launcher ssh --launcher-number 0 --base-path /opt/intel/oneapi/mpi/2021.13/bin/ --topolib hwloc --tree-width 16 --tree-level 1 --time-left -1 --launch-type 2 --debug --proxy-id 1 --node-id 1 --subtree-size 1 /opt/intel/oneapi/mpi/2021.13/bin//hydra_pmi_proxy --usize -1 --auto-cleanup 1 --abort-signal 9
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=init pmi_version=1 pmi_subversion=1
[proxy:0:1@jaguar] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=init pmi_version=1 pmi_subversion=1
[proxy:0:1@jaguar] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=get_maxes
[proxy:0:1@jaguar] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=4096
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get_maxes
[proxy:0:1@jaguar] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=4096
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=get_appnum
[proxy:0:1@jaguar] PMI response: cmd=appnum appnum=0
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get_appnum
[proxy:0:1@jaguar] PMI response: cmd=appnum appnum=0
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=get_my_kvsname
[proxy:0:1@jaguar] PMI response: cmd=my_kvsname kvsname=kvs_404448_0
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get_my_kvsname
[proxy:0:1@jaguar] PMI response: cmd=my_kvsname kvsname=kvs_404448_0
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=get kvsname=kvs_404448_0 key=PMI_process_mapping
[proxy:0:1@jaguar] PMI response: cmd=get_result rc=0 msg=success value=(vector,(0,2,2))
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get kvsname=kvs_404448_0 key=PMI_process_mapping
[proxy:0:1@jaguar] PMI response: cmd=get_result rc=0 msg=success value=(vector,(0,2,2))
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=put kvsname=kvs_404448_0 key=-bcast-1-2 value=2F6465762F73686D2F496E74656C5F4D50495F684F32393430
[proxy:0:1@jaguar] PMI response: cmd=put_result rc=0 msg=success
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=barrier_in
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=barrier_in
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=init pmi_version=1 pmi_subversion=1
[proxy:0:0@tapir] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get_maxes
[proxy:0:0@tapir] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=4096
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get_appnum
[proxy:0:0@tapir] PMI response: cmd=appnum appnum=0
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get_my_kvsname
[proxy:0:0@tapir] PMI response: cmd=my_kvsname kvsname=kvs_404448_0
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get kvsname=kvs_404448_0 key=PMI_process_mapping
[proxy:0:0@tapir] PMI response: cmd=get_result rc=0 msg=success value=(vector,(0,2,2))
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=barrier_in
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=init pmi_version=1 pmi_subversion=1
[proxy:0:0@tapir] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
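(A side note on the PMI traffic above: the long value stored under key=-bcast-1-2 is ordinary ASCII, presumably hex-encoded because PMI-1 values travel as plain text. Decoding it gives the name of the shared-memory file the rank published under /dev/shm; that interpretation comes from the decoded string itself, not from any documented Intel MPI format. A minimal check in Python:

    # Decode the PMI KVS value put by the rank on jaguar (key=-bcast-1-2 above).
    val = "2F6465762F73686D2F496E74656C5F4D50495F684F32393430"
    print(bytes.fromhex(val).decode("ascii"))   # -> /dev/shm/Intel_MPI_hO2940

The matching key=-bcast-1-0 value put on tapir further down decodes the same way.)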
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=get_maxes
[proxy:0:0@tapir] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=4096
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=get_appnum
[proxy:0:0@tapir] PMI response: cmd=appnum appnum=0
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=get_my_kvsname
[proxy:0:0@tapir] PMI response: cmd=my_kvsname kvsname=kvs_404448_0
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=get kvsname=kvs_404448_0 key=PMI_process_mapping
[proxy:0:0@tapir] PMI response: cmd=get_result rc=0 msg=success value=(vector,(0,2,2))
[0] MPI startup(): Intel(R) MPI Library, Version 2021.13 Build 20240701 (id: 179630a)
[0] MPI startup(): Copyright (C) 2003-2024 Intel Corporation. All rights reserved.
[0] MPI startup(): library kind: release
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=put kvsname=kvs_404448_0 key=-bcast-1-0 value=2F6465762F73686D2F496E74656C5F4D50495F38416C61376E
[proxy:0:0@tapir] PMI response: cmd=put_result rc=0 msg=success
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=barrier_in
[proxy:0:0@tapir] PMI response: cmd=barrier_out
[proxy:0:0@tapir] PMI response: cmd=barrier_out
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get kvsname=kvs_404448_0 key=-bcast-1-0
[proxy:0:0@tapir] PMI response: cmd=get_result rc=0 msg=success value=2F6465762F73686D2F496E74656C5F4D50495F38416C61376E
[0] MPI startup(): libfabric loaded: libfabric.so.1
[0] MPI startup(): libfabric version: 1.20.1-impi
[proxy:0:1@jaguar] PMI response: cmd=barrier_out
[proxy:0:1@jaguar] PMI response: cmd=barrier_out
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get kvsname=kvs_404448_0 key=-bcast-1-2
[proxy:0:1@jaguar] PMI response: cmd=get_result rc=0 msg=success value=2F6465762F73686D2F496E74656C5F4D50495F684F32393430
[0] MPI startup(): max number of MPI_Request per vci: 67108864 (pools: 1)
[0] MPI startup(): libfabric provider: tcp
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=put kvsname=kvs_404448_0 key=bc-0 value=mpi#0200A3A5C0A802040000000000000000$
[proxy:0:0@tapir] PMI response: cmd=put_result rc=0 msg=success
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=barrier_in
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=barrier_in
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=barrier_in
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=put kvsname=kvs_404448_0 key=bc-2 value=mpi#0200A2A1C0A8020C0000000000000000$
[proxy:0:1@jaguar] PMI response: cmd=put_result rc=0 msg=success
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=barrier_in
[proxy:0:0@tapir] PMI response: cmd=barrier_out
[proxy:0:0@tapir] PMI response: cmd=barrier_out
[proxy:0:0@tapir] pmi cmd from fd 6: cmd=get kvsname=kvs_404448_0 key=bc-0
[proxy:0:0@tapir] PMI response: cmd=get_result rc=0 msg=success value=mpi#0200A3A5C0A802040000000000000000$
[proxy:0:0@tapir] pmi cmd from fd 9: cmd=get kvsname=kvs_404448_0 key=bc-2
[proxy:0:0@tapir] PMI response: cmd=get_result rc=0 msg=success value=mpi#0200A2A1C0A8020C0000000000000000$
[proxy:0:1@jaguar] PMI response: cmd=barrier_out
[proxy:0:1@jaguar] PMI response: cmd=barrier_out
[proxy:0:1@jaguar] pmi cmd from fd 4: cmd=get kvsname=kvs_404448_0 key=bc-0
[proxy:0:1@jaguar] PMI response: cmd=get_result rc=0 msg=success value=mpi#0200A3A5C0A802040000000000000000$
[proxy:0:1@jaguar] pmi cmd from fd 5: cmd=get kvsname=kvs_404448_0 key=bc-2
[proxy:0:1@jaguar] PMI response: cmd=get_result rc=0 msg=success value=mpi#0200A2A1C0A8020C0000000000000000$
[2] MPI startup(): shm segment size (1068 MB per rank) * (2 local ranks) = 2136 MB total
[0] MPI startup(): shm segment size (1580 MB per rank) * (2 local ranks) = 3160 MB total
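(The bc-0 and bc-2 entries above are the ranks' OFI "business cards". With the tcp libfabric provider, the hex blob after the mpi# prefix looks like a struct sockaddr_in: a 2-byte address family (0200 = AF_INET), a 2-byte port in network byte order, a 4-byte IPv4 address, then zero padding. That layout is an assumption on my part, but decoding under it gives plausible endpoints for the two hosts. Sketch:

    import socket
    import struct

    def decode_business_card(bc):
        # Strip the "mpi#" prefix and the trailing "$", then read the raw bytes
        # as if they were a sockaddr_in (assumed layout, see the note above).
        raw = bytes.fromhex(bc[len("mpi#"):].rstrip("$"))
        family = struct.unpack("<H", raw[0:2])[0]   # 2 = AF_INET
        port = struct.unpack("!H", raw[2:4])[0]     # network byte order
        addr = socket.inet_ntoa(raw[4:8])
        return family, addr, port

    print(decode_business_card("mpi#0200A3A5C0A802040000000000000000$"))  # (2, '192.168.2.4', 41893)
    print(decode_business_card("mpi#0200A2A1C0A8020C0000000000000000$"))  # (2, '192.168.2.12', 41633)

192.168.2.4 and 192.168.2.12 would then be the listening endpoints on tapir and jaguar respectively; treat that host mapping as an inference.)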
[0] MPI startup(): Load tuning file: "/opt/intel/oneapi/mpi/2021.13/opt/mpi/etc/tuning_generic_shm-ofi.dat"
[0] MPI startup(): threading: mode: direct
[0] MPI startup(): threading: vcis: 1
[0] MPI startup(): threading: app_threads: -1
[0] MPI startup(): threading: runtime: generic
[0] MPI startup(): threading: progress_threads: 0
[0] MPI startup(): threading: async_progress: 0
[0] MPI startup(): threading: lock_level: global
[0] MPI startup(): tag bits available: 19 (TAG_UB value: 524287)
[0] MPI startup(): source bits available: 20 (Maximal number of rank: 1048575)
[0] MPI startup(): Number of NICs: 1
[0] MPI startup(): ===== NIC pinning on tapir =====
[0] MPI startup(): Rank Thread id Pin nic
[0] MPI startup(): 0 0 enp68s0
[0] MPI startup(): 1 0 enp68s0
[0] MPI startup(): ===== CPU pinning =====
[0] MPI startup(): Rank Pid Node name Pin cpu
[0] MPI startup(): 0 404457 tapir {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47}
[0] MPI startup(): 1 404458 tapir {16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63}
[0] MPI startup(): 2 823415 jaguar {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15}
[0] MPI startup(): 3 -224450721 {0,1,2,5,6,8,10,18,19,20,21,22,23,24,28,29,30,34,35,36,37,38,39,40,41,42,43,44,45,48,50,51,52,53,54,55,56,57,59,60,61,62,63}
[0] MPI startup(): I_MPI_ROOT=/opt/intel/oneapi/mpi/2021.13
[0] MPI startup(): I_MPI_MPIRUN=mpirun
[0] MPI startup(): I_MPI_BIND_WIN_ALLOCATE=localalloc
[0] MPI startup(): I_MPI_HYDRA_DEBUG=1
[0] MPI startup(): I_MPI_HYDRA_TOPOLIB=hwloc
[0] MPI startup(): I_MPI_RETURN_WIN_MEM_NUMA=-1
[0] MPI startup(): I_MPI_INTERNAL_MEM_POLICY=default
[0] MPI startup(): I_MPI_DEBUG=10
#----------------------------------------------------------------
# Intel(R) MPI Benchmarks 2021.8, MPI-1 part
#----------------------------------------------------------------
# Date : Fri Sep 27 10:16:28 2024
# Machine : x86_64
# System : Linux
# Release : 6.9.3-76060903-generic
# Version : #202405300957~1726766035~22.04~4092a0e SMP PREEMPT_DYNAMIC Thu S
# MPI Version : 3.1
# MPI Thread Environment:
# Calling sequence was:
# IMB-MPI1
# Minimum message length in bytes: 0
# Maximum message length in bytes: 4194304
#
# MPI_Datatype : MPI_BYTE
# MPI_Datatype for reductions : MPI_FLOAT
# MPI_Op : MPI_SUM
#
#
# List of Benchmarks to run:
# PingPong
# PingPing
# Sendrecv
# Exchange
# Allreduce
# Reduce
# Reduce_local
# Reduce_scatter
# Reduce_scatter_block
# Allgather
# Allgatherv
# Gather
# Gatherv
# Scatter
# Scatterv
# Alltoall
# Alltoallv
# Bcast
# Barrier
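(The CPU pinning table ties in with the PMI_process_mapping value (vector,(0,2,2)) exchanged earlier: reading each (offset, count, size) triple as "count consecutive nodes starting at node offset, with size ranks per node" (the MPICH/Hydra convention, as far as I know) gives ranks 0-1 on node 0 (tapir) and ranks 2-3 on node 1 (jaguar), which matches the table above, even though the Pid and Node name fields in the rank 3 row look garbled. A small parser sketch under that assumption:

    import re

    def expand_process_mapping(mapping):
        # Expand a PMI_process_mapping string such as "(vector,(0,2,2))" into a
        # rank -> node-id list.  The (offset, count, size) triple format is
        # assumed to follow the MPICH/Hydra convention described above.
        nodes = []
        for off, cnt, size in re.findall(r"\((\d+),(\d+),(\d+)\)", mapping):
            for node in range(int(off), int(off) + int(cnt)):
                nodes.extend([node] * int(size))
        return nodes

    print(expand_process_mapping("(vector,(0,2,2))"))  # [0, 0, 1, 1]
)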