==================================================================================================
mpiexec options:
----------------
  Base path: /opt/intel/impi/4.0.3.008/intel64/bin/
  Bootstrap server: ssh
  Debug level: 1
  Enable X: -1

  Global environment:
  -------------------
    I_MPI_PERHOST=allcores
    SET_HOST_TYPE= -x
    MKLROOT=/opt/intel/composer_xe_2013.0.079/mkl
    MANPATH=/opt/intel/itac/8.0.3.007/man:/opt/intel/impi/4.0.3.008/man:/opt/intel/composer_xe_2013.0.079/man/en_US:/opt/intel/composer_xe_2013.0.079/man/en_US:/usr/kerberos/man:/usr/java/latest/man:/usr/local/share/man:/usr/share/man/en:/usr/share/man:/opt/ganglia/man:/opt/pdsh/man:/opt/rocks/man:/opt/condor/man:/opt/tripwire/man:/opt/openmpi/share/man:/opt/sun-ct/man:/opt/gridengine/man::/opt/intel/vtune_amplifier_xe_2013/man
    PDSHROOT=/opt/pdsh
    SELINUX_INIT=YES
    CONSOLE=/dev/console
    VT_MPI=impi4
    HOSTNAME=compute-0-1.local
    SGE_INFOTEXT_MAX_COLUMN=5000
    INTEL_LICENSE_FILE=/opt/intel/licenses:/opt/intel/composer_xe_2013.0.079/licenses:/opt/intel/licenses:/home/x/intel/licenses
    IPPROOT=/opt/intel/composer_xe_2013.0.079/ipp
    SGE_TASK_STEPSIZE=undefined
    TERM=vt100
    SHELL=/bin/bash
    ECLIPSE_HOME=/opt/eclipse
    HISTSIZE=1000
    NHOSTS=2
    CONDOR_IDS=407.407
    TMPDIR=/tmp/460.1.all.q
    SGE_O_WORKDIR=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2
    LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64
    SGE_O_HOME=/home/x
    SGE_CELL=default
    SGE_ARCH=lx26-amd64
    MPICH_PROCESS_GROUP=no
    MIC_LD_LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/mic/coi/device-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/composer_xe_2013.0.079/mkl/lib/mic:/opt/intel/composer_xe_2013.0.079/tbb/lib/mic
    ROCKSROOT=/opt/rocks/share/devel
    SSH_TTY=/dev/pts/4
    RESTARTED=0
    ANT_HOME=/opt/rocks
    ARC=lx26-amd64
    USER=x
    LD_LIBRARY_PATH=/opt/intel/itac/8.0.3.007/itac/slib_impi4:/opt/intel/impi/4.0.3.008/intel64/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/mic/coi/host-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/mpirt/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64:/opt/gridengine/lib/lx26-amd64
    LS_COLORS=no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:bd=40;33;01:cd=40;33;01:or=01;05;37;41:mi=01;05;37;41:ex=01;32:*.cmd=01;32:*.exe=01;32:*.com=01;32:*.btm=01;32:*.bat=01;32:*.sh=01;32:*.csh=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.bz=01;31:*.tz=01;31:*.rpm=01;31:*.cpio=01;31:*.jpg=01;35:*.gif=01;35:*.bmp=01;35:*.xbm=01;35:*.xpm=01;35:*.png=01;35:*.tif=01;35:
    INIT_VERSION=sysvinit-2.86
    SGE_TASK_LAST=undefined
    ROCKS_ROOT=/opt/rocks
    QUEUE=all.q
    CPATH=/opt/intel/composer_xe_2013.0.079/mkl/include:/opt/intel/composer_xe_2013.0.079/tbb/include
    SGE_TASK_ID=undefined
    NLSPATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/debugger/intel64/locale/%l_%t/%N
    PATH=/tmp/460.1.all.q:/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp
    VT_ADD_LIBS=-ldwarf -lelf -lvtunwind -lnsl -lm -ldl -lpthread
    MAVEN_HOME=/opt/maven
    MAIL=/var/spool/mail/x
    SGE_BINARY_PATH=/opt/gridengine/bin/lx26-amd64
    RUNLEVEL=3
    TBBROOT=/opt/intel/composer_xe_2013.0.079/tbb
    CONDOR_CONFIG=/opt/condor/etc/condor_config
    SGE_STDERR_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460
    PWD=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2
    INPUTRC=/etc/inputrc
    JAVA_HOME=/usr/java/latest
    SGE_EXECD_PORT=537
    SGE_ACCOUNT=sge
    SGE_STDOUT_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460
    LANG=en_US.iso885915
    SGE_QMASTER_PORT=536
    JOB_NAME=atest
    JOB_SCRIPT=/opt/gridengine/default/spool/compute-0-1/job_scripts/460
    SGE_ROOT=/opt/gridengine
    SGE_NOMSG=1
    VT_LIB_DIR=/opt/intel/itac/8.0.3.007/itac/lib_impi4
    CONDOR_ROOT=/opt/condor
    PREVLEVEL=N
    VT_ROOT=/opt/intel/itac/8.0.3.007
    REQNAME=atest
    VTUNE_AMPLIFIER_XE_2013_DIR=/opt/intel/vtune_amplifier_xe_2013
    SSH_ASKPASS=/usr/libexec/openssh/gnome-ssh-askpass
    ENVIRONMENT=BATCH
    SGE_JOB_SPOOL_DIR=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1
    PE_HOSTFILE=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1/pe_hostfile
    HOME=/home/x
    SHLVL=3
    NQUEUES=2
    SGE_CWD_PATH=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2
    SGE_O_LOGNAME=x
    ROLLSROOT=/opt/rocks/share/devel/src/roll
    VT_SLIB_DIR=/opt/intel/itac/8.0.3.007/itac/slib_impi4
    SGE_O_MAIL=/var/spool/mail/x
    LOGNAME=x
    JOB_ID=460
    TMP=/tmp/460.1.all.q
    CVS_RSH=ssh
    CLASSPATH=/opt/intel/itac/8.0.3.007/itac/lib_impi4
    PE=impi
    I_MPI_HYDRA_BOOTSTRAP=sge
    SGE_TASK_FIRST=undefined
    LESSOPEN=|/usr/bin/lesspipe.sh %s
    SGE_O_PATH=/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp
    SGE_CLUSTER_NAME=gamma
    SGE_O_SHELL=/bin/bash
    SGE_O_HOST=gamma
    REQUEST=atest
    INCLUDE=/opt/intel/composer_xe_2013.0.079/mkl/include
    NSLOTS=12
    G_BROKEN_FILENAMES=1
    SGE_STDIN_PATH=/dev/null
    I_MPI_ROOT=/opt/intel/impi/4.0.3.008
    _=/opt/intel/impi/4.0.3.008/intel64/bin/mpiexec.hydra
  User set environment:
  ---------------------
    I_MPI_FABRICS_LIST=ofa
    I_MPI_FALLBACK=0
    I_MPI_DEBUG=5


    Proxy information:
    *********************
      Proxy ID:  1
      -----------------
        Proxy name: compute-0-0
        Process count: 6
        Start PID: 0

        Proxy exec list:
        ....................
          Exec: abinit6.12.3b; Process count: 6

      Proxy ID:  2
      -----------------
        Proxy name: compute-0-1
        Process count: 6
        Start PID: 6

        Proxy exec list:
        ....................
          Exec: abinit6.12.3b; Process count: 6

==================================================================================================

[mpiexec@compute-0-1.local] Timeout set to -1 (-1 means infinite)
[mpiexec@compute-0-1.local] Got a control port string of compute-0-1.local:58458

Proxy launch args: /opt/intel/impi/4.0.3.008/intel64/bin/pmi_proxy --control-port compute-0-1.local:58458 --debug --pmi-connect lazy-cache --pmi-aggregate -s 0 --bootstrap ssh --demux poll --pgid 0 --enable-stdin 1 --proxy-id

[mpiexec@compute-0-1.local] PMI FD: (null); PMI PORT: (null); PMI ID/RANK: -1

Arguments being passed to proxy 0:
--version 1.3 --interface-env-name MPICH_INTERFACE_HOSTNAME --hostname compute-0-0 --global-core-count 12 --global-process-count 12 --auto-cleanup 1 --pmi-rank -1 --pmi-kvsname kvs_18749_0 --pmi-process-mapping (vector,(0,2,6)) --bindlib ipl --ckpoint-num -1 --global-inherited-env 103 'I_MPI_PERHOST=allcores' 'SET_HOST_TYPE= -x ' 'MKLROOT=/opt/intel/composer_xe_2013.0.079/mkl' 'MANPATH=/opt/intel/itac/8.0.3.007/man:/opt/intel/impi/4.0.3.008/man:/opt/intel/composer_xe_2013.0.079/man/en_US:/opt/intel/composer_xe_2013.0.079/man/en_US:/usr/kerberos/man:/usr/java/latest/man:/usr/local/share/man:/usr/share/man/en:/usr/share/man:/opt/ganglia/man:/opt/pdsh/man:/opt/rocks/man:/opt/condor/man:/opt/tripwire/man:/opt/openmpi/share/man:/opt/sun-ct/man:/opt/gridengine/man::/opt/intel/vtune_amplifier_xe_2013/man' 'PDSHROOT=/opt/pdsh' 'SELINUX_INIT=YES' 'CONSOLE=/dev/console' 'VT_MPI=impi4' 'HOSTNAME=compute-0-1.local' 'SGE_INFOTEXT_MAX_COLUMN=5000' 'INTEL_LICENSE_FILE=/opt/intel/licenses:/opt/intel/composer_xe_2013.0.079/licenses:/opt/intel/licenses:/home/x/intel/licenses' 'IPPROOT=/opt/intel/composer_xe_2013.0.079/ipp' 'SGE_TASK_STEPSIZE=undefined' 'TERM=vt100' 'SHELL=/bin/bash' 'ECLIPSE_HOME=/opt/eclipse' 'HISTSIZE=1000' 'NHOSTS=2' 'CONDOR_IDS=407.407' 'TMPDIR=/tmp/460.1.all.q' 'SGE_O_WORKDIR=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2' 'LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64' 'SGE_O_HOME=/home/x' 'SGE_CELL=default' 'SGE_ARCH=lx26-amd64' 'MPICH_PROCESS_GROUP=no' 'MIC_LD_LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/mic/coi/device-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/composer_xe_2013.0.079/mkl/lib/mic:/opt/intel/composer_xe_2013.0.079/tbb/lib/mic' 'ROCKSROOT=/opt/rocks/share/devel' 'SSH_TTY=/dev/pts/4' 'RESTARTED=0' 'ANT_HOME=/opt/rocks' 'ARC=lx26-amd64' 'USER=x'
'LD_LIBRARY_PATH=/opt/intel/itac/8.0.3.007/itac/slib_impi4:/opt/intel/impi/4.0.3.008/intel64/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/mic/coi/host-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/mpirt/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64:/opt/gridengine/lib/lx26-amd64' 'LS_COLORS=no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:bd=40;33;01:cd=40;33;01:or=01;05;37;41:mi=01;05;37;41:ex=01;32:*.cmd=01;32:*.exe=01;32:*.com=01;32:*.btm=01;32:*.bat=01;32:*.sh=01;32:*.csh=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.bz=01;31:*.tz=01;31:*.rpm=01;31:*.cpio=01;31:*.jpg=01;35:*.gif=01;35:*.bmp=01;35:*.xbm=01;35:*.xpm=01;35:*.png=01;35:*.tif=01;35:' 'INIT_VERSION=sysvinit-2.86' 'SGE_TASK_LAST=undefined' 'ROCKS_ROOT=/opt/rocks' 'QUEUE=all.q' 'CPATH=/opt/intel/composer_xe_2013.0.079/mkl/include:/opt/intel/composer_xe_2013.0.079/tbb/include' 'SGE_TASK_ID=undefined' 'NLSPATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/debugger/intel64/locale/%l_%t/%N' 'PATH=/tmp/460.1.all.q:/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp' 'VT_ADD_LIBS=-ldwarf -lelf -lvtunwind -lnsl -lm -ldl -lpthread' 'MAVEN_HOME=/opt/maven' 'MAIL=/var/spool/mail/x' 'SGE_BINARY_PATH=/opt/gridengine/bin/lx26-amd64' 'RUNLEVEL=3' 'TBBROOT=/opt/intel/composer_xe_2013.0.079/tbb' 'CONDOR_CONFIG=/opt/condor/etc/condor_config' 'SGE_STDERR_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460' 'PWD=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2' 'INPUTRC=/etc/inputrc' 'JAVA_HOME=/usr/java/latest' 'SGE_EXECD_PORT=537' 'SGE_ACCOUNT=sge' 'SGE_STDOUT_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460' 'LANG=en_US.iso885915' 'SGE_QMASTER_PORT=536' 'JOB_NAME=atest' 'JOB_SCRIPT=/opt/gridengine/default/spool/compute-0-1/job_scripts/460' 'SGE_ROOT=/opt/gridengine' 'SGE_NOMSG=1' 'VT_LIB_DIR=/opt/intel/itac/8.0.3.007/itac/lib_impi4' 'CONDOR_ROOT=/opt/condor' 'PREVLEVEL=N' 'VT_ROOT=/opt/intel/itac/8.0.3.007' 'REQNAME=atest' 'VTUNE_AMPLIFIER_XE_2013_DIR=/opt/intel/vtune_amplifier_xe_2013' 'SSH_ASKPASS=/usr/libexec/openssh/gnome-ssh-askpass' 'ENVIRONMENT=BATCH' 'SGE_JOB_SPOOL_DIR=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1' 'PE_HOSTFILE=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1/pe_hostfile' 'HOME=/home/x' 'SHLVL=3' 'NQUEUES=2' 
'SGE_CWD_PATH=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2' 'SGE_O_LOGNAME=x' 'ROLLSROOT=/opt/rocks/share/devel/src/roll' 'VT_SLIB_DIR=/opt/intel/itac/8.0.3.007/itac/slib_impi4' 'SGE_O_MAIL=/var/spool/mail/x' 'LOGNAME=x' 'JOB_ID=460' 'TMP=/tmp/460.1.all.q' 'CVS_RSH=ssh' 'CLASSPATH=/opt/intel/itac/8.0.3.007/itac/lib_impi4' 'PE=impi' 'I_MPI_HYDRA_BOOTSTRAP=sge' 'SGE_TASK_FIRST=undefined' 'LESSOPEN=|/usr/bin/lesspipe.sh %s' 'SGE_O_PATH=/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp' 'SGE_CLUSTER_NAME=gamma' 'SGE_O_SHELL=/bin/bash' 'SGE_O_HOST=gamma' 'REQUEST=atest' 'INCLUDE=/opt/intel/composer_xe_2013.0.079/mkl/include' 'NSLOTS=12' 'G_BROKEN_FILENAMES=1' 'SGE_STDIN_PATH=/dev/null' 'I_MPI_ROOT=/opt/intel/impi/4.0.3.008' '_=/opt/intel/impi/4.0.3.008/intel64/bin/mpiexec.hydra' --global-user-env 3 'I_MPI_FABRICS_LIST=ofa' 'I_MPI_FALLBACK=0' 'I_MPI_DEBUG=5' --global-system-env 0 --start-pid 0 --proxy-core-count 6 --exec --exec-appnum 0 --exec-proc-count 6 --exec-local-env 0 --exec-wdir /home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2 --exec-args 1 abinit6.12.3b

[mpiexec@compute-0-1.local] PMI FD: (null); PMI PORT: (null); PMI ID/RANK: -1

Arguments being passed to proxy 1:
--version 1.3 --interface-env-name MPICH_INTERFACE_HOSTNAME --hostname compute-0-1 --global-core-count 12 --global-process-count 12 --auto-cleanup 1 --pmi-rank -1 --pmi-kvsname kvs_18749_0 --pmi-process-mapping (vector,(0,2,6)) --bindlib ipl --ckpoint-num -1 --global-inherited-env 103 'I_MPI_PERHOST=allcores' 'SET_HOST_TYPE= -x ' 'MKLROOT=/opt/intel/composer_xe_2013.0.079/mkl' 'MANPATH=/opt/intel/itac/8.0.3.007/man:/opt/intel/impi/4.0.3.008/man:/opt/intel/composer_xe_2013.0.079/man/en_US:/opt/intel/composer_xe_2013.0.079/man/en_US:/usr/kerberos/man:/usr/java/latest/man:/usr/local/share/man:/usr/share/man/en:/usr/share/man:/opt/ganglia/man:/opt/pdsh/man:/opt/rocks/man:/opt/condor/man:/opt/tripwire/man:/opt/openmpi/share/man:/opt/sun-ct/man:/opt/gridengine/man::/opt/intel/vtune_amplifier_xe_2013/man' 'PDSHROOT=/opt/pdsh' 'SELINUX_INIT=YES' 'CONSOLE=/dev/console' 'VT_MPI=impi4' 'HOSTNAME=compute-0-1.local' 'SGE_INFOTEXT_MAX_COLUMN=5000' 'INTEL_LICENSE_FILE=/opt/intel/licenses:/opt/intel/composer_xe_2013.0.079/licenses:/opt/intel/licenses:/home/x/intel/licenses' 'IPPROOT=/opt/intel/composer_xe_2013.0.079/ipp' 'SGE_TASK_STEPSIZE=undefined' 'TERM=vt100' 'SHELL=/bin/bash' 'ECLIPSE_HOME=/opt/eclipse' 'HISTSIZE=1000' 'NHOSTS=2' 'CONDOR_IDS=407.407' 'TMPDIR=/tmp/460.1.all.q' 'SGE_O_WORKDIR=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2'
'LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64' 'SGE_O_HOME=/home/x' 'SGE_CELL=default' 'SGE_ARCH=lx26-amd64' 'MPICH_PROCESS_GROUP=no' 'MIC_LD_LIBRARY_PATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/mic/coi/device-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/mic:/opt/intel/composer_xe_2013.0.079/mkl/lib/mic:/opt/intel/composer_xe_2013.0.079/tbb/lib/mic' 'ROCKSROOT=/opt/rocks/share/devel' 'SSH_TTY=/dev/pts/4' 'RESTARTED=0' 'ANT_HOME=/opt/rocks' 'ARC=lx26-amd64' 'USER=x' 'LD_LIBRARY_PATH=/opt/intel/itac/8.0.3.007/itac/slib_impi4:/opt/intel/impi/4.0.3.008/intel64/lib:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/mic/coi/host-linux-release/lib:/opt/intel/mic/myo/lib:/opt/intel/composer_xe_2013.0.079/mpirt/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/../compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64:/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64:/opt/intel/composer_xe_2013.0.079/tbb/lib/intel64:/opt/gridengine/lib/lx26-amd64' 'LS_COLORS=no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:bd=40;33;01:cd=40;33;01:or=01;05;37;41:mi=01;05;37;41:ex=01;32:*.cmd=01;32:*.exe=01;32:*.com=01;32:*.btm=01;32:*.bat=01;32:*.sh=01;32:*.csh=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.bz=01;31:*.tz=01;31:*.rpm=01;31:*.cpio=01;31:*.jpg=01;35:*.gif=01;35:*.bmp=01;35:*.xbm=01;35:*.xpm=01;35:*.png=01;35:*.tif=01;35:' 'INIT_VERSION=sysvinit-2.86' 'SGE_TASK_LAST=undefined' 'ROCKS_ROOT=/opt/rocks' 'QUEUE=all.q' 'CPATH=/opt/intel/composer_xe_2013.0.079/mkl/include:/opt/intel/composer_xe_2013.0.079/tbb/include' 'SGE_TASK_ID=undefined' 'NLSPATH=/opt/intel/composer_xe_2013.0.079/compiler/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/ipp/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/mkl/lib/intel64/locale/%l_%t/%N:/opt/intel/composer_xe_2013.0.079/debugger/intel64/locale/%l_%t/%N' 'PATH=/tmp/460.1.all.q:/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp' 'VT_ADD_LIBS=-ldwarf -lelf -lvtunwind -lnsl -lm -ldl -lpthread' 'MAVEN_HOME=/opt/maven' 'MAIL=/var/spool/mail/x' 'SGE_BINARY_PATH=/opt/gridengine/bin/lx26-amd64' 'RUNLEVEL=3' 'TBBROOT=/opt/intel/composer_xe_2013.0.079/tbb' 'CONDOR_CONFIG=/opt/condor/etc/condor_config' 'SGE_STDERR_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460' 'PWD=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2' 'INPUTRC=/etc/inputrc' 'JAVA_HOME=/usr/java/latest' 'SGE_EXECD_PORT=537' 
'SGE_ACCOUNT=sge' 'SGE_STDOUT_PATH=/home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2/atest.o460' 'LANG=en_US.iso885915' 'SGE_QMASTER_PORT=536' 'JOB_NAME=atest' 'JOB_SCRIPT=/opt/gridengine/default/spool/compute-0-1/job_scripts/460' 'SGE_ROOT=/opt/gridengine' 'SGE_NOMSG=1' 'VT_LIB_DIR=/opt/intel/itac/8.0.3.007/itac/lib_impi4' 'CONDOR_ROOT=/opt/condor' 'PREVLEVEL=N' 'VT_ROOT=/opt/intel/itac/8.0.3.007' 'REQNAME=atest' 'VTUNE_AMPLIFIER_XE_2013_DIR=/opt/intel/vtune_amplifier_xe_2013' 'SSH_ASKPASS=/usr/libexec/openssh/gnome-ssh-askpass' 'ENVIRONMENT=BATCH' 'SGE_JOB_SPOOL_DIR=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1' 'PE_HOSTFILE=/opt/gridengine/default/spool/compute-0-1/active_jobs/460.1/pe_hostfile' 'HOME=/home/x' 'SHLVL=3' 'NQUEUES=2' 'SGE_CWD_PATH=/datos/x/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2' 'SGE_O_LOGNAME=x' 'ROLLSROOT=/opt/rocks/share/devel/src/roll' 'VT_SLIB_DIR=/opt/intel/itac/8.0.3.007/itac/slib_impi4' 'SGE_O_MAIL=/var/spool/mail/x' 'LOGNAME=x' 'JOB_ID=460' 'TMP=/tmp/460.1.all.q' 'CVS_RSH=ssh' 'CLASSPATH=/opt/intel/itac/8.0.3.007/itac/lib_impi4' 'PE=impi' 'I_MPI_HYDRA_BOOTSTRAP=sge' 'SGE_TASK_FIRST=undefined' 'LESSOPEN=|/usr/bin/lesspipe.sh %s' 'SGE_O_PATH=/opt/intel/vtune_amplifier_xe_2013/bin64:/opt/intel/itac/8.0.3.007/bin:/opt/intel/impi/4.0.3.008/intel64/bin:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/mpirt/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64:/opt/intel/composer_xe_2013.0.079/bin/intel64_mic:/opt/intel/composer_xe_2013.0.079/debugger/gui/intel64:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/opt/eclipse:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/maven/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/condor/bin:/opt/condor/sbin:/opt/gridengine/bin/lx26-amd64:/home/x/bin:/home/x/binvasp' 'SGE_CLUSTER_NAME=gamma' 'SGE_O_SHELL=/bin/bash' 'SGE_O_HOST=gamma' 'REQUEST=atest' 'INCLUDE=/opt/intel/composer_xe_2013.0.079/mkl/include' 'NSLOTS=12' 'G_BROKEN_FILENAMES=1' 'SGE_STDIN_PATH=/dev/null' 'I_MPI_ROOT=/opt/intel/impi/4.0.3.008' '_=/opt/intel/impi/4.0.3.008/intel64/bin/mpiexec.hydra' --global-user-env 3 'I_MPI_FABRICS_LIST=ofa' 'I_MPI_FALLBACK=0' 'I_MPI_DEBUG=5' --global-system-env 0 --start-pid 6 --proxy-core-count 6 --exec --exec-appnum 0 --exec-proc-count 6 --exec-local-env 0 --exec-wdir /home/x/datos/Fotoluminiscencia/00-Abinit-Conv-Ecut/LDA-ab/15Ha/4x4x4.G.relax.wfk4kss2 --exec-args 1 abinit6.12.3b

[mpiexec@compute-0-1.local] Launch arguments: /usr/bin/ssh -x -q compute-0-0 /opt/intel/impi/4.0.3.008/intel64/bin/pmi_proxy --control-port compute-0-1.local:58458 --debug --pmi-connect lazy-cache --pmi-aggregate -s 0 --bootstrap ssh --demux poll --pgid 0 --enable-stdin 1 --proxy-id 0
[mpiexec@compute-0-1.local] Launch arguments: /opt/intel/impi/4.0.3.008/intel64/bin/pmi_proxy --control-port compute-0-1.local:58458 --debug --pmi-connect lazy-cache --pmi-aggregate -s 0 --bootstrap ssh --demux poll --pgid 0 --enable-stdin 1 --proxy-id 1
[mpiexec@compute-0-1.local] STDIN will be redirected to 1 fd(s): 7
[proxy:0:1@compute-0-1.local] Start PMI_proxy 1
[proxy:0:1@compute-0-1.local] got pmi command (from 0): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 6): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1
pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 16): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 19): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 22): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 0): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 6): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 16): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 19): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 11): init pmi_version=1 pmi_subversion=1
[proxy:0:1@compute-0-1.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get_maxes
[proxy:0:1@compute-0-1.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:1@compute-0-1.local] got pmi command (from 11): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[proxy:0:1@compute-0-1.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:0@compute-0-0.local] Start PMI_proxy 0
[proxy:0:0@compute-0-0.local] STDIN will be redirected to 1 fd(s): 7
[proxy:0:0@compute-0-0.local] got pmi command (from 4): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 5): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 6): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 11): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 17): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes
kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 4): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 5): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 6): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 11): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 17): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 14: cmd=barrier_out
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 14: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 14): init pmi_version=1 pmi_subversion=1
[proxy:0:0@compute-0-0.local] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get_maxes
[proxy:0:0@compute-0-0.local] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024
[proxy:0:0@compute-0-0.local] got pmi command (from 14): barrier_in
[proxy:0:0@compute-0-0.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:0@compute-0-0.local] PMI response:
cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get_ranks2hosts
[proxy:0:1@compute-0-1.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get_my_kvsname
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get_my_kvsname
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=put kvsname=kvs_18749_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_JJ53O8
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 4: cmd=put_result rc=0 msg=success
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get_appnum
[proxy:0:1@compute-0-1.local] PMI response: cmd=appnum
appnum=0
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 6): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get_ranks2hosts
[proxy:0:0@compute-0-0.local] PMI response: put_ranks2hosts 60 2 11 compute-0-0 0,1,2,3,4,5, 11 compute-0-1 6,7,8,9,10,11,
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 11): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 4): put kvsname=kvs_18749_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_JJ53O8
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] got pmi command (from 0): put kvsname=kvs_18749_0 key=sharedFilename[6] value=/dev/shm/Intel_MPI_zumT0n
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=put kvsname=kvs_18749_0 key=sharedFilename[6] value=/dev/shm/Intel_MPI_zumT0n
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 0: cmd=put_result rc=0 msg=success
[proxy:0:1@compute-0-1.local] forwarding command (cmd=put kvsname=kvs_18749_0 key=sharedFilename[6] value=/dev/shm/Intel_MPI_zumT0n) upstream
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:1@compute-0-1.local] we don't understand the response put_result; forwarding downstream
[proxy:0:1@compute-0-1.local] got pmi command (from 16): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got
PMI command: cmd=barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 19): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 0): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get_my_kvsname
[proxy:0:1@compute-0-1.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] forwarding command (cmd=put kvsname=kvs_18749_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_JJ53O8) upstream
[proxy:0:0@compute-0-0.local] got pmi command (from 5): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 6): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 11): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get_appnum
[proxy:0:0@compute-0-0.local] PMI response: cmd=appnum appnum=0
[proxy:0:0@compute-0-0.local] got pmi command (from 17): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] we don't understand the response put_result; forwarding downstream
[proxy:0:0@compute-0-0.local] got pmi command (from 4): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get_my_kvsname
[proxy:0:0@compute-0-0.local] PMI response: cmd=my_kvsname kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 14): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 22: cmd=barrier_out
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 22: cmd=barrier_out
[proxy:0:0@compute-0-0.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:1@compute-0-1.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get kvsname=kvs_18749_0 key=sharedFilename[6]
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_zumT0n
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=sharedFilename[6]
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_zumT0n
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get kvsname=kvs_18749_0 key=sharedFilename[6]
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_zumT0n
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get kvsname=kvs_18749_0 key=sharedFilename[6]
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_zumT0n
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get kvsname=kvs_18749_0
key=sharedFilename[0]
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_JJ53O8
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get kvsname=kvs_18749_0 key=sharedFilename[0]
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_JJ53O8
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=sharedFilename[0]
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_JJ53O8
key=sharedFilename[6]
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_zumT0n
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get kvsname=kvs_18749_0 key=sharedFilename[0]
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_JJ53O8
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get kvsname=kvs_18749_0 key=sharedFilename[0]
[11] MPI startup(): shm and ofa data transfer modes
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_JJ53O8
[5] MPI startup(): shm and ofa data transfer modes
[6] MPI startup(): shm and ofa data transfer modes
[3] MPI startup(): shm and ofa data transfer modes
[10] MPI startup(): shm and ofa data transfer modes
[0] MPI startup(): shm and ofa data transfer modes
[2] MPI startup(): shm and ofa data transfer modes
[8] MPI startup(): shm and ofa data transfer modes
[4] MPI startup(): shm and ofa data transfer modes
[7] MPI startup(): shm and ofa data transfer modes
[9] MPI startup(): shm and ofa data transfer modes
[proxy:0:1@compute-0-1.local] got pmi command (from 6): put kvsname=kvs_18749_0 key=P7-businesscard value=OFA#00000006:004400e1:004400e2:004400e0$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 16): put kvsname=kvs_18749_0 key=P9-businesscard value=OFA#00000006:004400ef:004400f0:004400ee$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 19): put kvsname=kvs_18749_0 key=P10-businesscard value=OFA#00000006:004400c5:004400c6:004400c4$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 22): put kvsname=kvs_18749_0 key=P11-businesscard value=OFA#00000006:004400a9:004400aa:004400a8$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 0): put kvsname=kvs_18749_0 key=P6-businesscard value=OFA#00000006:004400b7:004400b8:004400b6$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 11): put kvsname=kvs_18749_0 key=P8-businesscard value=OFA#00000006:004400d3:004400d4:004400d2$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P7-businesscard value=OFA#00000006:004400e1:004400e2:004400e0$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P9-businesscard value=OFA#00000006:004400ef:004400f0:004400ee$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P10-businesscard value=OFA#00000006:004400c5:004400c6:004400c4$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got
aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P11-businesscard value=OFA#00000006:004400a9:004400aa:004400a8$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P6-businesscard value=OFA#00000006:004400b7:004400b8:004400b6$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P8-businesscard value=OFA#00000006:004400d3:004400d4:004400d2$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[proxy:0:1@compute-0-1.local] got pmi command (from 16): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 19): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 0): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 6): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 11): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[proxy:0:1@compute-0-1.local] forwarding command (cmd=barrier_in) upstream
[1] MPI startup(): shm and ofa data transfer modes
[proxy:0:0@compute-0-0.local] got pmi command (from 5): put kvsname=kvs_18749_0 key=P1-businesscard value=OFA#00000008:004800ef:004800f0:004800ee$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 11): put kvsname=kvs_18749_0 key=P3-businesscard value=OFA#00000008:004800b7:004800b8:004800b6$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 14): put kvsname=kvs_18749_0 key=P4-businesscard value=OFA#00000008:004800e1:004800e2:004800e0$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 4): put kvsname=kvs_18749_0 key=P0-businesscard value=OFA#00000008:004800c5:004800c6:004800c4$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 6): put kvsname=kvs_18749_0 key=P2-businesscard value=OFA#00000008:004800d3:004800d4:004800d2$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 17): put
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P1-businesscard value=OFA#00000008:004800ef:004800f0:004800ee$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P3-businesscard value=OFA#00000008:004800b7:004800b8:004800b6$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P4-businesscard value=OFA#00000008:004800e1:004800e2:004800e0$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P0-businesscard value=OFA#00000008:004800c5:004800c6:004800c4$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P2-businesscard value=OFA#00000008:004800d3:004800d4:004800d2$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
[mpiexec@compute-0-1.local] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_18749_0 key=P5-businesscard value=OFA#00000008:004800a9:004800aa:004800a8$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] reply: cmd=put_result rc=0 msg=success
kvsname=kvs_18749_0 key=P5-businesscard value=OFA#00000008:004800a9:004800aa:004800a8$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 11): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 14): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 4): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 5): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 6): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 17: cmd=barrier_out
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 17: cmd=barrier_out
[proxy:0:0@compute-0-0.local] got pmi command (from 17): barrier_in
[proxy:0:0@compute-0-0.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:0@compute-0-0.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get kvsname=kvs_18749_0 key=P6-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400b7:004400b8:004400b6$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=P7-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400e1:004400e2:004400e0$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get kvsname=kvs_18749_0 key=P8-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400d3:004400d4:004400d2$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get kvsname=kvs_18749_0 key=P9-businesscard
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400ef:004400f0:004400ee$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get kvsname=kvs_18749_0 key=P10-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400c5:004400c6:004400c4$fabrics_list#shm_and_ofa$
kvsname=kvs_18749_0
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=get kvsname=kvs_18749_0 key=P11-businesscard
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 4: cmd=get_result rc=0 msg=success value=OFA#00000006:004400a9:004400aa:004400a8$fabrics_list#shm_and_ofa$
key=P11-businesscard key=P11-businesscard
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get
kvsname=kvs_18749_0 key=P5-businesscard
[proxy:0:1@compute-0-1.local] forwarding command (cmd=get kvsname=kvs_18749_0 key=P5-businesscard) upstream
[proxy:0:1@compute-0-1.local] got pmi command (from 6): get kvsname=kvs_18749_0 key=P8-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400d3:004400d4:004400d2$fabrics_list#shm_and_ofa$
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=get kvsname=kvs_18749_0 key=P5-businesscard
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 0: cmd=get_result rc=0 msg=success value=OFA#00000008:004800a9:004800aa:004800a8$fabrics_list#shm_and_ofa$
key=P5-businesscard
[proxy:0:0@compute-0-0.local] forwarding command (cmd=get kvsname=kvs_18749_0 key=P11-businesscard) upstream
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get
[proxy:0:1@compute-0-1.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=P9-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400ef:004400f0:004400ee$fabrics_list#shm_and_ofa$
kvsname=kvs_18749_0 key=P4-businesscard
[proxy:0:1@compute-0-1.local] got pmi command (from 16): get kvsname=kvs_18749_0 key=P10-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400c5:004400c6:004400c4$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800e1:004800e2:004800e0$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] got pmi command (from 19): get kvsname=kvs_18749_0 key=P11-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400a9:004400aa:004400a8$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] we don't understand the response get_result; forwarding downstream
[proxy:0:1@compute-0-1.local] we don't understand the response get_result; forwarding downstream
[proxy:0:1@compute-0-1.local] got pmi command (from 22): get
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get kvsname=kvs_18749_0
kvsname=kvs_18749_0 key=P0-businesscard
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=get kvsname=kvs_18749_0 key=P0-businesscard
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 22: cmd=get_result rc=0 msg=success value=OFA#00000008:004800c5:004800c6:004800c4$fabrics_list#shm_and_ofa$
key=P0-businesscard
[proxy:0:1@compute-0-1.local] forwarding command (cmd=get kvsname=kvs_18749_0 key=P0-businesscard) upstream
key=P1-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800ef:004800f0:004800ee$fabrics_list#shm_and_ofa$
[proxy:0:1@compute-0-1.local] we don't understand the response get_result; forwarding downstream
[proxy:0:1@compute-0-1.local] got pmi command (from 0): get kvsname=kvs_18749_0
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=P7-businesscard
[proxy:0:1@compute-0-1.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000006:004400e1:004400e2:004400e0$fabrics_list#shm_and_ofa$
key=P2-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800d3:004800d4:004800d2$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get kvsname=kvs_18749_0 key=P0-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success
value=OFA#00000008:004800c5:004800c6:004800c4$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 6): get kvsname=kvs_18749_0 key=P3-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800b7:004800b8:004800b6$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get kvsname=kvs_18749_0 key=P3-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800b7:004800b8:004800b6$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 17): get
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=get kvsname=kvs_18749_0 key=P6-businesscard
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 17: cmd=get_result rc=0 msg=success value=OFA#00000006:004400b7:004400b8:004400b6$fabrics_list#shm_and_ofa$
key=P6-businesscard kvsname=kvs_18749_0 key=P6-businesscard
[proxy:0:0@compute-0-0.local] forwarding command (cmd=get kvsname=kvs_18749_0 key=P6-businesscard) upstream
[proxy:0:0@compute-0-0.local] got pmi command (from 4): get kvsname=kvs_18749_0 key=P1-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800ef:004800f0:004800ee$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 5): get kvsname=kvs_18749_0 key=P2-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800d3:004800d4:004800d2$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 11): get kvsname=kvs_18749_0 key=P4-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800e1:004800e2:004800e0$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] got pmi command (from 14): get kvsname=kvs_18749_0 key=P5-businesscard
[proxy:0:0@compute-0-0.local] PMI response: cmd=get_result rc=0 msg=success value=OFA#00000008:004800a9:004800aa:004800a8$fabrics_list#shm_and_ofa$
[proxy:0:0@compute-0-0.local] we don't understand the response get_result; forwarding downstream
[proxy:0:1@compute-0-1.local] got pmi command (from 6): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 16): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 19): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 6): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 11): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 0): barrier_in
[proxy:0:1@compute-0-1.local] got pmi command (from 22): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[proxy:0:1@compute-0-1.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:0@compute-0-0.local] got pmi command (from 4): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 11): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 17): barrier_in
[proxy:0:0@compute-0-0.local] got pmi command (from 5): barrier_in
[mpiexec@compute-0-1.local] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@compute-0-1.local] PMI response to fd 0 pid 14: cmd=barrier_out
[mpiexec@compute-0-1.local] PMI response to fd 6 pid 14: cmd=barrier_out
[proxy:0:0@compute-0-0.local] got pmi command (from 14): barrier_in
[proxy:0:0@compute-0-0.local] forwarding command (cmd=barrier_in) upstream
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI response: cmd=barrier_out
[proxy:0:1@compute-0-1.local] PMI
[0] MPI startup(): Rank  Pid    Node name          Pin cpu
[0] MPI startup():  0    29636  compute-0-0.local  {0,2}
[0] MPI startup():  1    29637  compute-0-0.local  {4,6}
[0] MPI startup():  2    29638  compute-0-0.local  {8,10}
[0] MPI startup():  3    29639  compute-0-0.local  {1,3}
[0] MPI startup():  4    29640  compute-0-0.local  {5,7}
[0] MPI startup():  5    29641  compute-0-0.local  {9,11}
[0] MPI startup():  6    18752  compute-0-1.local  {0,2}
[0] MPI startup():  7    18753  compute-0-1.local  {4,6}
[0] MPI startup():  8    18754  compute-0-1.local  {8,10}
[0] MPI startup():  9    18755  compute-0-1.local  {1,3}
[0] MPI startup(): 10    18756  compute-0-1.local  {5,7}
[0] MPI startup(): 11    18757  compute-0-1.local  {9,11}
[0] MPI startup(): I_MPI_DEBUG=5
[0] MPI startup(): I_MPI_FABRICS_LIST=ofa
[0] MPI startup(): I_MPI_FALLBACK=0
[0] MPI startup(): I_MPI_PIN_MAPPING=6:0 0,1 1,2 4,3 5,4 8,5 9
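The pin table reports which logical CPUs each rank was bound to (two per rank here, following I_MPI_PIN_MAPPING). One way to double-check the binding from inside the application is to print each rank's Linux affinity mask; a small sketch, assuming a glibc/Linux node like these:

```c
#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>
#include <mpi.h>

int main(int argc, char **argv)
{
    int rank, cpu, off = 0;
    char buf[512] = "";
    cpu_set_t mask;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* Ask the kernel which logical CPUs this process may run on;
       pid 0 means "the calling process". */
    if (sched_getaffinity(0, sizeof(mask), &mask) == 0) {
        for (cpu = 0; cpu < CPU_SETSIZE && off < (int)sizeof(buf) - 8; cpu++)
            if (CPU_ISSET(cpu, &mask))
                off += snprintf(buf + off, sizeof(buf) - off, "%d ", cpu);
    }
    printf("rank %2d -> cpus { %s}\n", rank, buf);

    MPI_Finalize();
    return 0;
}
```

Launched the same way as the job above, each rank should print the same {a,b} pair the pin table shows for it.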
ABINIT

Give name for formatted input file: cSi216I3J.kss.in
Give name for formatted output file: cSi216I3J.kss.out
Give root name for generic input files: cSi216I3J.kss.xi
Give root name for generic output files: cSi216I3J.kss.xo
Give root name for generic temporary files: cSi216I3J.kss.xe

-P-0000 isfile : WARNING -
-P-0000  Finds that output file cSi216I3J.kss.out
-P-0000  already exists.
-P-0000  new name assigned: cSi216I3J.kss.outA
-P-0000 leave_test : synchronization done...

.Version 6.12.3 of ABINIT
.(MPI version, prepared for a x86_64_linux_intel13.0 computer)

.Copyright (C) 1998-2012 ABINIT group .
 ABINIT comes with ABSOLUTELY NO WARRANTY.
 It is free software, and you are welcome to redistribute it
 under certain conditions (GNU General Public License,
 see ~abinit/COPYING or http://www.gnu.org/copyleft/gpl.txt).

 ABINIT is a project of the Universite Catholique de Louvain,
 Corning Inc. and other collaborators, see ~abinit/doc/developers/contributors.txt .
 Please read ~abinit/doc/users/acknowledgments.html for suggested
 acknowledgments of the ABINIT effort.
 For more information, see http://www.abinit.org .

.Starting date : Wed 17 Oct 2012.
- ( at 16h33 )

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

 === Build Information ===
  Version      : 6.12.3
  Build target : x86_64_linux_intel13.0
  Build date   : 20121009

 === Compiler Suite ===
  C compiler       : intel13.0
  CFLAGS           : -g -O2 -vec-report0
  C++ compiler     : gnu13.0
  CXXFLAGS         : -g -O2 -xHost -mfpmath=sse
  Fortran compiler : intel13.0
  FCFLAGS          : -g -extend-source -vec-report0 -noaltparam -nofpscomp
  FC_LDFLAGS       : -static-intel -static-libgcc

 === Optimizations ===
  Debug level        : basic
  Optimization level : standard
  Architecture       : intel_xeon

 === MPI ===
  Parallel build : yes
  Parallel I/O   : yes
  Time tracing   : no
  GPU support    : no

 === Connectors / Fallbacks ===
  Connectors on : yes
  Fallbacks on  : yes
  DFT flavor    : libxc-fallback+atompaw-fallback+bigdft-fallback+wannier90-fallback
  FFT flavor    : fftw3-mkl
  LINALG flavor : mkl
  MATH flavor   : none
  TIMER flavor  : abinit
  TRIO flavor   : netcdf-fallback+etsf_io-fallback

 === Experimental features ===
  Bindings            : no
  Exports             : no
  GW double-precision : yes

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

 Default optimizations:
   -O2 -xHost

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

 CPP options activated during the build:
  CC_INTEL CXX_GNU FC_INTEL HAVE_DFT_ATOMPAW HAVE_DFT_BIGDFT HAVE_DFT_LIBXC
  HAVE_DFT_WANNIER90 HAVE_FC_ALLOCATABLE_DT... HAVE_FC_CPUTIME HAVE_FC_EXIT
  HAVE_FC_GAMMA HAVE_FC_ISO_C_BINDING HAVE_FC_LONG_LINES HAVE_FC_NULL
  HAVE_FC_STREAM_IO HAVE_FFT HAVE_FFT_FFTW3_MKL HAVE_FFT_MPI HAVE_FFT_SERIAL
  HAVE_LINALG HAVE_LINALG_SERIAL HAVE_MPI HAVE_MPI2 HAVE_MPI_IO
  HAVE_MPI_TYPE_CREATE_S... HAVE_OS_LINUX HAVE_TIMER HAVE_TIMER_ABINIT
  HAVE_TIMER_MPI HAVE_TIMER_SERIAL HAVE_TRIO_ETSF_IO HAVE_TRIO_NETCDF

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

- input file            -> cSi216I3J.kss.in
- output file           -> cSi216I3J.kss.outA
- root for input files  -> cSi216I3J.kss.xi
- root for output files -> cSi216I3J.kss.xo

 instrng : 281 lines of input have been read
 m_ab6_invars_f90 (ab6_invars_load) : token%timopt = 0
 iofn2 : Please give name of formatted atomic psp file
 iofn2 : for atom type 1, psp file is 14-Si.LDA.fhi
-P-0000 read the values zionpsp= 4.0, pspcod= 6, lmax= 3
-P-0000 inpspheads : deduce mpsang = 4, n1xccc = 0.
-P-0000 leave_test : synchronization done...
 invars1m : enter jdtset= 0
 invars1 : treat image number 1
 symlatt : the Bravais lattice is hR (rhombohedral)
 xred is defined in input file
 ingeo : takes atomic coordinates from input array xred
 symlatt : the Bravais lattice is hR (rhombohedral)
 symlatt : the Bravais lattice is hR (rhombohedral)
 symspgr : the symmetry operation no. 1 is the identity
 symaxes : the symmetry operation no. 2 is a 3-axis
 symaxes : the symmetry operation no. 3 is a 3-axis
 symspgr : spgroup= 146 R3 (=C3^4)
 inkpts : enter
 getkgrid : length of smallest supercell vector (bohr)= 1.225941E+02
 Simple Lattice Grid
 symkpt : found identity, with number 1
 inkpts : exit

 abinit : WARNING -
  The product of npkpt, npfft, npband and npspinor is bigger than the
  number of processors. The user-defined values of npkpt, npfft, npband
  or npspinor will be modified, in order to bring this product below nproc.
  At present, only a very simple algorithm is used ...
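This warning is pure bookkeeping: ABINIT's k-point/band/FFT/spinor distribution must satisfy npkpt * npfft * npband * npspinor <= nproc, and the values from the input file multiply out above the 12 slots SGE granted (NSLOTS=12), so ABINIT shrinks them itself. The check amounts to the following (function and variable names are illustrative, not ABINIT's):

```c
#include <stdio.h>

/* The constraint ABINIT enforces here: the band*FFT*kpt*spinor
   process grid cannot use more processes than MPI provides. */
static int grid_fits(int npkpt, int npfft, int npband, int npspinor, int nproc)
{
    return npkpt * npfft * npband * npspinor <= nproc;
}

int main(void)
{
    int nproc = 12;  /* NSLOTS granted by SGE for this job */

    /* After ABINIT's adjustment below: 2*1*1*1 = 2 <= 12, so the grid
       "fits", but 10 of the 12 processes are left outside it. */
    printf("fits: %d\n", grid_fits(2, 1, 1, 1, nproc));
    return 0;
}
```

As the next warning spells out, satisfying the inequality is not enough: the product should equal nproc, and the leftover mismatch (2 vs. 12) is where the run goes wrong.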
 abinit : WARNING -
  Set npfft and npband to 1

 initmpi_grid : WARNING -
  The number of band*FFT*kpt*spinor processors, npband*npfft*npkpt*npspinor,
  should be equal to the total number of processors, nproc.
  However, npband = 1, npfft = 1, npkpt = 2, npspinor = 1, and nproc = 12.

 npfft, npband, npspinor and npkpt: 1 1 1 2

Fatal error in MPI_Comm_rank: Invalid communicator, error stack:
MPI_Comm_rank(118): MPI_Comm_rank(MPI_COMM_NULL, rank=0x29319b8) failed
MPI_Comm_rank(71).: Null communicator
[this identical three-line error stack appears 10 times in the original output]

[proxy:0:1@compute-0-1.local] got crush from 6, 0
[proxy:0:1@compute-0-1.local] got crush from 0, 0
[proxy:0:1@compute-0-1.local] got crush from 11, 0
[proxy:0:1@compute-0-1.local] got crush from 16, 0
[proxy:0:1@compute-0-1.local] got crush from 22, 0
[proxy:0:1@compute-0-1.local] got crush from 19, 0

 mpi_enreg%sizecart(1), np_fft    = 1 1
 mpi_enreg%sizecart(2), np_band   = 1 1
 mpi_enreg%sizecart(3), np_kpt    = 2 2
 mpi_enreg%sizecart(4), np_spinor = 1 1
 in initmpi_grid : me_fft, me_band, me_spin, me_kpt are 0 0 0 0

 invars1 : mkmem undefined in the input file. Use default mkmem = nkpt.
 invars1 : With nkpt_me = 7 and mkmem = 14, ground state wf handled in core.
           Resetting mkmem to nkpt_me to save memory space.
 invars1 : mkqmem undefined in the input file. Use default mkqmem = nkpt.
 invars1 : With nkpt_me = 7 and mkqmem = 14, ground state wf handled in core.
           Resetting mkqmem to nkpt_me to save memory space.

[proxy:0:0@compute-0-0.local] got crush from 17, 0
[proxy:0:0@compute-0-0.local] got crush from 4, 0
[proxy:0:0@compute-0-0.local] got crush from 14, 0
[proxy:0:0@compute-0-0.local] got crush from 11, 0
[proxy:0:0@compute-0-0.local] got crush from 6, 0
[proxy:0:0@compute-0-0.local] got crush from 5, 0

APPLICATION TERMINATED WITH THE EXIT STRING: Hangup (signal 1)
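The fatal error is the classic MPI_COMM_NULL pitfall: once only 2 of the 12 ranks belong to the 1x1x2x1 band/FFT/kpt/spinor grid, the excluded ranks can be handed MPI_COMM_NULL for the grid communicator, and calling MPI_Comm_rank on a null communicator aborts with exactly the "Invalid communicator" stack above. A minimal standalone reproducer of this failure mode (an illustration of the error class, not a confirmed trace through ABINIT 6.12's initmpi_grid; a simplified 2x1 grid stands in for the 1x1x2x1 one):

```c
#include <stdio.h>
#include <mpi.h>

int main(int argc, char **argv)
{
    int rank, grid_rank;
    int dims[2]    = {2, 1};   /* a 2-process grid, like npkpt = 2 here */
    int periods[2] = {0, 0};
    MPI_Comm grid;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* With 12 ranks and a 2x1 grid, MPI_Cart_create returns
       MPI_COMM_NULL on the 10 ranks that did not fit into the grid. */
    MPI_Cart_create(MPI_COMM_WORLD, 2, dims, periods, 1, &grid);

    if (grid == MPI_COMM_NULL) {
        /* This guard is what prevents the crash in the log:
           MPI_Comm_rank(MPI_COMM_NULL, ...) is the fatal call. */
        printf("rank %d: excluded from the grid\n", rank);
    } else {
        MPI_Comm_rank(grid, &grid_rank);
        printf("rank %d: grid rank %d\n", rank, grid_rank);
        MPI_Comm_free(&grid);
    }

    MPI_Finalize();
    return 0;
}
```

Run with mpiexec -n 12; removing the MPI_COMM_NULL guard makes ranks 2-11 fail with the same error stack as above. Consistent with the initmpi_grid warning, choosing npkpt/npfft/npband/npspinor so that their product equals the number of slots requested from SGE avoids leaving ranks outside the grid in the first place.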