<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Please provide the equivalent mpirun flags in Intel® MPI Library</title>
    <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679932#M12110</link>
    <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/245425"&gt;@TobiasK&lt;/a&gt;&amp;nbsp;,&lt;BR /&gt;I am using Intel MPI not OpenMP. Actually, this option is working well in INTEL MPI version 2023 and 2024.0.1 but not in latest mentioned version. Please let us know what is exact equivalent flag to get same behavior.&lt;BR /&gt;&lt;BR /&gt;Thanks&lt;BR /&gt;Arun prasad&lt;/P&gt;</description>
    <pubDate>Wed, 02 Apr 2025 16:28:50 GMT</pubDate>
    <dc:creator>Arunamat</dc:creator>
    <dc:date>2025-04-02T16:28:50Z</dc:date>
    <item>
      <title>Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679916#M12108</link>
      <description>&lt;P&gt;Hi Team,&lt;BR /&gt;We are using this below command using intel oneapi&amp;nbsp;&lt;SPAN&gt;2023.1 and 2024.0.1.&lt;/SPAN&gt;&lt;BR /&gt;mpirun-n 4 -machinefile $HOME/machinefile&amp;nbsp; &lt;STRONG&gt;--map-by socket:pe=$CORES_PER_TASK --bind-to core&lt;BR /&gt;&lt;/STRONG&gt;&amp;nbsp;The above MPIRUN options is not working on these&amp;nbsp;intel oneapi&amp;nbsp;&lt;SPAN&gt;2025.0.1 and 2024.2.1 version.&lt;BR /&gt;can you please suggest equivalent option to make it work on the latest version.&lt;BR /&gt;&lt;BR /&gt;Thanks&lt;BR /&gt;Arun prasad&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 14:53:22 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679916#M12108</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-02T14:53:22Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679930#M12109</link>
      <description>&lt;P&gt;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/419971"&gt;@Arunamat&lt;/a&gt;&amp;nbsp;of course that does not work since it's an OpenMPI specific command line.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;What are you trying to achieve? If you are using OpenMP and OMP_NUM_THREADS x 4 is using all the cores, just delete --map-by and --bind-to core&lt;BR /&gt;&lt;BR /&gt;Best&lt;BR /&gt;Tobias&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 16:03:00 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679930#M12109</guid>
      <dc:creator>TobiasK</dc:creator>
      <dc:date>2025-04-02T16:03:00Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679932#M12110</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/245425"&gt;@TobiasK&lt;/a&gt;&amp;nbsp;,&lt;BR /&gt;I am using Intel MPI not OpenMP. Actually, this option is working well in INTEL MPI version 2023 and 2024.0.1 but not in latest mentioned version. Please let us know what is exact equivalent flag to get same behavior.&lt;BR /&gt;&lt;BR /&gt;Thanks&lt;BR /&gt;Arun prasad&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 16:28:50 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679932#M12110</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-02T16:28:50Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679934#M12111</link>
      <description>&lt;P&gt;Ok I have to apologize, even though --map-by --bind-to are OpenMPI specific binding options, we just ignore them. However, that works.&lt;BR /&gt;&lt;BR /&gt;Can you please give the output of:&lt;/P&gt;
&lt;LI-CODE lang="bash"&gt;I_MPI_DEBUG=10 mpirun -n 4 --machinefile $HOME/machinefile --map-by socket:pe=1 IMB-MPI1 allreduce&lt;/LI-CODE&gt;</description>
      <pubDate>Wed, 02 Apr 2025 16:58:21 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679934#M12111</guid>
      <dc:creator>TobiasK</dc:creator>
      <dc:date>2025-04-02T16:58:21Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679953#M12112</link>
      <description>&lt;P&gt;Please find the output on version&amp;nbsp;&lt;SPAN&gt;&amp;nbsp;2024.2.1 version&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="Arunamat_0-1743620246535.png" style="width: 824px;"&gt;&lt;img src="https://community.intel.com/t5/image/serverpage/image-id/64443i0D96DA28A6A15FBB/image-dimensions/824x105/is-moderation-mode/true?v=v2&amp;amp;whitelist-exif-data=Orientation%2CResolution%2COriginalDefaultFinalSize%2CCopyright" width="824" height="105" role="button" title="Arunamat_0-1743620246535.png" alt="Arunamat_0-1743620246535.png" /&gt;&lt;/span&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 18:58:34 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679953#M12112</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-02T18:58:34Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679958#M12113</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Please find the output on version&amp;nbsp;&lt;/SPAN&gt;&lt;SPAN class=""&gt;&amp;nbsp;2024.2.1 version&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="Arunamat_3-1743620425071.png" style="width: 400px;"&gt;&lt;img src="https://community.intel.com/t5/image/serverpage/image-id/64446i73B2B2FDB4C4AD4D/image-size/medium/is-moderation-mode/true?v=v2&amp;amp;px=400&amp;amp;whitelist-exif-data=Orientation%2CResolution%2COriginalDefaultFinalSize%2CCopyright" role="button" title="Arunamat_3-1743620425071.png" alt="Arunamat_3-1743620425071.png" /&gt;&lt;/span&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 19:00:43 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679958#M12113</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-02T19:00:43Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679964#M12114</link>
      <description>&lt;P&gt;Please find output from 2023 version:&lt;BR /&gt;Using default -machinefile setting (/enc/x0144578/mpd.hosts)&lt;BR /&gt;[0] MPI startup(): Intel(R) MPI Library, Version 2021.9 Build 20230307 (id: d82b3071db)&lt;BR /&gt;[0] MPI startup(): Copyright (C) 2003-2023 Intel Corporation. All rights reserved.&lt;BR /&gt;[0] MPI startup(): library kind: release&lt;BR /&gt;[0] MPI startup(): shm segment size (342 MB per rank) * (4 local ranks) = 1368 MB total&lt;BR /&gt;[0] MPI startup(): max number of MPI_Request per vci: 67108864 (pools: 1)&lt;BR /&gt;[0] MPI startup(): File "" not found&lt;BR /&gt;[0] MPI startup(): Load tuning file: "/program/intel-oneapi-2023.1/2023.1.0/mpi/2021.9.0/etc/tuning_icx_shm.dat"&lt;BR /&gt;[0] MPI startup(): threading: mode: direct&lt;BR /&gt;[0] MPI startup(): threading: vcis: 1&lt;BR /&gt;[0] MPI startup(): threading: app_threads: -1&lt;BR /&gt;[0] MPI startup(): threading: runtime: generic&lt;BR /&gt;[0] MPI startup(): threading: progress_threads: 0&lt;BR /&gt;[0] MPI startup(): threading: async_progress: 0&lt;BR /&gt;[0] MPI startup(): threading: lock_level: global&lt;BR /&gt;[0] MPI startup(): tag bits available: 30 (TAG_UB value: 1073741823)&lt;BR /&gt;[0] MPI startup(): source bits available: 0 (Maximal number of rank: 0)&lt;BR /&gt;[0] MPI startup(): Rank Pid Node name Pin cpu&lt;BR /&gt;[0] MPI startup(): 0 16048 ip-10-132-133-136.ec2.internal {0,1,2,3,16,17,18,19}&lt;BR /&gt;[0] MPI startup(): 1 16049 ip-10-132-133-136.ec2.internal {4,5,6,7,20,21,22,23}&lt;BR /&gt;[0] MPI startup(): 2 16050 ip-10-132-133-136.ec2.internal {8,9,10,11,24,25,26,27}&lt;BR /&gt;[0] MPI startup(): 3 16051 ip-10-132-133-136.ec2.internal {12,13,14,15,28,29,30,31}&lt;BR /&gt;[0] MPI startup(): I_MPI_ROOT=/program/intel-oneapi-2023.1/2023.1.0/mpi/2021.9.0&lt;BR /&gt;[0] MPI startup(): I_MPI_MPIRUN=mpirun&lt;BR /&gt;[0] MPI startup(): I_MPI_HYDRA_TOPOLIB=hwloc&lt;BR /&gt;[0] MPI startup(): I_MPI_INTERNAL_MEM_POLICY=default&lt;BR 
/&gt;[0] MPI startup(): I_MPI_FABRICS=shm&lt;BR /&gt;[0] MPI startup(): I_MPI_DEBUG=10&lt;BR /&gt;#----------------------------------------------------------------&lt;BR /&gt;# Intel(R) MPI Benchmarks 2021.5, MPI-1 part&lt;BR /&gt;#----------------------------------------------------------------&lt;BR /&gt;# Date : Wed Apr 2 19:18:36 2025&lt;BR /&gt;# Machine : x86_64&lt;BR /&gt;# System : Linux&lt;BR /&gt;# Release : 4.18.0-425.19.2.el8_7.x86_64&lt;BR /&gt;# Version : #1 SMP Tue Apr 4 22:38:11 UTC 2023&lt;BR /&gt;# MPI Version : 3.1&lt;BR /&gt;# MPI Thread Environment:&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;# Calling sequence was:&lt;/P&gt;&lt;P&gt;# IMB-MPI1 allreduce&lt;/P&gt;&lt;P&gt;# Minimum message length in bytes: 0&lt;BR /&gt;# Maximum message length in bytes: 4194304&lt;BR /&gt;#&lt;BR /&gt;# MPI_Datatype : MPI_BYTE&lt;BR /&gt;# MPI_Datatype for reductions : MPI_FLOAT&lt;BR /&gt;# MPI_Op : MPI_SUM&lt;BR /&gt;#&lt;BR /&gt;#&lt;/P&gt;&lt;P&gt;# List of Benchmarks to run:&lt;/P&gt;&lt;P&gt;# Allreduce&lt;/P&gt;&lt;P&gt;#----------------------------------------------------------------&lt;BR /&gt;# Benchmarking Allreduce&lt;BR /&gt;# #processes = 2&lt;BR /&gt;# ( 2 additional processes waiting in MPI_Barrier)&lt;BR /&gt;#----------------------------------------------------------------&lt;BR /&gt;#bytes #repetitions t_min[usec] t_max[usec] t_avg[usec]&lt;BR /&gt;0 1000 0.03 0.03 0.03&lt;BR /&gt;4 1000 0.42 0.50 0.46&lt;BR /&gt;8 1000 0.46 0.52 0.49&lt;BR /&gt;16 1000 0.44 0.50 0.47&lt;BR /&gt;32 1000 0.42 0.47 0.45&lt;BR /&gt;64 1000 0.50 0.54 0.52&lt;BR /&gt;128 1000 0.51 0.55 0.53&lt;BR /&gt;256 1000 0.50 0.55 0.53&lt;BR /&gt;512 1000 0.66 0.85 0.76&lt;BR /&gt;1024 1000 0.73 0.80 0.76&lt;BR /&gt;2048 1000 0.87 0.90 0.89&lt;BR /&gt;4096 1000 1.11 1.19 1.15&lt;BR /&gt;8192 1000 1.95 2.05 2.00&lt;BR /&gt;16384 1000 3.56 3.73 3.65&lt;BR /&gt;32768 1000 5.77 5.78 5.78&lt;BR /&gt;65536 640 7.48 7.50 7.49&lt;BR /&gt;131072 320 12.94 12.96 12.95&lt;BR /&gt;262144 160 24.62 
24.72 24.67&lt;BR /&gt;524288 80 55.64 56.88 56.26&lt;BR /&gt;1048576 40 129.90 130.03 129.97&lt;BR /&gt;2097152 20 321.61 321.76 321.68&lt;BR /&gt;4194304 10 694.10 694.28 694.19&lt;/P&gt;&lt;P&gt;#----------------------------------------------------------------&lt;BR /&gt;# Benchmarking Allreduce&lt;BR /&gt;# #processes = 4&lt;BR /&gt;#----------------------------------------------------------------&lt;BR /&gt;#bytes #repetitions t_min[usec] t_max[usec] t_avg[usec]&lt;BR /&gt;0 1000 0.03 0.04 0.04&lt;BR /&gt;4 1000 0.40 0.50 0.46&lt;BR /&gt;8 1000 0.38 0.47 0.45&lt;BR /&gt;16 1000 0.85 0.85 0.85&lt;BR /&gt;32 1000 0.42 0.51 0.47&lt;BR /&gt;64 1000 0.87 0.88 0.88&lt;BR /&gt;128 1000 0.91 0.91 0.91&lt;BR /&gt;256 1000 0.96 0.96 0.96&lt;BR /&gt;512 1000 0.93 1.04 0.98&lt;BR /&gt;1024 1000 1.33 1.34 1.33&lt;BR /&gt;2048 1000 1.59 1.61 1.60&lt;BR /&gt;4096 1000 2.14 2.15 2.15&lt;BR /&gt;8192 1000 3.92 4.20 4.06&lt;BR /&gt;16384 1000 5.21 5.23 5.22&lt;BR /&gt;32768 1000 8.47 8.50 8.48&lt;BR /&gt;65536 640 12.35 12.39 12.37&lt;BR /&gt;131072 320 19.99 20.32 20.16&lt;BR /&gt;262144 160 37.97 38.55 38.26&lt;BR /&gt;524288 80 76.63 78.00 77.15&lt;BR /&gt;1048576 40 188.38 193.09 190.38&lt;BR /&gt;2097152 20 346.11 354.84 351.65&lt;BR /&gt;4194304 10 1031.87 1033.13 1032.28&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;# All processes entering MPI_Finalize&lt;/P&gt;</description>
      <pubDate>Wed, 02 Apr 2025 19:23:21 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1679964#M12114</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-02T19:23:21Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1680134#M12115</link>
      <description>&lt;P&gt;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/419971"&gt;@Arunamat&lt;/a&gt;&amp;nbsp;&lt;BR /&gt;&lt;BR /&gt;We reenabled parsing of --bind-to and --map-by with 2021.15 which is part of OneAPI 2025.1.&lt;BR /&gt;&lt;BR /&gt;However, please note, we really just ignore those options, so please clean up your parameters.&lt;/P&gt;</description>
      <pubDate>Thu, 03 Apr 2025 08:46:26 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1680134#M12115</guid>
      <dc:creator>TobiasK</dc:creator>
      <dc:date>2025-04-03T08:46:26Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1681394#M12117</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/245425"&gt;@TobiasK&lt;/a&gt;&amp;nbsp;,&lt;BR /&gt;&amp;nbsp; When will be this version release available?&lt;BR /&gt;&lt;BR /&gt;Please share &amp;nbsp;if any equivalent INTEL MPI option available for the same above OPENMPI parameters. Please share your inputs.&lt;BR /&gt;&lt;BR /&gt;Thanks&lt;BR /&gt;Arun&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 08 Apr 2025 13:06:04 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1681394#M12117</guid>
      <dc:creator>Arunamat</dc:creator>
      <dc:date>2025-04-08T13:06:04Z</dc:date>
    </item>
    <item>
      <title>Re: Please provide the equivalent mpirun flags</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1681722#M12119</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.intel.com/t5/user/viewprofilepage/user-id/419971"&gt;@Arunamat&lt;/a&gt;&amp;nbsp;&lt;BR /&gt;oneAPI 2025.1/Intel MPI 2021.15 are already out.&lt;BR /&gt;&lt;BR /&gt;For the pinning options, please find the documentation here:&lt;BR /&gt;&lt;A href="https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-15/process-pinning.html" target="_blank"&gt;https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-15/process-pinning.html&lt;/A&gt;&lt;BR /&gt;&lt;BR /&gt;You can find the actual pinning in your debug output:&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;
&lt;LI-CODE lang="bash"&gt;[0] MPI startup(): 0 16048 ip-10-132-133-136.ec2.internal {0,1,2,3,16,17,18,19}
[0] MPI startup(): 1 16049 ip-10-132-133-136.ec2.internal {4,5,6,7,20,21,22,23}
[0] MPI startup(): 2 16050 ip-10-132-133-136.ec2.internal {8,9,10,11,24,25,26,27}
[0] MPI startup(): 3 16051 ip-10-132-133-136.ec2.internal {12,13,14,15,28,29,30,31}&lt;/LI-CODE&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 09 Apr 2025 13:08:38 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/Please-provide-the-equivalent-mpirun-flags/m-p/1681722#M12119</guid>
      <dc:creator>TobiasK</dc:creator>
      <dc:date>2025-04-09T13:08:38Z</dc:date>
    </item>
  </channel>
</rss>

