<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Re: MPI linking with LLVM based compilers in Intel® MPI Library</title>
    <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1495227#M10684</link>
    <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks for posting in Intel Communities.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Could you please post the entire information of your query in the forum as it will be useful for other users as well.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Could you also please provide these following details:&lt;/P&gt;&lt;P&gt;1. The Operating System &amp;amp; Processor details&lt;/P&gt;&lt;P&gt;2. Detailed steps to reproduce your issue&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;&amp;gt;&amp;gt;&amp;gt;The command failed during build.&lt;/P&gt;&lt;P&gt;Could you please elaborate where you are facing the issue.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks &amp;amp; Regards,&lt;/P&gt;&lt;P&gt;Shaik Rabiya&lt;/P&gt;&lt;BR /&gt;</description>
    <pubDate>Tue, 13 Jun 2023 10:05:49 GMT</pubDate>
    <dc:creator>RabiyaSK_Intel</dc:creator>
    <dc:date>2023-06-13T10:05:49Z</dc:date>
    <item>
      <title>MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1495049#M10682</link>
      <description>&lt;P&gt;Hi All,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I'm currently trying to build the PETSC library with the Intel OneAPI HPC package, and was hoping to use the more modern LLVM based compilers (specifically ICX, ICPX and IFX)&lt;BR /&gt;&lt;BR /&gt;The "classical" way of using MPI with the intel compilers was the provided wrappers (mpiicc, mpiicpc and mpiifort) but those do not seem to be around for the newer compilers.&lt;BR /&gt;&lt;BR /&gt;After having found this post &lt;A href="https://community.intel.com/t5/Intel-oneAPI-HPC-Toolkit/Intel-MPI-for-icx/m-p/1439211#M10147" target="_blank"&gt;https://community.intel.com/t5/Intel-oneAPI-HPC-Toolkit/Intel-MPI-for-icx/m-p/1439211#M10147&lt;/A&gt; and following the relevant documentation specified here: &lt;A href="https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-8/compilation-command-options.html#compilation-command-options_GUID-7D5FF697-C49D-4D40-92C0-8FEBE4C1BFF5" target="_blank"&gt;https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-8/compilation-command-options.html#compilation-command-options_GUID-7D5FF697-C49D-4D40-92C0-8FEBE4C1BFF5&lt;/A&gt;&lt;BR /&gt;The command failed during build.&lt;BR /&gt;&lt;BR /&gt;Perhaps it's a bug in the MPI wrapper or something else all together, but the above doesn't seem to work with the 2023.1 versions of the compilers.&lt;BR /&gt;&lt;BR /&gt;specifically tried with&lt;BR /&gt;&amp;nbsp;icx --version:&amp;nbsp;&lt;BR /&gt;Intel(R) oneAPI DPC++/C++ Compiler 2023.1.0 (2023.1.0.20230320)&lt;BR /&gt;and&lt;BR /&gt;mpiicc --version: icc (ICC) 2021.9.0 20230302&lt;BR /&gt;&lt;BR /&gt;&lt;BR /&gt;I'm documenting the overall project in this thread: &lt;A href="https://twitter.com/FelixCLC_/status/1668344146401300481" target="_blank"&gt;https://twitter.com/FelixCLC_/status/1668344146401300481&lt;/A&gt;&lt;BR /&gt;&lt;BR /&gt;Cheers and 
thanks,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Felix&lt;/P&gt;</description>
      <pubDate>Mon, 12 Jun 2023 20:25:29 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1495049#M10682</guid>
      <dc:creator>FCLC</dc:creator>
      <dc:date>2023-06-12T20:25:29Z</dc:date>
    </item>
    <item>
      <title>Re:MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1495227#M10684</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks for posting in Intel Communities.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Could you please post the entire information of your query in the forum as it will be useful for other users as well.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Could you also please provide these following details:&lt;/P&gt;&lt;P&gt;1. The Operating System &amp;amp; Processor details&lt;/P&gt;&lt;P&gt;2. Detailed steps to reproduce your issue&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;&amp;gt;&amp;gt;&amp;gt;The command failed during build.&lt;/P&gt;&lt;P&gt;Could you please elaborate where you are facing the issue.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks &amp;amp; Regards,&lt;/P&gt;&lt;P&gt;Shaik Rabiya&lt;/P&gt;&lt;BR /&gt;</description>
      <pubDate>Tue, 13 Jun 2023 10:05:49 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1495227#M10684</guid>
      <dc:creator>RabiyaSK_Intel</dc:creator>
      <dc:date>2023-06-13T10:05:49Z</dc:date>
    </item>
    <item>
      <title>Re: MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1496910#M10693</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;We haven't heard back from you. Could you please provide the requested details so we could reproduce your issue from our end.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Thanks &amp;amp; Regards,&lt;/P&gt;
&lt;P&gt;Shaik Rabiya&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 19 Jun 2023 04:03:47 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1496910#M10693</guid>
      <dc:creator>RabiyaSK_Intel</dc:creator>
      <dc:date>2023-06-19T04:03:47Z</dc:date>
    </item>
    <item>
      <title>Re: Re:MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1497085#M10696</link>
      <description>&lt;P&gt;Hi Shaik, apologies for the delay.&lt;BR /&gt;&lt;BR /&gt;Platform is Linux, specifically Pop!_OS 22.04 LTS, which is a downstream version of ubuntu 22.04LTS that follows a rolling release cadence.&lt;BR /&gt;&lt;BR /&gt;(uname -a: `uname -a&lt;BR /&gt;Linux pop-os 6.3.0-local-fclc #3 SMP PREEMPT_DYNAMIC Mon May 15 23:00:23 EDT 2023 x86_64 x86_64 x86_64 GNU/Linux`)&lt;BR /&gt;&lt;BR /&gt;Kernel is upstream kernel version 6.3.0&lt;BR /&gt;&lt;BR /&gt;Processor is a Alderlake i7-12700K with pcores only being used as a development platform for the GoldenCove Micro-architecture as found in 4th generation Xeon Sapphire-Rapids and the latest W790 Sapphire rapids based Workstation/HEDT processors.&lt;BR /&gt;&lt;BR /&gt;The query/issue in question is the linking to MPI libraries with the latest LLVM based intel compilers, including ICX, ICPX and IFX for C, C++ and Fortran respectively.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;The usage/application is with the PETSC HPC library developed by Argonne National Lab Leadership Computing Facitlity, the University of Chicago and other FOSS/Open source contributors.&lt;BR /&gt;&lt;BR /&gt;In this case I've had to build with the following flags: `./configure --download-viennacl=1 --with-debugging=0 COPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' CXXOPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' FOPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' --with-blaslapack-dir=$MKLROOT --with-mpi-dir=/opt/intel/oneapi/mpi/latest/ --prefix=/home/felix/petsc4foam/petsc_build/ --with-precision=single` due to the workarounds listed in &lt;A href="https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-8/compilation-command-options.html#GUID-7D5FF697-C49D-4D40-92C0-8FEBE4C1BFF5" 
target="_blank"&gt;https://www.intel.com/content/www/us/en/docs/mpi-library/developer-reference-linux/2021-8/compilation-command-options.html#GUID-7D5FF697-C49D-4D40-92C0-8FEBE4C1BFF5&lt;/A&gt; not being usable/functional.&lt;BR /&gt;&lt;BR /&gt;Replication should be somewhat trivial, but for the avoidance of doubt I've recorded a video using the following simplified command as an example: ./configure --with-debugging=0 --with-cc=mpiicc --with-cxx=mpiicpc --with-fc=mpiifort --with-debugging=0 COPTFLAGS='-diag-disable=10441 -O3 -march=native -cc=icx' CXXOPTFLAGS='-O3 -march=native -cxx=icpx' FOPTFLAGS='-O3 -march=native -fc=ifx' --prefix=/home/felix/petsc4foam/petsc_build/ --with-precision=single&lt;BR /&gt;&lt;BR /&gt;I'll note that specifically the issue is with the C and CPP compilers error'ing out during configuration, but on the Fortran side, while the `mpiifort -fc=ifx` flag is accepted, the compiler is still detected as ifort&lt;BR /&gt;&lt;BR /&gt;Video available here: &lt;A href="https://www.youtube.com/watch?v=2oj7lC9cemw" target="_blank"&gt;https://www.youtube.com/watch?v=2oj7lC9cemw&lt;/A&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;BR /&gt;The "ideal" build would be using SYCL, but those are not yet considered production ready for PETSC.&lt;BR /&gt;&lt;BR /&gt;In the meantime, the closest equivalent is ~= `./configure --download-viennacl=1 --with-debugging=0 COPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' CXXOPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' FOPTFLAGS='-O3 -march=sapphirerapids -mno-amx-tile -mno-amx-int8 -mno-amx-bf16' --with-blaslapack-dir=$MKLROOT --with-mpi-dir=/opt/intel/oneapi/mpi/latest/ --prefix=/home/felix/petsc4foam/petsc_build/ --with-precision=single`&lt;/P&gt;</description>
      <pubDate>Mon, 19 Jun 2023 15:25:00 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1497085#M10696</guid>
      <dc:creator>FCLC</dc:creator>
      <dc:date>2023-06-19T15:25:00Z</dc:date>
    </item>
    <item>
      <title>Re: MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1497711#M10700</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Thank you for your patience. Could you please try building PETSc with this command:&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;LI-CODE lang="markup"&gt;./configure --with-debugging=0 --with-cc='mpiicc -cc=icx' --with-cxx='mpiicpc -cxx=icpx' --with-fc='mpiifort -fc=ifx' --with-debugging=0 COPTFLAGS='-diag-disable=10441 -O3 -march=native ' CXXOPTFLAGS='-O3 -march=native' FOPTFLAGS='-O3 -march=native' --prefix=/home/felix/petsc4foam/petsc_build/ --with-precision=single&lt;/LI-CODE&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;When using the environment variables it is probably best to export them, the environment variables are only set for the exact following command and have to be specified for every single other command.&amp;nbsp;&lt;/P&gt;
&lt;P&gt;For example we could build/make with&amp;nbsp;&lt;/P&gt;
&lt;PRE class="line-numbers language-markup"&gt;&lt;CODE&gt;I_MPI_CC=icx I_MPI_CXX=icpx I_MPI_F90=ifx make &lt;/CODE&gt;&lt;/PRE&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;We were able to build PETSc library successfully with Intel LLVM based compilers (icx, icpx, ifx) with the given commands. Please refer to the log file for more information.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Thanks &amp;amp; Regards,&lt;/P&gt;
&lt;P&gt;Shaik Rabiya&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 21 Jun 2023 09:56:54 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1497711#M10700</guid>
      <dc:creator>RabiyaSK_Intel</dc:creator>
      <dc:date>2023-06-21T09:56:54Z</dc:date>
    </item>
    <item>
      <title>Re: MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1498230#M10706</link>
      <description>&lt;P&gt;Yup, above was able to solve the issue!&lt;BR /&gt;&lt;BR /&gt;Thanks for the follow up.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;-Felix&lt;/P&gt;</description>
      <pubDate>Thu, 22 Jun 2023 16:34:55 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1498230#M10706</guid>
      <dc:creator>FCLC</dc:creator>
      <dc:date>2023-06-22T16:34:55Z</dc:date>
    </item>
    <item>
      <title>Re:MPI linking with LLVM based compilers</title>
      <link>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1498469#M10707</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks for accepting our solution. If you need any additional information, you can post a new question on community as this thread will no longer be monitored by Intel.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;Thanks &amp;amp; Regards,&lt;/P&gt;&lt;P&gt;Shaik Rabiya&lt;/P&gt;&lt;BR /&gt;</description>
      <pubDate>Fri, 23 Jun 2023 04:54:01 GMT</pubDate>
      <guid>https://community.intel.com/t5/Intel-MPI-Library/MPI-linking-with-LLVM-based-compilers/m-p/1498469#M10707</guid>
      <dc:creator>RabiyaSK_Intel</dc:creator>
      <dc:date>2023-06-23T04:54:01Z</dc:date>
    </item>
  </channel>
</rss>

