Thanks, but this is not really helping.

Could you please build a Minimal, Reproducible Example as described at
https://stackoverflow.com/help/minimal-reproducible-example ?
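
For instance, a single-file program along these lines is usually enough (this is only a sketch based on your snippet, not your actual code: I substituted MPI_COMPLEX for your MPI_COMPLEX_TYPE macro and made up the buffer shape and program name, so adjust as needed):

program irecv_repro
  use mpi
  use, intrinsic :: iso_c_binding, only: C_LONG
  implicit none

  complex :: bf(4,4,4)              ! stand-in for this%bf
  integer(kind=C_LONG) :: nelem     ! same non-default kind as "size" in recv()
  integer :: request, ierr

  call MPI_Init(ierr)

  nelem = 4_C_LONG * 4_C_LONG * 2_C_LONG

  ! the call the compiler rejects; receiving from MPI_PROC_NULL
  ! completes immediately, so the program also runs cleanly if it compiles
  call MPI_Irecv(bf(:,:,1:2), nelem, MPI_COMPLEX, &
       MPI_PROC_NULL, 42, MPI_COMM_WORLD, request, ierr)
  call MPI_Wait(request, MPI_STATUS_IGNORE, ierr)

  call MPI_Finalize(ierr)
end program irecv_repro

A self-contained file like that, together with the exact mpifort command line and compiler versions, lets us try to reproduce the error on our side.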

Cheers,

Gilles

On Mon, Aug 19, 2019 at 7:19 PM Sangam B via users
<users@lists.open-mpi.org> wrote:
>
> Hi,
>
> Here is the sample program snippet:
>
> --------
> #include "intrinsic_sizes.h"
> #include "redef.h"
>
> module module1_m
>
>   use mod1_m, only:  some__example2
>   use mod2_m, only:  some__example3
>   use mod3_m, only:  some__example4
>
>   use mpi
>   use, intrinsic :: iso_c_binding
>
> implicit none
>
>   private
>
>   public :: some__example___memory
>
>   type, public, extends(some__example5) :: some__example6
>      logical, public :: some__example7 = .False.
>      class(some__example8), private, pointer :: some__example9
>    contains
>
> ...
> ...
> end type some__example6
>
> contains
> ....
> some_pure_functions here
> ....
>
>
> subroutine recv(this,lmb)
>     class(some__example6), intent(inout) ::  this
>     integer, intent(in) :: lmb(2,2)
>
>     integer :: cs3, ierr
>     integer(kind=C_LONG) :: size
>
>     ! receive only from buffer at different process
>     if(this%is_bf_referred) return
>
>     cs3=this%uspecifier%get_recv_buff_3rd_dim_size(this%xb,this%vwb,lmb)
>     if(cs3.eq.0) return ! nothing to recv
>
>     size = this%size_dim(this%gi)*this%size_dim(this%gj)*cs3
>     if(this%is_exchange_off) then
>        call this%update_stats(size)
>        this%bf(:,:,1:cs3) = cmplx(0.,0.)
>     else
>        call MPI_Irecv(this%bf(:,:,1:cs3),size,MPI_COMPLEX_TYPE,&
>             this%nrank,this%tag,this%comm_xvw,this%request,ierr)
>     end if
>   end subroutine recv
>
>
> Hope this helps.
>
> On Mon, Aug 19, 2019 at 3:21 PM Gilles Gouaillardet via users 
> <users@lists.open-mpi.org> wrote:
>>
>> Thanks,
>>
>> and your reproducer is ?
>>
>> Cheers,
>>
>> Gilles
>>
>> On Mon, Aug 19, 2019 at 6:42 PM Sangam B via users
>> <users@lists.open-mpi.org> wrote:
>> >
>> > Hi,
>> >
>> > OpenMPI is configured as follows:
>> >
>> > export CC=`which clang`
>> > export CXX=`which clang++`
>> > export FC=`which flang`
>> > export F90=`which flang`
>> >
>> > ../configure --prefix=/sw/openmpi/3.1.1/aocc20hpcx210-mpifort 
>> > --enable-mpi-fortran --enable-mpi-cxx --without-psm --without-psm2 
>> > --without-knem --without-libfabric --without-lsf --with-verbs=/usr 
>> > --with-mxm=/sw/hpcx/hpcx-v2.1.0-gcc-MLNX_OFED_LINUX-4.3-1.0.1.0-redhat7.4-x86_64/mxm
>> >
>> >
>> > ..
>> >
>> > On Mon, Aug 19, 2019 at 2:43 PM Sangam B <forum....@gmail.com> wrote:
>> >>
>> >> Hi,
>> >>
>> >> I get the following error when the application is compiled with openmpi-3.1.1:
>> >>
>> >> mpifort -O3 -march=native -funroll-loops -finline-aggressive -flto 
>> >> -J./bin/obj_amd64aocc20 -std=f2008 -O3 -march=native -funroll-loops 
>> >> -finline-aggressive -flto -fallow-fortran-gnu-ext -ffree-form 
>> >> -fdefault-real-8 example_program.F90
>> >> F90-S-0155-Could not resolve generic procedure mpi_irecv ( 
>> >> example_program.F90  : 97)
>> >>   0 inform,   0 warnings,   1 severes, 0 fatal for recv
>> >>
>> >> The following line causes this error:
>> >>
>> >> call MPI_Irecv(this%bf(:,:,1:cs3),size,MPI_COMPLEX_TYPE,&
>> >>             this%nrank,this%tag,this%comm_xvw,this%request,ierr)
>> >>
>> >> The program uses the following module at the beginning:
>> >>  use mpi
>> >>
>> >> The Open MPI installation has the following module files in its lib folder:
>> >> $ ls *.mod
>> >> mpi.mod                 mpi_ext.mod
>> >> mpi_f08.mod             mpi_f08_ext.mod
>> >> mpi_f08_callbacks.mod   mpi_f08_interfaces.mod
>> >> mpi_f08_interfaces_callbacks.mod
>> >> mpi_f08_types.mod       pmpi_f08_interfaces.mod
>> >>
>> >> The same program works with Intel MPI (with gcc or Intel as the base
>> >> compiler), but it fails with Open MPI whether gcc-8.1.0 or AOCC is used
>> >> as the base compiler. What could be the reason for this?
>> >>
>> >> ..
>> >
_______________________________________________
users mailing list
users@lists.open-mpi.org
https://lists.open-mpi.org/mailman/listinfo/users
