Segmentation fault when using MPI (Fortran)

Date: 2014-10-25 21:50:09

Tags: fortran mpi fortran90

I am new to both MPI and Fortran. I have been trying to solve this problem for hours with no luck. In my code below everything runs fine, apart from the fact that my s variable stays local to each process. When I try to use MPI_SEND and MPI_RECV to combine the partial sums, I keep getting segmentation faults, and I can't figure out what the problem is.
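
For context, the subroutine is my attempt at the composite Simpson's rule over n sub-intervals of width h = (u - l)/n, which (if I have the rule right) is

$$\int_l^u f(x)\,dx \;\approx\; \frac{h}{3}\left[f(l) \;+\; 4\sum_{\text{odd } i} f(l+ih) \;+\; 2\sum_{\text{even } i} f(l+ih) \;+\; f(u)\right]$$

where i runs over the interior points 1, ..., n-1. Each rank accumulates its share of the weighted sum in s, and rank 0 is supposed to collect the partial sums into finalS.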

  SUBROUTINE do_mpi_simpsons(l, u, n)
    INTEGER, INTENT (in) :: l, u, n
    ! REAL, INTENT (in) :: func
    DOUBLE PRECISION  :: result, walltime
    INTEGER :: clock_start, clock_rate, clock_max, clock_end
    DOUBLE PRECISION :: h, s, argVal, finalS
    INTEGER :: rank, size, ierror, tag, status(MPI_STATUS_SIZE), count, start, stop

    walltime = 0.0D0

    h = (u - l) / dble(n)
    s = func_hw(dble(l)) + func_hw(dble(u))

    CALL system_clock(clock_start, clock_rate, clock_max)
    CALL MPI_INIT(ierror)
    CALL MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
    CALL MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)

    count = n / size

    start = rank * count
    stop = start + count -1
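    ! NOTE: count = n / size is integer division, so this split assumes
    ! n is evenly divisible by size; any remainder points are never visited.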

    ! WRITE(*,*) "Start: ", start
    ! WRITE(*,*) "Stop: ", stop
    WRITE(*,*) rank

    DO i = start, stop, 2
      s = s + 4 * func_hw(dble(l)+dble(i)*h)
    END DO
    DO i = start+1, stop-1, 2
      s = s + 2 * func_hw(dble(l)+dble(i)*h)
    END DO

    ! This block is causing the seg faults
    IF(rank.eq.0) THEN
        finalS = s
        DO i = 1, size - 1
          CALL MPI_RECV(s, 64, MPI_DOUBLE, i, 1, MPI_COMM_WORLD, status, ierror)
          finalS = finalS + s
        END DO
    ELSE
        CALL MPI_SEND(s, 64, MPI_DOUBLE, 0, 1, MPI_COMM_WORLD, ierror)    
    END IF

    CALL MPI_FINALIZE(ierror)
    CALL system_clock(clock_end, clock_rate, clock_max)

    walltime = walltime + real(clock_end - clock_start) / real(clock_rate)
    result = s * h / 3 

    WRITE(*,*) "walltime = ", walltime, " seconds"
    WRITE(*,*) "result = ", result
  END SUBROUTINE
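
For reference, this is the kind of minimal scalar exchange I thought I was reproducing (adapted from tutorial examples). Note that the datatype and count here, MPI_DOUBLE_PRECISION and 1, are my reading of the Fortran binding, whereas the failing block above passes MPI_DOUBLE and 64:

  PROGRAM scalar_sendrecv
    USE mpi
    IMPLICIT NONE
    DOUBLE PRECISION :: s
    INTEGER :: rank, ierror, status(MPI_STATUS_SIZE)

    CALL MPI_INIT(ierror)
    CALL MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)

    IF (rank == 0) THEN
      ! Receive a single DOUBLE PRECISION from rank 1: count = 1 because
      ! s is one scalar, and MPI_DOUBLE_PRECISION is the Fortran datatype
      ! handle for DOUBLE PRECISION.
      CALL MPI_RECV(s, 1, MPI_DOUBLE_PRECISION, 1, 1, MPI_COMM_WORLD, status, ierror)
      WRITE(*,*) "received ", s
    ELSE IF (rank == 1) THEN
      s = 3.14D0
      CALL MPI_SEND(s, 1, MPI_DOUBLE_PRECISION, 0, 1, MPI_COMM_WORLD, ierror)
    END IF

    CALL MPI_FINALIZE(ierror)
  END PROGRAM scalar_sendrecv

I compile with mpif90 and run with mpirun -np 2. Is my use of a count of 64 with MPI_DOUBLE in the subroutine the problem, or is it something else?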

0 Answers:

There are no answers yet.