MPI Training Tutorial
sgi, 2003.7

1.1

Different applications stress different resources (CPU, data, I/O):
• Web serving: Small, integrated system
• Media streaming: Access storage and networking
• Signal processing: Networking and compute
• Database/CRM/ERP: Storage
• Genomics: Compute cycles
• Traditional supercomputer: Compute, networking, storage

1.2

Parallel computer architectures:
• MPP (Massively Parallel Processors): distributed memory, individual address space per node
• NUMA (Non-Uniform Memory Access)
• SMP (Symmetric Multiprocessor): centralized memory, shared address space

Coherent shared memory (two CPUs sharing one memory): easy to program, hard to scale the hardware.
Distributed memory (each CPU with its own memory): hard to program, easy to scale the hardware.

In an SMP, several processors, each with its own cache, reach a single shared memory and I/O system over a bus or crossbar switch.
In a distributed-memory machine, every node has its own processor, cache, bus, memory and I/O, and the nodes communicate over an interconnection network.
SGI NUMA example: two nodes, each with four MIPS processors, a Bedrock ASIC, XIO+ I/O and 8 GB of physical memory, are joined over the NUMA3 interconnect into 16 GB of shared physical memory.

1.3

How a job is decomposed: a run consists of serial sections S1 and S2 and parallelizable work P1 to P4.
• Threads: a single process runs S1 and S2 once and executes P1 to P4 as threads.
• Processes: process 0 to process 3 each run S1, one of P1 to P4, and S2.

Parallel program structures:
• SPMD (Single Program Multiple Data): every process runs the same executable a.out.
• MPMD (Multiple Program Multiple Data): Master/slave, with distinct executables p1, p2, p3.
• MPMD: Coupled Analysis, with distinct executables p1, p2, p3 cooperating.

Serial version (a single process):
• Read array a() from the input file
• Set is = 1 and ie = 6
• Process from a(is) to a(ie)
• Write array a() to the output file

SPMD version with computed bounds: processes 0, 1 and 2 all run the same a.out (a C sketch of this scheme follows these lists):
• Read array a() from the input file
• Get my rank
• is = 2*rank + 1, ie = 2*rank + 2
• Process from a(is) to a(ie)
• Gather the result to process 0
• If rank = 0 then write array a() to the output file

SPMD version with explicit per-rank bounds:
• Read array a() from the input file
• Get my rank
• If (rank.eq.0) is = 1, ie = 2
• If (rank.eq.1) is = 3, ie = 4
• If (rank.eq.2) is = 5, ie = 6
• Process from a(is) to a(ie)
• Gather the result to process 0
• If rank = 0 then write array a() to the output file
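As a rough illustration of the SPMD scheme above, here is a minimal C sketch (an assumption for illustration, not code from the course): it hard-codes 3 processes and a 6-element array, replaces the file I/O of the pseudocode with an in-memory array and printf, and uses 0-based indexing, so is = 2*rank rather than 2*rank + 1.

    /* Hypothetical SPMD sketch: run with exactly 3 processes.        */
    /* Each rank transforms 2 of the 6 elements, then the pieces are  */
    /* gathered onto process 0, which prints the full array.          */
    #include <stdio.h>
    #include "mpi.h"

    #define N 6

    int main(int argc, char **argv)
    {
        double a[N] = {1, 2, 3, 4, 5, 6};  /* stands in for "read array a() from the input file" */
        double part[2];
        int rank, nprocs, i, is;

        MPI_Init(&argc, &argv);
        MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);

        if (nprocs != 3)                   /* this decomposition assumes exactly 3 ranks */
            MPI_Abort(MPI_COMM_WORLD, 1);

        is = 2 * rank;                     /* this rank's starting index (0-based)       */
        for (i = 0; i < 2; i++)
            part[i] = 2.0 * a[is + i];     /* "process from a(is) to a(ie)": here, double each element */

        /* gather the partial results back onto process 0 */
        MPI_Gather(part, 2, MPI_DOUBLE, a, 2, MPI_DOUBLE, 0, MPI_COMM_WORLD);

        if (rank == 0)                     /* "if rank = 0 then write array a()" */
            for (i = 0; i < N; i++)
                printf("a[%d] = %g\n", i, a[i]);

        MPI_Finalize();
        return 0;
    }

With a typical MPI launcher this would be started on three processes, for example with something like mpirun -np 3 a.out.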
1.4

Approaches to writing parallel programs: HPF, OpenMP, MPI.

1.5

pp1:

MPI background: MPI 1 (1994) defines 129 routines; MPI-2 followed in 1997. A working MPI program can be written with only 6 basic MPI routines.

MPI routines by category:

Type                       Subroutines                                  Number
Point-to-Point             MPI_SEND, MPI_RECV, MPI_WAIT, ...                35
Collective Communication   MPI_BCAST, MPI_GATHER, MPI_REDUCE, ...           30
Derived Data Type          MPI_TYPE_CONTIGUOUS, MPI_TYPE_COMMIT, ...        21
Topology                   MPI_CART_CREATE, MPI_GRAPH_CREATE, ...           16
Communicator               MPI_COMM_SIZE, MPI_COMM_RANK, ...                17
Process Group              MPI_GROUP_SIZE, MPI_GROUP_RANK, ...              13
Environment Management     MPI_INIT, MPI_FINALIZE, MPI_ABORT, ...           18

2.1

A first MPI program: process 0 sends the string "Hello there" to process 1, which receives it and prints it.

    #include <stdio.h>
    #include <string.h>
    #include "mpi.h"

    int main(int argc, char **argv)
    {
        int myrank, i, j, k;
        MPI_Status status;
        char msg[20];

        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &myrank);
        if (myrank == 0) {
            strcpy(msg, "Hello there");
            MPI_Send(msg, strlen(msg) + 1, MPI_CHAR, 1, 99, MPI_COMM_WORLD);
        } else if (myrank == 1) {
            MPI_Recv(msg, 20, MPI_CHAR, 0, 99, MPI_COMM_WORLD, &status);
            printf("Receive message = %s\n", msg);
        }
        MPI_Finalize();
        return 0;
    }

2.2

Subsections 2.2.1 to 2.2.10.

2.3

2.3.1  MPI header files

C:       #include "mpi.h"
Fortran: include 'mpif.h'

2.3.2  MPI call format

C:
    rc = MPI_Xxxxx(parameter, ...)
    Example: rc = MPI_Bsend(&buf, count, type, dest, tag, comm)
    rc equals MPI_SUCCESS if the call succeeds.
Fortran:
    CALL MPI_XXXXX(parameter, ..., ierr)   (case-insensitive: call mpi_xxxxx(parameter, ..., ierr))
    Example: CALL MPI_BSEND(buf, count, type, dest, tag, comm, ierr)
    ierr equals MPI_SUCCESS if the call succeeds.

2.3.3

2.3.4  Communicators

The predefined communicator that contains all MPI processes is MPI_COMM_WORLD.

2.3.5  Rank

Every process in a communicator has a rank, an integer "ID" numbered from 0, which is used to tell the processes apart:

    if (rank == 0) {
        /* work of process 0 */
    } else if (rank == 1) {
        /* work of process 1 */
    }

3.1

MPI environment management routines:
• MPI_Init
• MPI_Comm_size
• MPI_Comm_rank
• MPI_Abort
• MPI_Get_processor_name
• MPI_Initialized
• MPI_Wtime
• MPI_Wtick
• MPI_Finalize

3.1.1  MPI_Init()

Initializes the MPI execution environment; in C it is called with the addresses of argc and argv, and it must be called before any other MPI routine.

C:       int MPI_Init(int *argc, char ***argv)
Fortran: MPI_INIT(ierr)
         INTEGER ierr

3.1.2  MPI_Finalize()

Terminates the MPI execution environment and should be the last MPI call in the program.

C:       int MPI_Finalize(void)
Fortran: MPI_FINALIZE(ierr)
         INTEGER ierr

A minimal Fortran MPI program:

    program init
    include 'mpif.h'
    integer ierr
    call MPI_INIT(ierr)
    print *, 'hello world'
    call MPI_FINALIZE(ierr)
    end

3.1.3  MPI_Comm_size()

Returns the number of processes in the given communicator (for example MPI_COMM_WORLD).

MPI_Comm_size(comm, size)
    IN  comm   communicator
    OUT size   number of processes in comm

C:       int MPI_Comm_size(MPI_Comm comm, int *size)
Fortran: MPI_COMM_SIZE(comm, size, ierr)
         INTEGER comm, size, ierr

3.1.4  MPI_Comm_rank()

Returns the rank of the calling process within the communicator, an integer from 0 to size-1.

MPI_Comm_rank(comm, rank)
    IN  comm   communicator
    OUT rank   rank of the calling process in comm

C:       int MPI_Comm_rank(MPI_Comm comm, int *rank)
Fortran: MPI_COMM_RANK(comm, rank, ierr)
         INTEGER comm, rank, ierr

Run on 3 processes, every process sees the same size but its own rank:
    process 0: nprocs = 3, myrank = 0
    process 1: nprocs = 3, myrank = 1
    process 2: nprocs = 3, myrank = 2

3.1.5  MPI_Abort()

Aborts all processes associated with the given communicator.

C:       MPI_Abort(comm, errorcode)
Fortran: MPI_ABORT(comm, errorcode, ierr)

3.1.6  MPI_Wtime()

Returns the elapsed wall-clock time, in seconds, on the calling process.

C:       MPI_Wtime()
Fortran: MPI_WTIME()

3.2

MPI program templates:
• C
• Fortran

3.2.1  C

    #include "mpi.h"
    int main(argc, argv)
    int argc;
    char *argv[];
    {
        int numtasks, ra
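The C template breaks off above. A conventional completion of this kind of skeleton is sketched below; it is an assumption rather than the original slide's code, with variable names numtasks, rank and rc assumed, and the ANSI prototype used in place of the K&R parameter declarations.

    #include <stdio.h>
    #include "mpi.h"

    int main(int argc, char *argv[])
    {
        int numtasks, rank, rc;

        rc = MPI_Init(&argc, &argv);               /* start the MPI environment       */
        if (rc != MPI_SUCCESS) {
            printf("Error starting MPI program, terminating.\n");
            MPI_Abort(MPI_COMM_WORLD, rc);
        }

        MPI_Comm_size(MPI_COMM_WORLD, &numtasks);  /* how many processes were started */
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);      /* which one am I                  */
        printf("Number of tasks = %d, my rank = %d\n", numtasks, rank);

        /* ... message passing and computation go here ... */

        MPI_Finalize();                            /* last MPI call before exiting    */
        return 0;
    }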
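A common addition to such a skeleton is timing a region of code with MPI_Wtime() and MPI_Wtick() from section 3.1; the following small sketch (again an assumption, with a made-up local loop as the work being measured) shows the usual pattern.

    #include <stdio.h>
    #include "mpi.h"

    int main(int argc, char *argv[])
    {
        double t0, t1, s = 0.0;
        int i;

        MPI_Init(&argc, &argv);

        t0 = MPI_Wtime();               /* wall-clock time before the work   */
        for (i = 0; i < 1000000; i++)   /* some local computation to measure */
            s += (double)i;
        t1 = MPI_Wtime();               /* wall-clock time after the work    */

        printf("s = %g, elapsed = %f seconds (timer resolution %g s)\n",
               s, t1 - t0, MPI_Wtick());
        MPI_Finalize();
        return 0;
    }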