I am trying to port my code from multiprocessing to MPI in Python, because I am moving it to an HPC cluster. That is why I want to spread the processes across several nodes and avoid using up all the RAM of a single node.
Can you help me implement this? This is what I have tried so far.
The multiprocessing Pool.map code:
import Home
import pickle
from multiprocessing import Pool
from functools import partial
import time
import os
def run(a):
    name = a['name']
    people = a['people']
    save_path = 'res_semi_def'
    path = os.getcwd()
    if not os.path.exists(os.path.join(os.getcwd(), save_path)):
        os.mkdir(os.path.join(os.getcwd(), save_path))
    if sum(people.values()) > 0:
        start = time.time()
        home = Home.Home(people)
        try:
            home.simulate()
            print name, time.time() - start, '[s]'
            fname = str(name) + '.pkl'
            with open(os.path.join(os.getcwd(), save_path, fname), "wb") as f:
                pickle.dump(home.Consumption, f)
            #fname = str(name) + '_person.pkl'
            #with open(os.path.join(os.getcwd(), save_path, fname), "wb") as f:
            #    pickle.dump(home.personsList, f)
        except Exception as e:
            print name, 'error', e
            pass

def main():
    p = Pool(6)
    fname = 'censimento2011_full_par.pkl'
    with open(fname, 'r') as f:
        houses = pickle.load(f)
    house = [{'name': name, 'people': people} for name, people in zip(houses.iterkeys(), houses.itervalues())]
    #name, people = zip(*houses.iteritems())
    #p.map(partial(run, int(name)), people)
    start_t = time.time()
    a = p.map(run, house)
    p.close()
    print time.time() - start_t, 'Total time[s]'

if __name__ == "__main__":
    main()
Here is the main with mpi4py:
from mpi4py import MPI
import pickle

def main():
    comm = MPI.COMM_WORLD
    if comm.rank == 0:
        # only the root rank loads the full dataset
        fname = 'censimento2011_full_par.pkl'
        with open(fname, 'r') as f:
            houses = pickle.load(f)
        house = [{'name': name, 'people': people} for name, people in zip(houses.iterkeys(), houses.itervalues())]
    else:
        house = None
    my_work = comm.scatter(house)

if __name__ == "__main__":
    main()
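As far as I understand from the mpi4py docs, comm.scatter expects a list with exactly comm.size elements and delivers one element to each rank, so I assume house first has to be split into comm.size sublists. This is the kind of chunking helper I have in mind (my own untested sketch, not from any library):

# Hypothetical helper: split lst into n nearly equal sublists,
# so that len(chunk(lst, n)) == n, which is what comm.scatter expects.
def chunk(lst, n):
    return [lst[i::n] for i in range(n)]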
But with the code above (without any chunking yet) I get this error:
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 54118 RUNNING AT compute-0-8
= EXIT CODE: 4
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@compute-0-5] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:909): assert (!closed) failed
[proxy:0:0@compute-0-5] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:2@compute-0-7] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:909): assert (!closed) failed
[proxy:0:2@compute-0-7] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:2@compute-0-7] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:1@compute-0-6] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:909): assert (!closed) failed
[proxy:0:1@compute-0-6] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:1@compute-0-6] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:0@compute-0-5] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
srun: error: compute-0-5: task 0: Exited with exit code 7
srun: error: compute-0-7: task 2: Exited with exit code 7
srun: error: compute-0-6: task 1: Exited with exit code 7
[mpiexec@compute-0-5] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@compute-0-5] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@compute-0-5] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@compute-0-5] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
How do I correctly map the work across processes with mpi4py?
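In other words, is the right pattern something like the sketch below, where rank 0 loads and chunks the data, comm.scatter hands one sublist to every rank, and each rank then loops over its sublist? It reuses the run() function from the Pool version and the hypothetical chunk() helper sketched above; I am not sure this is the correct mpi4py idiom:

from mpi4py import MPI
import pickle

def main():
    comm = MPI.COMM_WORLD
    if comm.rank == 0:
        with open('censimento2011_full_par.pkl', 'r') as f:
            houses = pickle.load(f)
        house = [{'name': name, 'people': people} for name, people in houses.iteritems()]
        work = chunk(house, comm.size)    # one sublist per rank
    else:
        work = None
    my_work = comm.scatter(work, root=0)  # every rank receives its own sublist
    for item in my_work:
        run(item)                         # same run() as in the multiprocessing version

if __name__ == "__main__":
    main()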