Docker-in-Docker (DinD) capabilities of public runners deactivated. More info

Commit 73efe059 authored by Maude Le Jeune's avatar Maude Le Jeune
Browse files

Merge branch 'master' of gitlab.in2p3.fr:pipelet/pipelet

parents fd5fd135 f1ac345c
......@@ -252,23 +252,10 @@ def launch_pbs(pipe, n, address=None, authkey='secret', job_name="job_", log_lev
def launch_nersc (pipe, n, ppn=1, port=50000, authkey='secret', job_name="job_", log_level=logging.WARNING, job_header="""
#PBS -q regular
#PBS -l nodes=%d:ppn=%d
""", carver=False, python_mpi=False):
""", carver=False, python_mpi=False, n_numa=2, cori=False):
""" Launch a bunch of distant workers through a PBS batch system using aprun.
"""
# server_file="""
##PBS -q regular
##PBS -l mppwidth=%d
##PBS -l walltime=%s
#export CRAY_ROOTFS=DSL
#echo "export PIPELETD_HOST=$HOST" > ~/host_info.sh
#pipeletd -n -l %d -p %d -a $HOST << 'EOF' &
#%s
#EOF
#sleep 5
#aprun -np %d python $PIPELETPATH/launchers.py -p %d -s %s -l %s -H $HOST
#"""
server_file=job_header+"""
export CRAY_ROOTFS=DSL
echo "export PIPELETD_HOST=$HOST" > ~/host_info.sh
......@@ -288,6 +275,15 @@ sleep 5
mpirun -np %d %s $PIPELETPATH/launchers.py -p %d -s %s -l %s -H $HOSTNAME
"""
server_file_cori=job_header+"""
export CRAY_ROOTFS=DSL
echo "export PIPELETD_HOST=$HOSTNAME" > ~/host_info.sh
pipeletd -n -l %d -p %d -a $HOSTNAME << 'EOF' &
%s
EOF
sleep 5
srun -n %d %s $PIPELETPATH/launchers.py -p %d -s %s -l %s -H $HOSTNAME
"""
nnode=n/ppn
if (n%ppn>0):
......@@ -302,16 +298,22 @@ mpirun -np %d %s $PIPELETPATH/launchers.py -p %d -s %s -l %s -H $HOSTNAME
jobfile = get_log_file (pipe, '.job')
errfile = jobfile.replace('job', 'err')
logfile = jobfile.replace('job', 'log')
batch = 'qsub'
if carver:
server_file = server_file_carver
with closing(file(jobfile,'w')) as f:
f.write(server_file%(nnode, ppn, log_level, port, pipedesc, n, pyrun, port, authkey, logfile))
elif cori:
batch = 'sbatch'
server_file = server_file_cori
with closing(file(jobfile,'w')) as f:
f.write(server_file%(nnode, log_level, port, pipedesc, n, pyrun, port, authkey, logfile))
else:
with closing(file(jobfile,'w')) as f:
f.write(server_file%(n, log_level, port, pipedesc, nnode, 24/ppn, 24/ppn/2, pyrun, port, authkey, logfile))
f.write(server_file%(n, log_level, port, pipedesc, nnode, 24/ppn, 24/ppn/n_numa, pyrun, port, authkey, logfile))
subprocess.Popen(['qsub','-o' ,logfile, '-e', errfile,jobfile]).communicate()[0]
subprocess.Popen([batch,'-o' ,logfile, '-e', errfile,jobfile]).communicate()[0]
_job_file = """
#/bin/zsh
......
from pipelet.environment import *
import os
class PBEnvironment(Environment):
    """Execution environment bound to a single pipelet worker.

    Wires the worker's task input into the segment and sets up the
    bookkeeping containers (outputs, temp files, saved/exposed
    parameters) plus a dedicated logger for the segment run.
    """
    def __init__(self, w):
        """Build the environment around worker *w*.

        Parameters
        ----------
        w : worker instance; its ``task.task_input`` becomes the
            segment input.
        """
        ## the worker this environment serves
        self._worker = w
        ## segment input, taken straight from the worker's task
        self.seg_input = w.task.task_input
        ## segment output, filled in as the segment runs
        self.seg_output = list()
        ## parameters to save
        self.lst_par = list()
        ## parameters to expose as tags
        self.lst_tag = list()
        ## temporary files created during the run
        self._tmpf = list()
        ## per-segment logger writing to this environment's log file
        self.logger = init_logger ("pipelet", self._get_log_file(), level=[])
        ## versions/revisions of external dependencies
        self.version = {"pipelet":pipelet.__version__}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment