[ic3-autosubmit] 01/01: Upstream 3.0.4
Alastair McKinstry
mckinstry at moszumanska.debian.org
Fri Jun 12 09:11:20 UTC 2015
This is an automated email from the git hooks/post-receive script.
mckinstry pushed a commit to tag upstream_3.0.4
in repository ic3-autosubmit.
commit c5fc052a78c64b245c56b1f2794678fa30708dfb
Author: Alastair McKinstry <mckinstry at debian.org>
Date: Fri Jun 12 09:14:06 2015 +0100
Upstream 3.0.4
---
CHANGES | 16 ++-
PKG-INFO | 6 +-
VERSION | 2 +-
autosubmit.egg-info/PKG-INFO | 6 +-
autosubmit.egg-info/requires.txt | 6 +-
autosubmit/autosubmit.py | 178 ++++++++++++++++----------------
autosubmit/config/basicConfig.py | 12 +--
autosubmit/config/config_common.py | 14 ++-
autosubmit/config/files/autosubmit.conf | 4 +-
autosubmit/config/files/platforms.conf | 10 +-
autosubmit/config/log.py | 2 -
autosubmit/database/db_common.py | 43 +++++---
autosubmit/job/job.py | 2 +-
autosubmit/job/job_common.py | 19 ++--
autosubmit/job/job_list.py | 82 ++++++++++-----
autosubmit/platforms/ecplatform.py | 14 +--
autosubmit/platforms/localplatform.py | 11 +-
autosubmit/platforms/lsfplatform.py | 12 +--
autosubmit/platforms/pbsplatform.py | 18 ++--
autosubmit/platforms/sgeplatform.py | 9 +-
autosubmit/platforms/slurmplatform.py | 5 +-
docs/autosubmit.pdf | Bin 1373082 -> 1154134 bytes
setup.py | 7 +-
23 files changed, 274 insertions(+), 204 deletions(-)
diff --git a/CHANGES b/CHANGES
index 37e49b7..7fb39e6 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,2 +1,16 @@
+3.0.4
+ Fixed bug in platform headers
+ MAX_WAITING_JOBS and TOTAL_JOBS now defined by platform
+ Simplified console output of run subcommand
+
+3.0.3
+ Fixed bug in expid test.
+
+3.0.2
+ Fixed bug in localplatform.
+
+3.0.1
+ Fixed bug in config.
+
3.0.0
- Restructure layout
+ Restructure layout.
diff --git a/PKG-INFO b/PKG-INFO
index 1c740d9..09fa98e 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,8 +1,8 @@
Metadata-Version: 1.1
Name: autosubmit
-Version: 3.0.0rc8
-Summary: Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverse Supercomputing Environments
-Home-page: https://autosubmit.ic3.cat
+Version: 3.0.4
+Summary: Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverseSupercomputing Environments
+Home-page: http://autosubmit.ic3.cat
Author: Domingo Manubens-Gil
Author-email: domingo.manubens at ic3.cat
License: GNU GPL v3
diff --git a/VERSION b/VERSION
index 398bfa1..b0f2dcb 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-3.0.0rc8
+3.0.4
diff --git a/autosubmit.egg-info/PKG-INFO b/autosubmit.egg-info/PKG-INFO
index 1c740d9..09fa98e 100644
--- a/autosubmit.egg-info/PKG-INFO
+++ b/autosubmit.egg-info/PKG-INFO
@@ -1,8 +1,8 @@
Metadata-Version: 1.1
Name: autosubmit
-Version: 3.0.0rc8
-Summary: Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverse Supercomputing Environments
-Home-page: https://autosubmit.ic3.cat
+Version: 3.0.4
+Summary: Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverseSupercomputing Environments
+Home-page: http://autosubmit.ic3.cat
Author: Domingo Manubens-Gil
Author-email: domingo.manubens at ic3.cat
License: GNU GPL v3
diff --git a/autosubmit.egg-info/requires.txt b/autosubmit.egg-info/requires.txt
index 4e34dcb..0a2a457 100644
--- a/autosubmit.egg-info/requires.txt
+++ b/autosubmit.egg-info/requires.txt
@@ -1,5 +1,5 @@
argparse>=1.2,<2
python-dateutil>=1,<2
-pydotplus
-pyparsing
-paramiko
\ No newline at end of file
+pydotplus>=2
+pyparsing>=2.0.1
+paramiko>=1.15
\ No newline at end of file
diff --git a/autosubmit/autosubmit.py b/autosubmit/autosubmit.py
index 2e76ef5..5da6b2c 100644
--- a/autosubmit/autosubmit.py
+++ b/autosubmit/autosubmit.py
@@ -155,7 +155,7 @@ class Autosubmit:
subparser.add_argument('-dbf', '--databasefilename', default=None, help='database filename')
subparser.add_argument('-lr', '--localrootpath', default=None, help='path to store experiments. If not '
'supplied, it will prompt for it')
- subparser.add_argument('-qc', '--platformsconfpath', default=None,
+ subparser.add_argument('-pc', '--platformsconfpath', default=None,
help='path to platforms.conf file to use by default. If not supplied, it will not'
' prompt for it')
subparser.add_argument('-jc', '--jobsconfpath', default=None, help='path to jobs.conf file to use by '
@@ -203,6 +203,8 @@ class Autosubmit:
# Refresh
subparser = subparsers.add_parser('refresh', description='refresh project directory for an experiment')
subparser.add_argument('expid', help='experiment identifier')
+ subparser.add_argument('-mc', '--model_conf', default=False, action='store_true',
+ help='overwrite model conf file')
args = parser.parse_args()
@@ -238,7 +240,7 @@ class Autosubmit:
elif args.command == 'test':
return Autosubmit.test(args.expid, args.chunks, args.member, args.stardate, args.HPC, args.branch)
elif args.command == 'refresh':
- return Autosubmit.refresh(args.expid)
+ return Autosubmit.refresh(args.expid, args.model_conf)
except Exception as e:
from traceback import format_exc
Log.critical('Unhandled exception on Autosubmit: {0}\n{1}', e, format_exc(10))
@@ -265,7 +267,7 @@ class Autosubmit:
return ret
@staticmethod
- def expid(hpc, description, copy_id='', dummy=False):
+ def expid(hpc, description, copy_id='', dummy=False, test=False):
"""
Creates a new experiment for given HPC
@@ -277,6 +279,7 @@ class Autosubmit:
:param description: short experiment's description.
:param copy_id: experiment identifier of experiment to copy
:param dummy: if true, writes a default dummy configuration for testing
+ :param test: if true, creates an experiment for testing
:return: experiment identifier. If method fails, returns ''.
:rtype: str
"""
@@ -295,7 +298,7 @@ class Autosubmit:
Log.error("Missing HPC.")
return ''
if not copy_id:
- exp_id = new_experiment(hpc, description, Autosubmit.autosubmit_version)
+ exp_id = new_experiment(description, Autosubmit.autosubmit_version)
if exp_id == '':
return ''
try:
@@ -328,7 +331,7 @@ class Autosubmit:
else:
try:
if os.path.exists(os.path.join(BasicConfig.LOCAL_ROOT_DIR, copy_id)):
- exp_id = copy_experiment(copy_id, hpc, description, Autosubmit.autosubmit_version)
+ exp_id = copy_experiment(copy_id, description, Autosubmit.autosubmit_version, test)
if exp_id == '':
return ''
dir_exp_id = os.path.join(BasicConfig.LOCAL_ROOT_DIR, exp_id)
@@ -451,8 +454,7 @@ class Autosubmit:
expid = as_conf.get_expid()
hpcarch = as_conf.get_platform()
- max_jobs = as_conf.get_total_jobs()
- max_waiting_jobs = as_conf.get_max_waiting_jobs()
+
safetysleeptime = as_conf.get_safetysleeptime()
retrials = as_conf.get_retrials()
@@ -461,8 +463,6 @@ class Autosubmit:
return False
Log.debug("The Experiment name is: {0}", expid)
- Log.debug("Total jobs to submit: {0}", max_jobs)
- Log.debug("Maximum waiting jobs in platforms: {0}", max_waiting_jobs)
Log.debug("Sleep: {0}", safetysleeptime)
Log.debug("Retrials: {0}", retrials)
Log.info("Starting job submission...")
@@ -492,7 +492,9 @@ class Autosubmit:
for job in joblist.get_job_list():
if job.platform_name is None:
job.platform_name = hpcarch
+ # noinspection PyTypeChecker
job.set_platform(platforms[job.platform_name])
+ # noinspection PyTypeChecker
platforms_to_test.add(platforms[job.platform_name])
joblist.check_scripts(as_conf)
@@ -507,62 +509,41 @@ class Autosubmit:
#########################
# Main loop. Finishing when all jobs have been submitted
while joblist.get_active():
- active = len(joblist.get_running())
- waiting = len(joblist.get_submitted() + joblist.get_queuing())
- available = max_waiting_jobs - waiting
-
# reload parameters changes
Log.debug("Reloading parameters...")
as_conf.reload()
Autosubmit._load_parameters(as_conf, joblist, platforms)
# variables to be updated on the fly
- max_jobs = as_conf.get_total_jobs()
- Log.debug("Total jobs: {0}".format(max_jobs))
total_jobs = len(joblist.get_job_list())
- Log.info("\n{0} of {1} jobs remaining ({2})".format(total_jobs-len(joblist.get_completed()), total_jobs,
+ Log.info("\n\n{0} of {1} jobs remaining ({2})".format(total_jobs-len(joblist.get_completed()), total_jobs,
strftime("%H:%M")))
safetysleeptime = as_conf.get_safetysleeptime()
Log.debug("Sleep: {0}", safetysleeptime)
retrials = as_conf.get_retrials()
Log.debug("Number of retrials: {0}", retrials)
- # read FAIL_RETRIAL number if, blank at creation time put a given number
- # check availability of machine, if not next iteration after sleep time
- # check availability of jobs, if no new jobs submited and no jobs available, then stop
+ ######################################
+ # AUTOSUBMIT - ALREADY SUBMITTED JOBS
+ ######################################
+ for platform in platforms_to_test:
- # ??? why
- joblist.save()
+ jobinqueue = joblist.get_in_queue(platform)
+ if len(jobinqueue) == 0:
+ continue
- Log.info("Active jobs in platforms:\t{0}", active)
- Log.info("Waiting jobs in platforms:\t{0}", waiting)
+ Log.info("\nJobs in {0} queue: {1}", platform.name, str(len(jobinqueue)))
- if available == 0:
- Log.debug("There's no room for more jobs...")
- else:
- Log.debug("We can safely submit {0} jobs...", available)
+ if not platform.check_host():
+ Log.debug("{0} is not available")
+ continue
- ######################################
- # AUTOSUBMIT - ALREADY SUBMITTED JOBS
- ######################################
- # get the list of jobs currently in the Queue
- jobinqueue = joblist.get_in_queue()
- Log.info("Number of jobs in platforms: {0}", str(len(jobinqueue)))
-
- for job in jobinqueue:
-
- job.print_job()
- Log.debug("Number of jobs in platforms: {0}", str(len(jobinqueue)))
- # Check platforms availability
- job_platform = job.get_platform()
- platform_available = job_platform.check_host()
- if not platform_available:
- Log.debug("There is no platforms available")
- else:
- status = job_platform.check_job(job.id)
+ for job in jobinqueue:
+ job.print_job()
+ status = platform.check_job(job.id)
if status == Status.COMPLETED:
Log.debug("This job seems to have completed...checking")
- job_platform.get_completed_files(job.name)
+ platform.get_completed_files(job.name)
job.check_completion()
else:
job.status = status
@@ -576,52 +557,60 @@ class Autosubmit:
Log.user_warning("Job {0} is FAILED", job.name)
elif job.status is Status.UNKNOWN:
Log.debug("Job {0} in UNKNOWN status. Checking completed files", job.name)
- job_platform.get_completed_files(job.name)
+ platform.get_completed_files(job.name)
job.check_completion(Status.UNKNOWN)
elif job.status is Status.SUBMITTED:
# after checking the jobs , no job should have the status "submitted"
Log.warning('Job {0} in SUBMITTED status after checking.', job.name)
- # explain it !!
- joblist.update_list()
-
##############################
# AUTOSUBMIT - JOBS TO SUBMIT
##############################
# get the list of jobs READY
- jobsavail = joblist.get_ready()
-
- if min(available, len(jobsavail)) == 0:
- Log.debug("There is no job READY or available")
- Log.debug("Number of jobs ready: {0}", len(jobsavail))
- Log.debug("Number of jobs available in platforms: {0}", available)
- elif min(available, len(jobsavail)) > 0 and len(jobinqueue) <= max_jobs:
- Log.info("\nStarting to submit {0} job(s)", min(available, len(jobsavail)))
- # should sort the jobsavail by priority Clean->post->sim>ini
- # s = sorted(jobsavail, key=lambda k:k.name.split('_')[1][:6])
- # probably useless to sort by year before sorting by type
- s = sorted(jobsavail, key=lambda k: k.long_name.split('_')[1][:6])
-
- list_of_jobs_avail = sorted(s, key=lambda k: k.priority, reverse=True)
-
- for job in list_of_jobs_avail[0:min(available, len(jobsavail), max_jobs - len(jobinqueue))]:
- Log.debug(job.name)
- scriptname = job.create_script(as_conf)
- Log.debug(scriptname)
-
- job_platform = job.get_platform()
- platform_available = job_platform.check_host()
- if not platform_available:
- Log.warning("Queue {0} is not available".format(job_platform.name))
- else:
- job_platform.send_script(scriptname)
- job.id = job_platform.submit_job(scriptname)
+ joblist.update_list()
+ for platform in platforms_to_test:
+
+ jobsavail = joblist.get_ready(platform)
+ if len(jobsavail) == 0:
+ continue
+
+ Log.info("\nJobs ready for {1}: {0}", len(jobsavail), platform.name)
+
+ if not platform.check_host():
+ Log.debug("{0} is not available", platform.name)
+ continue
+
+ max_jobs = platform.total_jobs
+ max_waiting_jobs = platform.max_waiting_jobs
+ waiting = len(joblist.get_submitted(platform) + joblist.get_queuing(platform))
+ available = max_waiting_jobs - waiting
+
+ if min(available, len(jobsavail)) == 0:
+ Log.debug("Number of jobs ready: {0}", len(jobsavail))
+ Log.debug("Number of jobs available: {0}", available)
+ elif min(available, len(jobsavail)) > 0 and len(jobinqueue) <= max_jobs:
+ Log.info("Jobs to submit: {0}", min(available, len(jobsavail)))
+ # should sort the jobsavail by priority Clean->post->sim>ini
+ # s = sorted(jobsavail, key=lambda k:k.name.split('_')[1][:6])
+ # probably useless to sort by year before sorting by type
+ s = sorted(jobsavail, key=lambda k: k.long_name.split('_')[1][:6])
+
+ list_of_jobs_avail = sorted(s, key=lambda k: k.priority, reverse=True)
+
+ for job in list_of_jobs_avail[0:min(available, len(jobsavail), max_jobs - len(jobinqueue))]:
+ Log.debug(job.name)
+ scriptname = job.create_script(as_conf)
+ Log.debug(scriptname)
+
+ platform.send_script(scriptname)
+ job.id = platform.submit_job(scriptname)
if job.id is None:
continue
# set status to "submitted"
job.status = Status.SUBMITTED
- Log.info("{0} submitted to {1}\n", job.name, job.get_platform().name)
+ Log.info("{0} submitted", job.name)
+ joblist.save()
time.sleep(safetysleeptime)
Log.info("No more jobs to run.")
@@ -776,6 +765,7 @@ class Autosubmit:
for job in jobs_to_recover:
if job.platform_name is None:
job.platform_name = hpcarch
+ # noinspection PyTypeChecker
job.set_platform(platforms[job.platform_name])
if job.get_platform().get_completed_files(job.name, 0, True):
@@ -835,6 +825,7 @@ class Autosubmit:
for job in joblist.get_job_list():
if job.platform_name is None:
job.platform_name = hpcarch
+ # noinspection PyTypeChecker
job.set_platform(platforms[job.platform_name])
return joblist.check_scripts(as_conf)
@@ -911,7 +902,7 @@ class Autosubmit:
if jobs_conf_path is not None:
parser.set('conf', 'jobs', jobs_conf_path)
if platforms_conf_path is not None:
- parser.set('conf', 'queues', platforms_conf_path)
+ parser.set('conf', 'platforms', platforms_conf_path)
parser.write(config_file)
config_file.close()
@@ -940,7 +931,7 @@ class Autosubmit:
return True
@staticmethod
- def refresh(expid):
+ def refresh(expid, model_conf):
"""
Refresh project folder for given experiment
@@ -957,9 +948,21 @@ class Autosubmit:
project_type = as_conf.get_project_type()
if Autosubmit._copy_code(as_conf, expid, project_type, True):
Log.result("Project folder updated")
+ Autosubmit._create_model_conf(as_conf, model_conf)
return True
@staticmethod
+ def _create_model_conf(as_conf, force):
+ destiny = as_conf.project_file
+ if os.path.exists(destiny):
+ if force:
+ os.remove(destiny)
+ else:
+ return
+ if as_conf.get_project_type() != 'none':
+ shutil.copyfile(os.path.join(as_conf.get_project_dir(), as_conf.get_file_project_conf()), destiny)
+
+ @staticmethod
def create(expid, noplot):
"""
Creates job list for given experiment. Configuration files must be valid before realizaing this process.
@@ -984,10 +987,7 @@ class Autosubmit:
if not Autosubmit._copy_code(as_conf, expid, project_type, False):
return False
- if as_conf.get_project_type() != 'none':
- destiny = as_conf.project_file
- if not os.path.exists(destiny):
- shutil.copyfile(os.path.join(as_conf.get_project_dir(), as_conf.get_file_project_conf()), destiny)
+ Autosubmit._create_model_conf(as_conf, False)
if project_type != "none":
# Check project configuration
@@ -1113,8 +1113,7 @@ class Autosubmit:
elif project_type == "local":
local_project_path = as_conf.get_local_project_path()
- project_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, BasicConfig.LOCAL_PROJ_DIR,
- project_destination)
+ project_path = os.path.join(BasicConfig.LOCAL_ROOT_DIR, expid, BasicConfig.LOCAL_PROJ_DIR)
if os.path.exists(project_path):
Log.info("Using project folder: {0}", project_path)
if not force:
@@ -1177,6 +1176,7 @@ class Autosubmit:
for job in job_list.get_job_list():
Autosubmit.change_status(final, final_status, job)
else:
+ # noinspection PyTypeChecker
data = json.loads(Autosubmit._create_json(fc))
for datejson in data['sds']:
date = datejson['sd']
@@ -1401,7 +1401,7 @@ class Autosubmit:
@staticmethod
def test(expid, chunks, member=None, stardate=None, hpc=None, branch=None):
"""
- Method to conduct a test for a given experiment. It creates a new experiment for a given experiment qith a
+ Method to conduct a test for a given experiment. It creates a new experiment for a given experiment with a
given number of chunks with a random start date and a random member to be run on a random HPC.
@@ -1423,7 +1423,7 @@ class Autosubmit:
:return: True if test was succesful, False otherwise
:rtype: bool
"""
- testid = Autosubmit.expid('test', 'test experiment for {0}'.format(expid), expid, False)
+ testid = Autosubmit.expid('test', 'test experiment for {0}'.format(expid), expid, False, True)
if testid == '':
return False
@@ -1454,6 +1454,7 @@ class Autosubmit:
"DATELIST = " + stardate)
content = content.replace(re.search('MEMBERS =.*', content).group(0),
"MEMBERS = " + member)
+ # noinspection PyTypeChecker
content = content.replace(re.search('NUMCHUNKS =.*', content).group(0),
"NUMCHUNKS = " + chunks)
content = content.replace(re.search('HPCARCH =.*', content).group(0),
@@ -1472,4 +1473,3 @@ class Autosubmit:
if not Autosubmit.run_experiment(testid):
return False
return Autosubmit.delete(testid, True)
-
diff --git a/autosubmit/config/basicConfig.py b/autosubmit/config/basicConfig.py
index 30491dd..5805d27 100755
--- a/autosubmit/config/basicConfig.py
+++ b/autosubmit/config/basicConfig.py
@@ -65,8 +65,8 @@ class BasicConfig:
BasicConfig.DB_FILE = parser.get('database', 'filename')
if parser.has_option('local', 'path'):
BasicConfig.LOCAL_ROOT_DIR = parser.get('local', 'path')
- if parser.has_option('conf', 'queues'):
- BasicConfig.DEFAULT_PLATFORMS_CONF = parser.get('conf', 'queues')
+ if parser.has_option('conf', 'platforms'):
+ BasicConfig.DEFAULT_PLATFORMS_CONF = parser.get('conf', 'platforms')
if parser.has_option('conf', 'jobs'):
BasicConfig.DEFAULT_JOBS_CONF = parser.get('conf', 'jobs')
@@ -76,11 +76,11 @@ class BasicConfig:
Reads configuration from .autosubmitrc files, first from /etc, then for user
directory and last for current path.
"""
- filename = '.autosubmitrc'
+ filename = 'autosubmitrc'
BasicConfig.__read_file_config(os.path.join('/etc', filename))
- BasicConfig.__read_file_config(os.path.join(os.path.expanduser('~'), filename))
- BasicConfig.__read_file_config(os.path.join('.', filename))
+ BasicConfig.__read_file_config(os.path.join(os.path.expanduser('~'), '.' + filename))
+ BasicConfig.__read_file_config(os.path.join('.', '.' + filename))
BasicConfig._update_config()
- return
\ No newline at end of file
+ return
diff --git a/autosubmit/config/config_common.py b/autosubmit/config/config_common.py
index 68fe19d..d86e652 100644
--- a/autosubmit/config/config_common.py
+++ b/autosubmit/config/config_common.py
@@ -165,6 +165,9 @@ class AutosubmitConfig:
result = result and AutosubmitConfig.check_is_boolean(self._platforms_parser, section,
'ADD_PROJECT_TO_HOST', False)
result = result and AutosubmitConfig.check_is_boolean(self._platforms_parser, section, 'TEST_SUITE', False)
+ result = result and AutosubmitConfig.check_is_int(self._platforms_parser, section, 'MAX_WAITING_JOBS',
+ False)
+ result = result and AutosubmitConfig.check_is_int(self._platforms_parser, section, 'TOTAL_JOBS', False)
if not result:
Log.critical("{0} is not a valid config file".format(os.path.basename(self._platforms_parser_file)))
@@ -718,6 +721,8 @@ class AutosubmitConfig:
local_platform.type = 'local'
local_platform.version = ''
local_platform.queue = ''
+ local_platform.max_waiting_jobs = self.get_max_waiting_jobs()
+ local_platform.total_jobs = self.get_total_jobs()
local_platform.set_host(platform.node())
local_platform.set_scratch(os.path.join(BasicConfig.LOCAL_ROOT_DIR, self.expid, BasicConfig.LOCAL_TMP_DIR))
local_platform.set_project(self.expid)
@@ -759,6 +764,11 @@ class AutosubmitConfig:
AutosubmitConfig.get_option(parser, section, 'PROJECT', None))
else:
host = AutosubmitConfig.get_option(parser, section, 'HOST', None)
+
+ remote_platform.max_waiting_jobs = AutosubmitConfig.get_option(parser, section, 'MAX_WAITING_JOBS',
+ self.get_max_waiting_jobs())
+ remote_platform.total_jobs = AutosubmitConfig.get_option(parser, section, 'TOTAL_JOBS',
+ self.get_total_jobs())
remote_platform.set_host(host)
remote_platform.set_project(AutosubmitConfig.get_option(parser, section, 'PROJECT', None))
remote_platform.set_budget(AutosubmitConfig.get_option(parser, section, 'BUDGET', remote_platform.project))
@@ -962,7 +972,3 @@ class AutosubmitConfig:
except:
Log.error("Invalid value {0}: {1}", key, value)
return False
-
-
-
-
diff --git a/autosubmit/config/files/autosubmit.conf b/autosubmit/config/files/autosubmit.conf
index cf56036..22afb98 100644
--- a/autosubmit/config/files/autosubmit.conf
+++ b/autosubmit/config/files/autosubmit.conf
@@ -5,10 +5,10 @@ EXPID =
# No need to change.
# Autosubmit version identifier
AUTOSUBMIT_VERSION =
-# Maximum number of jobs to be waiting in the HPC queue
+# Default maximum number of jobs to be waiting in any platform
# Default = 3
MAXWAITINGJOBS = 3
-# Maximum number of jobs to be running at the same time at the HPC
+# Default maximum number of jobs to be running at the same time at any platform
# Default = 6
TOTALJOBS = 6
# Time (seconds) between connections to the HPC queue scheduler to poll already submitted jobs status
diff --git a/autosubmit/config/files/platforms.conf b/autosubmit/config/files/platforms.conf
index 40c29cb..a4a5049 100644
--- a/autosubmit/config/files/platforms.conf
+++ b/autosubmit/config/files/platforms.conf
@@ -1,4 +1,4 @@
-# Example queue with all options specified
+# Example platform with all options specified
## Platform name
# [PLAFORM]
@@ -26,4 +26,10 @@
# SERIAL_PLATFORM = SERIAL_PLATFORM_NAME
## If specified, autosubmit will run jobs with only one processor in the specified queue.
## Autosubmit will ignore this configuration if SERIAL_PLATFORM is provided
-# SERIAL_QUEUE = SERIAL_QUEUE_NAME
\ No newline at end of file
+# SERIAL_QUEUE = SERIAL_QUEUE_NAME
+# Default Maximum number of jobs to be waiting in any platform queue
+# Default = 3
+MAX_WAITING_JOBS = 3
+# Default maximum number of jobs to be running at the same time at any platform
+# Default = 6
+TOTAL_JOBS = 6
\ No newline at end of file
diff --git a/autosubmit/config/log.py b/autosubmit/config/log.py
index 0f7fa7c..7e35dff 100644
--- a/autosubmit/config/log.py
+++ b/autosubmit/config/log.py
@@ -241,5 +241,3 @@ class Log:
:param args: arguments for message formating (it will be done using format() method on str)
"""
Log.log.critical(msg.format(*args))
-
-
diff --git a/autosubmit/database/db_common.py b/autosubmit/database/db_common.py
index fc2c61d..913c909 100644
--- a/autosubmit/database/db_common.py
+++ b/autosubmit/database/db_common.py
@@ -123,22 +123,25 @@ def check_experiment_exists(name, error_on_inexistence=True):
return True
-def new_experiment(hpc, description, version):
+def new_experiment(description, version, test=False):
"""
Stores a new experiment on the database and generates its identifier
- :param hpc: name of the main HPC to be used by the experiment
- :type hpc: str
+ :param test: flag for test experiments
+ :type test: bool
:param description: experiment's description
:type description: str
:return: experiment id for the new experiment
:rtype: str
"""
- last_exp_name = last_name_used()
+ if test:
+ last_exp_name = last_name_used(True)
+ else:
+ last_exp_name = last_name_used()
if last_exp_name == '':
return ''
if last_exp_name == 'empty':
- if hpc == 'test':
+ if test:
new_name = 'test0000'
else:
new_name = 'a000'
@@ -156,14 +159,12 @@ def new_experiment(hpc, description, version):
return new_name
-def copy_experiment(name, hpc, description, version):
+def copy_experiment(name, description, version, test=False):
"""
Creates a new experiment by copying an existing experiment
:param name: identifier of experiment to copy
:type name: str
- :param hpc: name of the main HPC to be used by the experiment
- :type hpc: str
:param description: experiment's description
:type description: str
:return: experiment id for the new experiment
@@ -171,7 +172,7 @@ def copy_experiment(name, hpc, description, version):
"""
if not check_experiment_exists(name):
return ''
- new_name = new_experiment(hpc, description, version)
+ new_name = new_experiment(description, version, test)
return new_name
@@ -236,11 +237,13 @@ def _next_name(name):
return base36encode(base36decode(name) + 1)
-def last_name_used():
+def last_name_used(test=False):
"""
- Gets last experiment identifier used for HPC
+ Gets last experiment identifier used
- :return: last experiment identifier used for HPC, 'empty' if there is none
+ :param test: flag for test experiments
+ :type test: bool
+ :return: last experiment identifier used, 'empty' if there is none
:rtype: str
"""
if not check_db():
@@ -251,10 +254,18 @@ def last_name_used():
Log.error('Connection to database could not be established: {0}', e.message)
return ''
conn.text_factory = str
- cursor.execute('SELECT name '
- 'FROM experiment '
- 'WHERE rowid=(SELECT max(rowid) FROM experiment WHERE autosubmit_version IS NOT NULL AND '
- 'NOT (autosubmit_version LIKE "%3.0.0b%"))')
+ if test:
+ cursor.execute('SELECT name '
+ 'FROM experiment '
+ 'WHERE rowid=(SELECT max(rowid) FROM experiment WHERE name LIKE "test%" AND '
+ 'autosubmit_version IS NOT NULL AND '
+ 'NOT (autosubmit_version LIKE "%3.0.0b%"))')
+ else:
+ cursor.execute('SELECT name '
+ 'FROM experiment '
+ 'WHERE rowid=(SELECT max(rowid) FROM experiment WHERE name NOT LIKE "test%" AND '
+ 'autosubmit_version IS NOT NULL AND '
+ 'NOT (autosubmit_version LIKE "%3.0.0b%"))')
row = cursor.fetchone()
close_conn(conn, cursor)
if row is None:
diff --git a/autosubmit/job/job.py b/autosubmit/job/job.py
index a33faef..84650e8 100644
--- a/autosubmit/job/job.py
+++ b/autosubmit/job/job.py
@@ -49,7 +49,7 @@ class Job:
"""
def __str__(self):
- return self.name
+ return "{0} STATUS: {1}".format(self.name, self.status)
def __init__(self, name, jobid, status, priority):
self._platform = None
diff --git a/autosubmit/job/job_common.py b/autosubmit/job/job_common.py
index 48095a1..fcafe62 100644
--- a/autosubmit/job/job_common.py
+++ b/autosubmit/job/job_common.py
@@ -51,7 +51,7 @@ class StatisticsSnippet:
###################
set -x
- job_name_ptrn=%ROOTDIR%/tmp/LOG_%EXPID%/%JOBNAME%
+ job_name_ptrn=%CURRENT_ROOTDIR%/tmp/LOG_%EXPID%/%JOBNAME%
job_cmd_stamp=$(stat -c %Z $job_name_ptrn.cmd)
job_start_time=$(date +%s)
@@ -94,20 +94,20 @@ class StatisticsSnippet:
###################
set -x
- job_name_ptrn=%SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%
+ job_name_ptrn=%CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%
job_cmd_stamp=$(stat -c %Z $job_name_ptrn.cmd)
job_start_time=$(date +%s)
job_queue_time=$((job_start_time - job_cmd_stamp))
- if [[ %HPCTYPE% == ecaccess ]]; then
- hpcversion=%HPCVERSION%
+ if [[ %CURRENT_TYPE% == ecaccess ]]; then
+ hpcversion=%CURRENT_VERSION%
if [[ ! -z ${hpcversion+x} ]]; then
if [[ $hpcversion == pbs ]]; then
- filein="$(ls -rt %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/.ecaccess_DO_NOT_REMOVE/job.i* | xargs grep -l %JOBNAME% | tail -1)"
+ filein="$(ls -rt %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/.ecaccess_DO_NOT_REMOVE/job.i* | xargs grep -l %JOBNAME% | tail -1)"
jobid="$(echo "$filein" | cut -d. -f3 | cut -c2-)"
- fileout="%SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/.ecaccess_DO_NOT_REMOVE/job.o"$jobid"_0"
+ fileout="%CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/.ecaccess_DO_NOT_REMOVE/job.o"$jobid"_0"
ln -s ${fileout} ${job_name_ptrn}_${jobid}.out
- fileerr="%SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/.ecaccess_DO_NOT_REMOVE/job.e"$jobid"_0"
+ fileerr="%CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/.ecaccess_DO_NOT_REMOVE/job.e"$jobid"_0"
ln -s ${fileerr} ${job_name_ptrn}_${jobid}.err
fi
fi
@@ -130,14 +130,14 @@ class StatisticsSnippet:
set -x
job_end_time=$(date +%s)
job_run_time=$((job_end_time - job_start_time))
- case %HPCTYPE% in
+ case %CURRENT_TYPE% in
sge) errfile_created="TRUE"; errfile_ptrn="\.e" ;;
lsf) errfile_created="TRUE"; errfile_ptrn="\.err" ;;
ecaccess) errfile_created="TRUE"; errfile_ptrn="\.err" ;;
pbs) errfile_created="FALSE"; errfile_ptrn="\.e" ;;
slurm) errfile_created="TRUE"; errfile_ptrn="\.err" ;;
ps) errfile_created="TRUE"; errfile_ptrn="\.err" ;;
- *) echo "!!! %HPCTYPE% is not valid scheduler !!!"; exit 1 ;;
+ *) echo "!!! %CURRENT_TYPE% is not valid scheduler !!!"; exit 1 ;;
esac
failed_jobs=0; failed_errfiles=""
set +e; ls -1 ${job_name_ptrn}* | grep $errfile_ptrn
@@ -172,4 +172,3 @@ class StatisticsSnippet:
echo "$job_end_time $job_queue_time $job_run_time $failed_jobs $failed_jobs_qt $failed_jobs_rt" > ${job_name_ptrn}_COMPLETED
exit 0
""")
-
diff --git a/autosubmit/job/job_list.py b/autosubmit/job/job_list.py
index caffea5..60880e8 100644
--- a/autosubmit/job/job_list.py
+++ b/autosubmit/job/job_list.py
@@ -190,113 +190,146 @@ class JobList:
"""
return self._job_list
- def get_completed(self):
+ def get_completed(self, platform=None):
"""
Returns a list of completed jobs
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: completed jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.COMPLETED]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.COMPLETED]
- def get_submitted(self):
+ def get_submitted(self, platform=None):
"""
Returns a list of submitted jobs
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: submitted jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.SUBMITTED]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.SUBMITTED]
- def get_running(self):
+ def get_running(self, platform=None):
"""
Returns a list of jobs running
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: running jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.RUNNING]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.RUNNING]
- def get_queuing(self):
+ def get_queuing(self, platform=None):
"""
Returns a list of jobs queuing
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: queuedjobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.QUEUING]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.QUEUING]
- def get_failed(self):
+ def get_failed(self, platform=None):
"""
Returns a list of failed jobs
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: failed jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.FAILED]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.FAILED]
- def get_ready(self):
+ def get_ready(self, platform=None):
"""
Returns a list of ready jobs
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: ready jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.READY]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.READY]
- def get_waiting(self):
+ def get_waiting(self, platform=None):
"""
Returns a list of jobs waiting
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: waiting jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.WAITING]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.WAITING]
- def get_unknown(self):
+ def get_unknown(self, platform=None):
"""
Returns a list of jobs on unknown state
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: unknown state jobs
:rtype: list
"""
- return [job for job in self._job_list if job.status == Status.UNKNOWN]
+ return [job for job in self._job_list if (platform is None or job.get_platform() is platform) and
+ job.status == Status.UNKNOWN]
- def get_in_queue(self):
+ def get_in_queue(self, platform=None):
"""
Returns a list of jobs in the platforms (Submitted, Running, Queuing)
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: jobs in platforms
:rtype: list
"""
- return self.get_submitted() + self.get_running() + self.get_queuing()
+ return self.get_submitted(platform) + self.get_running(platform) + self.get_queuing(platform)
- def get_not_in_queue(self):
+ def get_not_in_queue(self, platform=None):
"""
Returns a list of jobs NOT in the platforms (Ready, Waiting)
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: jobs not in platforms
:rtype: list
"""
- return self.get_ready() + self.get_waiting()
+ return self.get_ready(platform) + self.get_waiting(platform)
- def get_finished(self):
+ def get_finished(self, platform=None):
"""
Returns a list of jobs finished (Completed, Failed)
+
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: finsihed jobs
:rtype: list
"""
- return self.get_completed() + self.get_failed()
+ return self.get_completed(platform) + self.get_failed(platform)
- def get_active(self):
+ def get_active(self, platform=None):
"""
Returns a list of active jobs (In platforms, Ready)
+ :param platform: job platform
+ :type platform: HPCPlatform
:return: active jobs
:rtype: list
"""
- return self.get_in_queue() + self.get_ready() + self.get_unknown()
+ return self.get_in_queue(platform) + self.get_ready(platform) + self.get_unknown(platform)
def get_job_by_name(self, name):
"""
@@ -424,6 +457,7 @@ class JobList:
        # reset jobs that have failed less than 10 times
if 'RETRIALS' in self._parameters:
+ # noinspection PyTypeChecker
retrials = int(self._parameters['RETRIALS'])
else:
retrials = 4
diff --git a/autosubmit/platforms/ecplatform.py b/autosubmit/platforms/ecplatform.py
index 3c17c2d..5b84120 100644
--- a/autosubmit/platforms/ecplatform.py
+++ b/autosubmit/platforms/ecplatform.py
@@ -171,8 +171,8 @@ class EcHeader:
#@ class = ns
#@ job_type = serial
#@ job_name = %JOBNAME%
- #@ output = %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).out
- #@ error = %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).err
+ #@ output = %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).out
+ #@ error = %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).err
#@ notification = error
#@ resources = ConsumableCpus(1) ConsumableMemory(1200mb)
#@ wall_clock_limit = %WALLCLOCK%:00
@@ -191,8 +191,8 @@ class EcHeader:
#@ class = np
#@ job_type = parallel
#@ job_name = %JOBNAME%
- #@ output = %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).out
- #@ error = %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).err
+ #@ output = %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).out
+ #@ error = %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/$(job_name).$(jobid).err
#@ notification = error
#@ resources = ConsumableCpus(1) ConsumableMemory(1200mb)
#@ ec_smt = no
@@ -229,7 +229,7 @@ class EcCcaHeader:
#PBS -N %JOBNAME%
#PBS -q ns
#PBS -l walltime=%WALLCLOCK%:00
- #PBS -l EC_billing_account=%HPCBUDG%
+ #PBS -l EC_billing_account=%CURRENT_BUDG%
#
###############################################################################
@@ -247,7 +247,7 @@ class EcCcaHeader:
#PBS -l EC_threads_per_task=%NUMTHREADS%
#PBS -l EC_tasks_per_node=%NUMTASK%
#PBS -l walltime=%WALLCLOCK%:00
- #PBS -l EC_billing_account=%HPCBUDG%
+ #PBS -l EC_billing_account=%CURRENT_BUDG%
#
###############################################################################
- """)
\ No newline at end of file
+ """)
diff --git a/autosubmit/platforms/localplatform.py b/autosubmit/platforms/localplatform.py
index c863904..2b520ae 100644
--- a/autosubmit/platforms/localplatform.py
+++ b/autosubmit/platforms/localplatform.py
@@ -20,6 +20,7 @@ from commands import getstatusoutput
import os
import textwrap
from xml.dom.minidom import parseString
+import subprocess
from autosubmit.platforms.hpcplatform import HPCPlatform
from autosubmit.config.basicConfig import BasicConfig
@@ -67,7 +68,7 @@ class LocalPlatform(HPCPlatform):
return self.mkdir_cmd
def parse_job_output(self, output):
- return output
+ return output[0]
def get_submitted_job_id(self, output):
return output
@@ -87,10 +88,12 @@ class LocalPlatform(HPCPlatform):
return True
def send_command(self, command):
- (status, output) = getstatusoutput(command)
- if status != 0:
- Log.error('Could not execute command {0} on {1}'.format(command, self._host))
+ try:
+ output = subprocess.check_output(command, shell=True)
+ except subprocess.CalledProcessError as e:
+ Log.error('Could not execute command {0} on {1}'.format(e.cmd, self._host))
return False
+ Log.debug("Command '{0}': {1}", command, output)
self._ssh_output = output
return True
diff --git a/autosubmit/platforms/lsfplatform.py b/autosubmit/platforms/lsfplatform.py
index c42c502..67e9fab 100644
--- a/autosubmit/platforms/lsfplatform.py
+++ b/autosubmit/platforms/lsfplatform.py
@@ -103,7 +103,7 @@ class LsfHeader:
if job.parameters['CURRENT_QUEUE'] == '':
return ""
else:
- return "BSUB -q {0}".format(job.parameters['HPCQUEUE'])
+ return "BSUB -q {0}".format(job.parameters['CURRENT_QUEUE'])
SERIAL = textwrap.dedent("""\
#!/bin/sh
@@ -113,8 +113,8 @@ class LsfHeader:
#
#%QUEUE_DIRECTIVE%
#BSUB -J %JOBNAME%
- #BSUB -oo %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.out
- #BSUB -eo %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.err
+ #BSUB -oo %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.out
+ #BSUB -eo %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.err
#BSUB -W %WALLCLOCK%
#BSUB -n %NUMPROC%
#
@@ -129,11 +129,11 @@ class LsfHeader:
#
#%QUEUE_DIRECTIVE%
#BSUB -J %JOBNAME%
- #BSUB -oo %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.out
- #BSUB -eo %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.err
+ #BSUB -oo %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.out
+ #BSUB -eo %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%_%J.err
#BSUB -W %WALLCLOCK%
#BSUB -n %NUMPROC%
#BSUB -R "span[ptile=16]"
#
###############################################################################
- """)
\ No newline at end of file
+ """)
diff --git a/autosubmit/platforms/pbsplatform.py b/autosubmit/platforms/pbsplatform.py
index b217148..6be9def 100644
--- a/autosubmit/platforms/pbsplatform.py
+++ b/autosubmit/platforms/pbsplatform.py
@@ -127,7 +127,7 @@ class Pbs12Header:
#PBS -N %JOBNAME%
#PBS -l select=serial=true:ncpus=1
#PBS -l walltime=%WALLCLOCK%:00
- #PBS -A %HPCBUDG%
+ #PBS -A %CURRENT_BUDG%
#
###############################################################################
""")
@@ -141,7 +141,7 @@ class Pbs12Header:
#PBS -N %JOBNAME%
#PBS -l select=%NUMPROC%
#PBS -l walltime=%WALLCLOCK%:00
- #PBS -A %HPCBUDG%
+ #PBS -A %CURRENT_BUDG%
#
###############################################################################
""")
@@ -172,7 +172,7 @@ class Pbs10Header:
#PBS -N %JOBNAME%
#PBS -q serial
#PBS -l cput=%WALLCLOCK%:00
- #PBS -A %HPCBUDG%
+ #PBS -A %CURRENT_BUDG%
#
###############################################################################
""")
@@ -187,7 +187,7 @@ class Pbs10Header:
#PBS -l mppwidth=%NUMPROC%
#PBS -l mppnppn=32
#PBS -l walltime=%WALLCLOCK%:00
- #PBS -A %HPCBUDG%
+ #PBS -A %CURRENT_BUDG%
#
###############################################################################
""")
@@ -220,8 +220,8 @@ class Pbs11Header:
#PBS -l mppwidth=%NUMPROC%
#PBS -l mppnppn=%NUMTASK%
#PBS -l walltime=%WALLCLOCK%
- #PBS -e %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%
- #PBS -o %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%
+ #PBS -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%
+ #PBS -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%
#
###############################################################################
""")
@@ -237,8 +237,8 @@ class Pbs11Header:
#PBS -l mppwidth=%NUMPROC%
#PBS -l mppnppn=%NUMTASK%
#PBS -l walltime=%WALLCLOCK%
- #PBS -e %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%
- #PBS -o %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%
+ #PBS -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%
+ #PBS -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%
#
###############################################################################
- """)
\ No newline at end of file
+ """)
diff --git a/autosubmit/platforms/sgeplatform.py b/autosubmit/platforms/sgeplatform.py
index c621f80..0813b25 100644
--- a/autosubmit/platforms/sgeplatform.py
+++ b/autosubmit/platforms/sgeplatform.py
@@ -114,8 +114,8 @@ class SgeHeader:
#
#$ -S /bin/sh
#$ -N %JOBNAME%
- #$ -e %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/
- #$ -o %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/
+ #$ -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/
+ #$ -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/
#$ -V
#$ -l h_rt=%WALLCLOCK%:00
#%QUEUE_DIRECTIVE%
@@ -131,8 +131,8 @@ class SgeHeader:
#
#$ -S /bin/sh
#$ -N %JOBNAME%
- #$ -e %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/
- #$ -o %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/
+ #$ -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/
+ #$ -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/
#$ -V
#$ -l h_rt=%WALLCLOCK%:00
#$ -pe orte %NUMPROC%
@@ -140,4 +140,3 @@ class SgeHeader:
#
###############################################################################
""")
-
diff --git a/autosubmit/platforms/slurmplatform.py b/autosubmit/platforms/slurmplatform.py
index cca2748..999eac0 100644
--- a/autosubmit/platforms/slurmplatform.py
+++ b/autosubmit/platforms/slurmplatform.py
@@ -124,8 +124,8 @@ class SlurmHeader:
#SBATCH -n %NUMPROC%
#SBATCH -t %WALLCLOCK%:00
#SBATCH -J %JOBNAME%
- #SBATCH -o %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.out
- #SBATCH -e %SCRATCH_DIR%/%HPCPROJ%/%HPCUSER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.err
+ #SBATCH -o %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.out
+ #SBATCH -e %CURRENT_SCRATCH_DIR%/%CURRENT_PROJ%/%CURRENT_USER%/%EXPID%/LOG_%EXPID%/%JOBNAME%-%j.err
#
###############################################################################
""")
@@ -133,4 +133,3 @@ class SlurmHeader:
PARALLEL = textwrap.dedent("""\
""")
-
diff --git a/docs/autosubmit.pdf b/docs/autosubmit.pdf
index 8d61b2b..0d476f8 100644
Binary files a/docs/autosubmit.pdf and b/docs/autosubmit.pdf differ
diff --git a/setup.py b/setup.py
index 079f6b2..bb20652 100644
--- a/setup.py
+++ b/setup.py
@@ -32,14 +32,15 @@ setup(
license='GNU GPL v3',
platforms=['GNU/Linux Debian'],
version=version,
- description='Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverse '
+ description='Autosubmit: a versatile tool to manage Weather and Climate Experiments in diverse'
'Supercomputing Environments',
author='Domingo Manubens-Gil',
author_email='domingo.manubens at ic3.cat',
- url='https://autosubmit.ic3.cat',
+ url='http://autosubmit.ic3.cat',
download_url='http://ic3.cat/wikicfu/index.php/Tools/Autosubmit',
keywords=['climate', 'weather', 'workflow', 'HPC'],
- install_requires=['argparse>=1.2,<2', 'python-dateutil>=1,<2', 'pydotplus', 'pyparsing', 'paramiko'],
+ install_requires=['argparse>=1.2,<2', 'python-dateutil>=1,<2', 'pydotplus>=2', 'pyparsing>=2.0.1',
+ 'paramiko>=1.15'],
# 'numpy','matplotlib>=1.1.1',
packages=find_packages(),
include_package_data=True,
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/ic3-autosubmit.git
More information about the debian-science-commits
mailing list