Skip to content

Commit

Permalink
Merge pull request #135 from tgen/release-v1.6.2
Browse files Browse the repository at this point in the history
Release v1.6.2
  • Loading branch information
bryce-turner authored Oct 25, 2021
2 parents ad1d051 + 147034e commit 5c3d4b7
Show file tree
Hide file tree
Showing 7 changed files with 23 additions and 13 deletions.
11 changes: 11 additions & 0 deletions docs/releases/1.6.2.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Jetstream v1.6.2 Release Notes

## Major changes

- Fixed issue #131
- Fixed parsing of account info when the cluster does not supply accounting info
- Fixed "RuntimeError: generator ignored GeneratorExit" exception handling

## Dev Notes

- Security issues resolved from dependabot
2 changes: 1 addition & 1 deletion jetstream/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

__author__ = 'Ryan Richholt'
__email__ = 'rrichholt@tgen.org'
__version__ = '1.6.1'
__version__ = '1.6.2'


# Configure parallel library dependencies (Used by numpy)
Expand Down
6 changes: 3 additions & 3 deletions jetstream/backends/slurm.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,11 +446,11 @@ def launch_sacct(*job_ids, delimiter=sacct_delimiter, raw=False):
def parse_sacct(data, delimiter=sacct_delimiter, id_pattern=job_id_pattern):
"""Parse stdout from sacct to a dictionary of job ids and data."""
jobs = dict()
lines = iter(data.strip().splitlines())
header = next(lines).strip().split(delimiter)
lines = iter(data.splitlines())
header = next(lines).split(delimiter)

for line in lines:
row = dict(zip(header, line.strip().split(delimiter)))
row = dict(zip(header, line.split(delimiter)))

try:
match = id_pattern.match(row['JobID'])
Expand Down
2 changes: 1 addition & 1 deletion jetstream/pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def find_pipelines(*dirs):
p = Pipeline(path)
log.debug(f'Found {p} at {path}')
yield p
except :
except Exception:
log.debug(f'Failed to load: {path}')

yield from find_pipelines(path)
Expand Down
3 changes: 1 addition & 2 deletions jetstream/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,8 +237,7 @@ def process_exec_directives(self, task):
exec_directive = task.directives.get('exec')

if exec_directive:
env = {'runner': self, 'task': task}
exec(exec_directive, None, env)
exec(exec_directive)
self._workflow_graph = self.workflow.reload_graph()
self._workflow_iterator = iter(self.workflow.graph)

Expand Down
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
confuse==1.0.0
decorator==4.4.0
filelock==3.0.12
Jinja2==2.10.1
Jinja2==2.11.3
MarkupSafe==1.1.1
networkx==2.3
PyYAML==5.1.1
PyYAML==5.4
ulid-py==0.0.9
8 changes: 4 additions & 4 deletions tests/templates/dependencies_3.jst
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
# This template tests dynamic features with the exec directive.
# This template tests dynamic features with the exec directive.
# During a run, add_tasks will add a new task to the workflow. The
# last task will wait for the new task to complete via the
# last task will wait for the new task to complete via the
# after-re directive.
# Expected stdout: "Hello, world! All done!"
- name: start
cmd: printf 'Hello, '
cmd: printf 'Hello, '


- name: add_tasks
after: start
exec: |
runner.workflow.new_task(
self.workflow.new_task(
name='dynamic_task',
cmd="printf 'world! '"
)
Expand Down

0 comments on commit 5c3d4b7

Please sign in to comment.