Merge commit '92bfe2e65986524fc0d39b18d522ae7c55a28e92'

burnettk 2023-08-09 16:14:27 -04:00
commit f791c4328c
No known key found for this signature in database
106 changed files with 5422 additions and 1640 deletions

View File

@ -0,0 +1,13 @@
class BpmnEvent:
def __init__(self, event_definition, payload=None, correlations=None, target=None):
self.event_definition = event_definition
self.payload = payload
self.correlations = correlations or {}
self.target = target
class PendingBpmnEvent:
def __init__(self, name, event_type, value=None):
self.name = name
self.event_type = event_type
self.value = value
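A quick hedged usage sketch (event name and payload invented; the MessageEventDefinition import path is the one introduced elsewhere in this commit):

from SpiffWorkflow.bpmn.event import BpmnEvent, PendingBpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition

definition = MessageEventDefinition('order_placed')      # hypothetical message name
event = BpmnEvent(definition, payload={'order_id': 42})
assert event.correlations == {}                          # correlations default to an empty dict
# PendingBpmnEvent is the lightweight summary that EventDefinition.details() returns
pending = PendingBpmnEvent('order_placed', 'MessageEventDefinition')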

View File

@ -63,11 +63,23 @@ class WorkflowTaskException(WorkflowException):
@staticmethod
def get_task_trace(task):
task_trace = [f"{task.task_spec.bpmn_name} ({task.workflow.spec.file})"]
workflow = task.workflow
while workflow != workflow.outer_workflow:
caller = workflow.name
workflow = workflow.outer_workflow
task_trace.append(f"{workflow.spec.task_specs[caller].bpmn_name} ({workflow.spec.file})")
top = task.workflow.top_workflow
parent = None if task.workflow is top else task.workflow.parent_workflow
# cap the iterations to ensure we do not infinitely loop and tie up all CPUs
max_iterations = 1000
iteration = 0
caller = task
while parent is not None:
if iteration > max_iterations:
raise WorkflowException(
f"Could not find full task trace after {max_iterations} iterations.",
task_spec=task.task_spec,
)
caller = parent.get_task_from_id(caller.workflow.parent_task_id)
task_trace.append(f"{caller.task_spec.bpmn_name} ({parent.spec.file})")
parent = None if caller.workflow is top else caller.workflow.parent_workflow
iteration += 1
return task_trace
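For illustration, a hedged sketch of the shape the new loop produces for a failure inside a called subprocess (task and file names invented):

# get_task_trace(task) might return:
# ['Do Work (child.bpmn)',       # the failing task itself
#  'Call Child (parent.bpmn)']   # the call activity that invoked it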
@staticmethod

View File

@ -45,7 +45,8 @@ from SpiffWorkflow.bpmn.specs.defaults import (
BoundaryEvent,
EventBasedGateway
)
from SpiffWorkflow.bpmn.specs.event_definitions import NoneEventDefinition, TimerEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.simple import NoneEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinition
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import SubWorkflowTask as SubWorkflowTaskMixin
from SpiffWorkflow.bpmn.specs.mixins.events.start_event import StartEvent as StartEventMixin

View File

@ -43,7 +43,6 @@ class ProcessParser(NodeParser):
"""
super().__init__(node, nsmap, filename=filename, lane=lane)
self.parser = p
self.parsed_nodes = {}
self.lane = lane
self.spec = None
self.process_executable = self.is_executable()
@ -92,7 +91,7 @@ class ProcessParser(NodeParser):
"""
Returns a list of ids referenced by `bpmn:callActivity` nodes.
"""
return self.xpath("./bpmn:callActivity/@calledElement")
return self.xpath(".//bpmn:callActivity/@calledElement")
def parse_node(self, node):
"""
@ -100,8 +99,8 @@ class ProcessParser(NodeParser):
can be called by a TaskParser instance, that is owned by this
ProcessParser.
"""
if node.get('id') in self.parsed_nodes:
return self.parsed_nodes[node.get('id')]
if node.get('id') in self.spec.task_specs:
return self.spec.task_specs[node.get('id')]
(node_parser, spec_class) = self.parser._get_parser_class(node.tag)
if not node_parser or not spec_class:

View File

@ -25,8 +25,8 @@ from SpiffWorkflow.bpmn.specs.defaults import (
SequentialMultiInstanceTask,
ParallelMultiInstanceTask
)
from SpiffWorkflow.bpmn.specs.control import _BoundaryEventParent
from SpiffWorkflow.bpmn.specs.event_definitions import CancelEventDefinition
from SpiffWorkflow.bpmn.specs.control import BoundaryEventSplit, BoundaryEventJoin
from SpiffWorkflow.bpmn.specs.event_definitions.simple import CancelEventDefinition
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
from .util import one
@ -160,18 +160,23 @@ class TaskParser(NodeParser):
def _add_boundary_event(self, children):
parent = _BoundaryEventParent(
self.spec, '%s.BoundaryEventParent' % self.bpmn_id,
self.task, lane=self.task.lane)
self.process_parser.parsed_nodes[self.node.get('id')] = parent
parent.connect(self.task)
split_task = BoundaryEventSplit(self.spec, f'{self.bpmn_id}.BoundaryEventSplit', lane=self.task.lane)
join_task = BoundaryEventJoin(
self.spec,
f'{self.bpmn_id}.BoundaryEventJoin',
lane=self.task.lane,
split_task=split_task.name,
cancel=True
)
split_task.connect(self.task)
self.task.connect(join_task)
for event in children:
child = self.process_parser.parse_node(event)
if isinstance(child.event_definition, CancelEventDefinition) \
and not isinstance(self.task, TransactionSubprocess):
if isinstance(child.event_definition, CancelEventDefinition) and not isinstance(self.task, TransactionSubprocess):
self.raise_validation_exception('Cancel Events may only be used with transactions')
parent.connect(child)
return parent
split_task.connect(child)
child.connect(join_task)
return split_task
def parse_node(self):
"""
@ -198,8 +203,6 @@ class TaskParser(NodeParser):
boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.bpmn_id)
if boundary_event_nodes:
parent = self._add_boundary_event(boundary_event_nodes)
else:
self.process_parser.parsed_nodes[self.node.get('id')] = self.task
children = []
outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.bpmn_id)
@ -213,9 +216,10 @@ class TaskParser(NodeParser):
self.raise_validation_exception('When looking for a task spec, we found two items, '
'perhaps a form has the same ID? (%s)' % target_ref)
c = self.process_parser.parse_node(target_node)
split_task = self.spec.task_specs.get(f'{target_ref}.BoundaryEventSplit')
c = self.process_parser.parse_node(target_node) if split_task is None else split_task
position = self.get_position(target_node)
children.append((position, c, target_node, sequence_flow))
children.append((position, c, sequence_flow))
if children:
# Sort children by their y coordinate.
@ -225,11 +229,11 @@ class TaskParser(NodeParser):
default_outgoing = self.node.get('default')
if len(children) == 1 and isinstance(self.task, (ExclusiveGateway, InclusiveGateway)):
(position, c, target_node, sequence_flow) = children[0]
(position, c, sequence_flow) = children[0]
if self.parse_condition(sequence_flow) is None:
default_outgoing = sequence_flow.get('id')
for (position, c, target_node, sequence_flow) in children:
for (position, c, sequence_flow) in children:
self.connect_outgoing(c, sequence_flow, sequence_flow.get('id') == default_outgoing)
return parent if boundary_event_nodes else self.task

View File

@ -22,20 +22,28 @@ from lxml import etree
from .ValidationException import ValidationException
from .TaskParser import TaskParser
from .util import first, one
from SpiffWorkflow.bpmn.specs.event_definitions.simple import (
    NoneEventDefinition,
    CancelEventDefinition,
    TerminateEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.timer import (
    TimeDateEventDefinition,
    DurationTimerEventDefinition,
    CycleTimerEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import (
    SignalEventDefinition,
    ErrorEventDefinition,
    EscalationEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.message import (
    MessageEventDefinition,
    CorrelationProperty
)
from SpiffWorkflow.bpmn.specs.event_definitions.multiple import MultipleEventDefinition
CANCEL_EVENT_XPATH = './/bpmn:cancelEventDefinition'
ERROR_EVENT_XPATH = './/bpmn:errorEventDefinition'

View File

@ -58,7 +58,8 @@ class SubprocessParser:
workflow_start_event = task_parser.xpath('./bpmn:startEvent')
workflow_end_event = task_parser.xpath('./bpmn:endEvent')
if len(workflow_start_event) != 1:
raise ValidationException('Multiple Start points are not allowed in SubWorkflow Task',
raise ValidationException(
f'Exactly one start event is required in a SubWorkflow Task; found {len(workflow_start_event)}.',
node=task_parser.node,
file_name=task_parser.filename)
if len(workflow_end_event) == 0:

View File

@ -17,19 +17,24 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.event_definitions.simple import (
    NoneEventDefinition,
    CancelEventDefinition,
    TerminateEventDefinition,
)
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import (
    SignalEventDefinition,
    ErrorEventDefinition,
    EscalationEventDefinition,
)
from SpiffWorkflow.bpmn.specs.event_definitions.timer import (
    TimeDateEventDefinition,
    DurationTimerEventDefinition,
    CycleTimerEventDefinition,
)
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.multiple import MultipleEventDefinition
from .helpers.spec import EventDefinitionConverter
class CancelEventDefinitionConverter(EventDefinitionConverter):
@ -44,7 +49,7 @@ class ErrorEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['error_code'] = event_definition.error_code
dct['code'] = event_definition.code
return dct
@ -55,7 +60,7 @@ class EscalationEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['escalation_code'] = event_definition.escalation_code
dct['code'] = event_definition.code
return dct

View File

@ -22,11 +22,7 @@ from functools import partial
from SpiffWorkflow.operators import Attrib, PathAttrib
from SpiffWorkflow.bpmn.specs.mixins.bpmn_spec_mixin import BpmnSpecMixin
from SpiffWorkflow.bpmn.specs.event_definitions import (
NamedEventDefinition,
TimerEventDefinition,
CorrelationProperty
)
from SpiffWorkflow.bpmn.specs.event_definitions.message import CorrelationProperty
class BpmnSpecConverter:
@ -96,19 +92,13 @@ class EventDefinitionConverter(BpmnSpecConverter):
def to_dict(self, event_definition):
dct = {
'internal': event_definition.internal,
'external': event_definition.external,
'description': event_definition.description,
'name': event_definition.name
}
if isinstance(event_definition, (NamedEventDefinition, TimerEventDefinition)):
dct['name'] = event_definition.name
return dct
def from_dict(self, dct):
internal, external = dct.pop('internal'), dct.pop('external')
event_definition = self.spec_class(**dct)
event_definition.internal = internal
event_definition.external = external
return event_definition
def correlation_properties_to_dict(self, props):

View File

@ -20,7 +20,7 @@
from datetime import datetime, timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.event_definitions import LOCALTZ
from SpiffWorkflow.bpmn.specs.event_definitions.timer import LOCALTZ
from .exceptions import VersionMigrationError

View File

@ -0,0 +1,103 @@
from uuid import uuid4
def update_event_definition_attributes(dct):
def update_specs(wf_spec):
for spec in wf_spec['task_specs'].values():
if 'event_definition' in spec:
spec['event_definition'].pop('internal', None)
spec['event_definition'].pop('external', None)
if 'escalation_code' in spec['event_definition']:
spec['event_definition']['code'] = spec['event_definition'].pop('escalation_code')
if 'error_code' in spec['event_definition']:
spec['event_definition']['code'] = spec['event_definition'].pop('error_code')
update_specs(dct['spec'])
for sp_spec in dct['subprocess_specs'].values():
update_specs(sp_spec)
def remove_boundary_event_parent(dct):
def update_specs(wf_spec):
new_specs, delete_specs = {}, []
for spec in wf_spec['task_specs'].values():
if spec['typename'] == '_BoundaryEventParent':
delete_specs.append(spec['name'])
spec.pop('main_child_task_spec')
spec['typename'] = 'BoundaryEventSplit'
spec['name'] = spec['name'].replace('BoundaryEventParent', 'BoundaryEventSplit')
new_specs[spec['name']] = spec
join = {
"name": spec['name'].replace('BoundaryEventSplit', 'BoundaryEventJoin'),
"manual": False,
"bpmn_id": None,
"lookahead": 2,
"inputs": spec['outputs'],
"outputs": [],
"split_task": spec['name'],
"threshold": None,
"cancel": True,
"typename": "BoundaryEventJoin"
}
new_specs[join['name']] = join
for parent in spec['inputs']:
parent_spec = wf_spec['task_specs'][parent]
parent_spec['outputs'] = [name.replace('BoundaryEventParent', 'BoundaryEventSplit') for name in parent_spec['outputs']]
for child in spec['outputs']:
child_spec = wf_spec['task_specs'][child]
child_spec['outputs'].append(join['name'])
child_spec['inputs'] = [name.replace('BoundaryEventParent', 'BoundaryEventSplit') for name in child_spec['inputs']]
wf_spec['task_specs'].update(new_specs)
for name in delete_specs:
del wf_spec['task_specs'][name]
def update_tasks(wf):
new_tasks = {}
for task in wf['tasks'].values():
if task['task_spec'].endswith('BoundaryEventParent'):
task['task_spec'] = task['task_spec'].replace('BoundaryEventParent', 'BoundaryEventSplit')
completed = all([ wf['tasks'][child]['state'] in [64, 256] for child in task['children'] ])
for child in task['children']:
child_task = wf['tasks'][child]
if child_task['state'] < 8:
# MAYBE, LIKELY, FUTURE: use parent state
state = child_task['state']
elif child_task['state'] < 64:
# WAITING, READY, STARTED (definite): join is FUTURE
state = 4
elif child_task['state'] == 64:
# COMPLETED: if the join is not finished, WAITING, otherwise COMPLETED
state = 64 if completed else 8
elif child_task['state'] == 128:
# ERROR: we don't know what the original state was, but we can't proceed through the gateway
state = 8
else:
# Cancelled tasks don't have children
continue
new_task = {
'id': str(uuid4()),
'parent': child_task['id'],
'children': [],
'state': state,
'task_spec': task['task_spec'].replace('BoundaryEventSplit', 'BoundaryEventJoin'),
'last_state_change': None,
'triggered': False,
'internal_data': {},
'data': {},
}
child_task['children'].append(new_task['id'])
new_tasks[new_task['id']] = new_task
wf['tasks'].update(new_tasks)
update_specs(dct['spec'])
for sp_spec in dct['subprocess_specs'].values():
update_specs(sp_spec)
update_tasks(dct)
for sp in dct['subprocesses'].values():
update_tasks(sp)
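A hedged before/after of what update_event_definition_attributes does to a single serialized event definition (field values invented):

before = {'typename': 'ErrorEventDefinition', 'internal': False, 'external': True, 'name': 'err', 'error_code': 'E42'}
# after update_event_definition_attributes(dct), the same entry reads:
after = {'typename': 'ErrorEventDefinition', 'name': 'err', 'code': 'E42'}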

View File

@ -30,6 +30,14 @@ from .version_1_2 import (
convert_simple_tasks,
update_bpmn_attributes,
)
from .version_1_3 import update_event_definition_attributes, remove_boundary_event_parent
def from_version_1_2(old):
new = deepcopy(old)
update_event_definition_attributes(new)
remove_boundary_event_parent(new)
new['VERSION'] = "1.3"
return new
def from_version_1_1(old):
"""
@ -62,7 +70,7 @@ def from_version_1_1(old):
convert_simple_tasks(new)
update_bpmn_attributes(new)
new['VERSION'] = "1.2"
return new
return from_version_1_2(new)
def from_version_1_0(old):
"""
@ -85,4 +93,5 @@ def from_version_1_0(old):
MIGRATIONS = {
'1.0': from_version_1_0,
'1.1': from_version_1_1,
'1.2': from_version_1_2,
}
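A hedged sketch of how a caller might drive this table (the serializer's real entry point and version key may differ):

def migrate(dct, version):
    # Apply registered migrations until the dict's version has no successor.
    while version in MIGRATIONS:
        dct = MIGRATIONS[version](dct)
        version = dct['VERSION']
    return dct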

View File

@ -18,7 +18,6 @@
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec
from SpiffWorkflow.bpmn.specs.control import _BoundaryEventParent
from .helpers.spec import WorkflowSpecConverter
@ -89,8 +88,6 @@ class BpmnProcessSpecConverter(WorkflowSpecConverter):
# Now we have to go back and fix all the circular references to everything
for task_spec in spec.task_specs.values():
if isinstance(task_spec, _BoundaryEventParent):
task_spec.main_child_task_spec = spec.get_task_spec_from_name(task_spec.main_child_task_spec)
task_spec.inputs = [ spec.get_task_spec_from_name(name) for name in task_spec.inputs ]
task_spec.outputs = [ spec.get_task_spec_from_name(name) for name in task_spec.outputs ]

View File

@ -17,8 +17,15 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.control import BpmnStartTask, _EndJoin, _BoundaryEventParent, SimpleBpmnTask
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import _BpmnCondition
from SpiffWorkflow.bpmn.specs.control import (
BpmnStartTask,
_EndJoin,
BoundaryEventSplit,
BoundaryEventJoin,
SimpleBpmnTask
)
from SpiffWorkflow.bpmn.specs.defaults import (
UserTask,
ManualTask,
@ -137,17 +144,20 @@ class SequentialMultiInstanceTaskConverter(MultiInstanceTaskConverter):
super().__init__(SequentialMultiInstanceTask, registry)
class BoundaryEventParentConverter(BpmnTaskSpecConverter):
class BoundaryEventSplitConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(BoundaryEventSplit, registry)
class BoundaryEventJoinConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(_BoundaryEventParent, registry)
super().__init__(BoundaryEventJoin, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['main_child_task_spec'] = spec.main_child_task_spec.name
dct.update(self.get_join_attributes(spec))
return dct
class SubWorkflowConverter(BpmnTaskSpecConverter):
def __init__(self, cls, registry):
@ -319,7 +329,8 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
BoundaryEventSplitConverter,
BoundaryEventJoinConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,

View File

@ -23,7 +23,8 @@ from copy import deepcopy
from uuid import UUID
from SpiffWorkflow.task import Task
from SpiffWorkflow.bpmn.workflow import BpmnMessage, BpmnWorkflow
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow, BpmnSubWorkflow
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import SubWorkflowTask
from .migration.version_migration import MIGRATIONS
@ -70,9 +71,8 @@ class BpmnWorkflowSerializer:
overhead of converting or restoring the entire thing.
"""
# This is the default version set on the workflow, it can be overwritten
# using the configure_workflow_spec_converter.
VERSION = "1.2"
# This is the default version set on the workflow, it can be overwritten in init
VERSION = "1.3"
VERSION_KEY = "serializer_version"
DEFAULT_JSON_ENCODER_CLS = None
DEFAULT_JSON_DECODER_CLS = None
@ -104,13 +104,14 @@ class BpmnWorkflowSerializer:
cls(spec_converter)
return spec_converter
def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION,
def __init__(self, spec_converter=None, data_converter=None, wf_class=None, sub_wf_class=None, version=VERSION,
json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS):
"""Intializes a Workflow Serializer with the given Workflow, Task and Data Converters.
:param spec_converter: the workflow spec converter
:param data_converter: the data converter
:param wf_class: the workflow class
:param sub_wf_class: the subworkflow class
:param json_encoder_cls: JSON encoder class to be used for dumps/dump operations
:param json_decoder_cls: JSON decoder class to be used for loads/load operations
"""
@ -118,6 +119,7 @@ class BpmnWorkflowSerializer:
self.spec_converter = spec_converter if spec_converter is not None else self.configure_workflow_spec_converter()
self.data_converter = data_converter if data_converter is not None else DefaultRegistry()
self.wf_class = wf_class if wf_class is not None else BpmnWorkflow
self.sub_wf_class = sub_wf_class if sub_wf_class is not None else BpmnSubWorkflow
self.json_encoder_cls = json_encoder_cls
self.json_decoder_cls = json_decoder_cls
self.VERSION = version
@ -166,17 +168,14 @@ class BpmnWorkflowSerializer:
"""
# These properties are applicable to top level & subprocesses
dct = self.process_to_dict(workflow)
# These are only used at the top-level
dct['spec'] = self.spec_converter.convert(workflow.spec)
# These are only used at the top-level
dct['subprocess_specs'] = dict(
(name, self.spec_converter.convert(spec)) for name, spec in workflow.subprocess_specs.items()
)
dct['subprocesses'] = dict(
(str(task_id), self.process_to_dict(sp)) for task_id, sp in workflow.subprocesses.items()
(str(task_id), self.subworkflow_to_dict(sp)) for task_id, sp in workflow.subprocesses.items()
)
dct['bpmn_messages'] = [self.message_to_dict(msg) for msg in workflow.bpmn_messages]
dct['correlations'] = workflow.correlations
dct['bpmn_events'] = [self.event_to_dict(event) for event in workflow.bpmn_events]
return dct
def workflow_from_dict(self, dct):
@ -205,7 +204,7 @@ class BpmnWorkflowSerializer:
workflow = self.wf_class(spec, subprocess_specs, deserializing=True)
# Restore any unretrieved events
workflow.bpmn_messages = [ self.message_from_dict(msg) for msg in dct.get('bpmn_messages', []) ]
workflow.bpmn_events = [ self.event_from_dict(msg) for msg in dct.get('bpmn_events', []) ]
workflow.correlations = dct_copy.pop('correlations', {})
@ -216,6 +215,11 @@ class BpmnWorkflowSerializer:
return workflow
def subworkflow_to_dict(self, workflow):
dct = self.process_to_dict(workflow)
dct['parent_task_id'] = str(workflow.parent_task_id)
return dct
def task_to_dict(self, task):
return {
'id': str(task.id),
@ -225,7 +229,6 @@ class BpmnWorkflowSerializer:
'state': task.state,
'task_spec': task.task_spec.name,
'triggered': task.triggered,
'workflow_name': task.workflow.name,
'internal_data': self.data_converter.convert(task.internal_data),
'data': self.data_converter.convert(task.data),
}
@ -265,7 +268,7 @@ class BpmnWorkflowSerializer:
if isinstance(task_spec, SubWorkflowTask) and task_id in top_dct.get('subprocesses', {}):
subprocess_spec = top.subprocess_specs[task_spec.spec]
subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, deserializing=True)
subprocess = self.sub_wf_class(subprocess_spec, task.id, top_level_workflow, deserializing=True)
subprocess_dct = top_dct['subprocesses'].get(task_id, {})
subprocess.spec.data_objects.update(process.spec.data_objects)
if len(subprocess.spec.data_objects) > 0:
@ -273,6 +276,7 @@ class BpmnWorkflowSerializer:
else:
subprocess.data = self.data_converter.restore(subprocess_dct.pop('data'))
subprocess.success = subprocess_dct.pop('success')
subprocess.correlations = subprocess_dct.pop('correlations', {})
subprocess.task_tree = self.task_tree_from_dict(subprocess_dct, subprocess_dct.pop('root'), None, subprocess, top, top_dct)
subprocess.completed_event.connect(task_spec._on_subworkflow_completed, task)
top_level_workflow.subprocesses[task.id] = subprocess
@ -288,24 +292,26 @@ class BpmnWorkflowSerializer:
def process_to_dict(self, process):
return {
'spec': self.spec_converter.convert(process.spec),
'data': self.data_converter.convert(process.data),
'correlations': process.correlations,
'last_task': str(process.last_task.id) if process.last_task is not None else None,
'success': process.success,
'tasks': self.task_tree_to_dict(process.task_tree),
'root': str(process.task_tree.id),
}
def message_to_dict(self, message):
    dct = {
        'correlations': dict([ (k, self.data_converter.convert(v)) for k, v in message.correlations.items() ]),
        'name': message.name,
        'payload': self.spec_converter.convert(message.payload),
    }
    return dct
def event_to_dict(self, event):
    dct = {
        'event_definition': self.spec_converter.convert(event.event_definition),
        'payload': self.data_converter.convert(event.payload),
        'correlations': dict([ (k, self.data_converter.convert(v)) for k, v in event.correlations.items() ]),
    }
    return dct
def message_from_dict(self, dct):
    return BpmnMessage(
        dict([ (k, self.data_converter.restore(v)) for k, v in dct['correlations'].items() ]),
        dct['name'],
        self.spec_converter.restore(dct['payload'])
    )
def event_from_dict(self, dct):
    return BpmnEvent(
        self.spec_converter.restore(dct['event_definition']),
        self.data_converter.restore(dct['payload']),
        dict([ (k, self.data_converter.restore(v)) for k, v in dct['correlations'].items() ])
    )
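For reference, a hedged round-trip sketch using the defaults (assumes this serializer's serialize_json/deserialize_json entry points and an existing BpmnWorkflow instance):

serializer = BpmnWorkflowSerializer()            # default converters, VERSION 1.3
as_json = serializer.serialize_json(workflow)    # 'workflow' assumed to exist
restored = serializer.deserialize_json(as_json)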

View File

@ -77,9 +77,6 @@ class BpmnTaskSpec(TaskSpec):
def _on_complete_hook(self, my_task):
if my_task.parent:
my_task.parent.task_spec._child_complete_hook(my_task)
if self.io_specification is not None and len(self.io_specification.data_outputs) > 0:
data = {}
for var in self.io_specification.data_outputs:
@ -96,6 +93,3 @@ class BpmnTaskSpec(TaskSpec):
my_task.data.pop(obj.bpmn_id, None)
super()._on_complete_hook(my_task)
def _child_complete_hook(self, child_task):
pass

View File

@ -1,4 +1,3 @@
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
@ -18,8 +17,11 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.specs.StartTask import StartTask
from SpiffWorkflow.specs.Join import Join
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import BpmnTaskSpec
from SpiffWorkflow.bpmn.specs.mixins.unstructured_join import UnstructuredJoin
from SpiffWorkflow.bpmn.specs.mixins.events.intermediate_event import BoundaryEvent
@ -31,37 +33,8 @@ class BpmnStartTask(BpmnTaskSpec, StartTask):
class SimpleBpmnTask(BpmnTaskSpec):
pass
class _BoundaryEventParent(BpmnTaskSpec):
"""This task is inserted before a task with boundary events."""
# I wonder if this would be better modelled as some type of join.
# It would make more sense to have the boundary events and the task
# they're attached to be inputs rather than outputs.
def __init__(self, wf_spec, name, main_child_task_spec, **kwargs):
super(_BoundaryEventParent, self).__init__(wf_spec, name, **kwargs)
self.main_child_task_spec = main_child_task_spec
@property
def spec_type(self):
return 'Boundary Event Parent'
def _run_hook(self, my_task):
# Clear any events that our children might have received and wait for new events
for child in my_task.children:
if isinstance(child.task_spec, BoundaryEvent):
child.task_spec.event_definition.reset(child)
child._set_state(TaskState.WAITING)
return True
def _child_complete_hook(self, child_task):
# If the main child completes, or a cancelling event occurs, cancel any unfinished children
if child_task.task_spec == self.main_child_task_spec or child_task.task_spec.cancel_activity:
for sibling in child_task.parent.children:
if sibling == child_task:
continue
if sibling.task_spec == self.main_child_task_spec or not sibling._is_finished():
sibling.cancel()
class BoundaryEventSplit(SimpleBpmnTask):
def _predict_hook(self, my_task):
# Events attached to the main task might occur
@ -69,9 +42,53 @@ class _BoundaryEventParent(BpmnTaskSpec):
# The main child's state is based on this task's state
state = TaskState.FUTURE if my_task._is_definite() else my_task.state
for child in my_task.children:
if child.task_spec == self.main_child_task_spec:
if not isinstance(child.task_spec, BoundaryEvent):
child._set_state(state)
def _update_hook(self, my_task):
super()._update_hook(my_task)
for task in my_task.children:
if isinstance(task.task_spec, BoundaryEvent) and task._is_predicted():
task._set_state(TaskState.WAITING)
task.task_spec._predict(task)
return True
class BoundaryEventJoin(Join, BpmnTaskSpec):
"""This task is inserted before a task with boundary events."""
def __init__(self, wf_spec, name, **kwargs):
super().__init__(wf_spec, name, **kwargs)
def _check_threshold_structured(self, my_task, force=False):
# Retrieve a list of all activated tasks from the associated
# task that did the conditional parallel split.
split_task = my_task._find_ancestor_from_name(self.split_task)
if split_task is None:
raise WorkflowException(f'Split at {self.split_task} was not reached', task_spec=self)
main, interrupting, noninterrupting = None, [], []
for task in split_task.children:
if not isinstance(task.task_spec, BoundaryEvent):
main = task
elif task.task_spec.cancel_activity:
interrupting.append(task)
else:
noninterrupting.append(task)
if main is None:
raise WorkflowException('No main task found', task_spec=self)
interrupt = any([t._has_state(TaskState.READY|TaskState.COMPLETED) for t in interrupting])
finished = main._is_finished() or interrupt
if finished:
cancel = [t for t in interrupting + noninterrupting if t.state == TaskState.WAITING]
if interrupt:
cancel += [main]
else:
cancel = []
return force or finished, cancel
class _EndJoin(UnstructuredJoin, BpmnTaskSpec):
@ -84,17 +101,7 @@ class _EndJoin(UnstructuredJoin, BpmnTaskSpec):
continue
if task.task_spec == my_task.task_spec:
continue
is_mine = False
w = task.workflow
if w == my_task.workflow:
is_mine = True
while w and w.outer_workflow != w:
w = w.outer_workflow
if w == my_task.workflow:
is_mine = True
if is_mine:
waiting_tasks.append(task)
waiting_tasks.append(task)
return force or len(waiting_tasks) == 0, waiting_tasks
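To make the new wiring concrete, a hedged sketch (spec and task names invented) of the graph TaskParser._add_boundary_event builds around a task with boundary events:

# BoundaryEventSplit --> guarded task --------> BoundaryEventJoin
#                   \--> boundary event(s) ---/
from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec

spec = BpmnProcessSpec('example')   # hypothetical process spec
split = BoundaryEventSplit(spec, 'T.BoundaryEventSplit')
join = BoundaryEventJoin(spec, 'T.BoundaryEventJoin', split_task=split.name, cancel=True)
main = SimpleBpmnTask(spec, 'T')    # stands in for the task the events attach to
split.connect(main)
main.connect(join)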

View File

@ -1,500 +0,0 @@
# Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import re
from datetime import datetime, timedelta, timezone
from calendar import monthrange
from time import timezone as tzoffset, altzone as dstoffset, daylight as isdst
from copy import deepcopy
from SpiffWorkflow.exceptions import WorkflowException
seconds_from_utc = dstoffset if isdst else tzoffset
LOCALTZ = timezone(timedelta(seconds=-1 * seconds_from_utc))
class EventDefinition(object):
"""
This is the base class for Event Definitions. It implements the default throw/catch
behavior for events.
If internal is true, this event should be thrown to the current workflow
If external is true, this event should be thrown to the outer workflow
Default throw behavior is to send the event based on the values of the internal
and external flags.
Default catch behavior is to set the event to fired
"""
def __init__(self, description=None):
# Ideally I'd make these parameters, but I don't want them to be parameters
# for any subclasses (as they are based on event type, not user choice) and
# I don't want to write a separate deserializer for every type.
self.internal, self.external = True, True
self.description = description
@property
def event_type(self):
return f'{self.__class__.__module__}.{self.__class__.__name__}'
def has_fired(self, my_task):
return my_task._get_internal_data('event_fired', False)
def catch(self, my_task, event_definition=None):
my_task._set_internal_data(event_fired=True)
def throw(self, my_task):
self._throw(
event=my_task.task_spec.event_definition,
workflow=my_task.workflow,
outer_workflow=my_task.workflow.outer_workflow
)
def reset(self, my_task):
my_task._set_internal_data(event_fired=False)
def _throw(self, event, workflow, outer_workflow, correlations=None):
# This method exists because usually we just want to send the event in our
# own task spec, but we can't do that for message events.
# We also don't have a more sophisticated method for addressing events to
# a particular process, but this at least provides a mechanism for distinguishing
# between processes and subprocesses.
if self.external and outer_workflow != workflow:
top = workflow._get_outermost_workflow()
top.catch(event, correlations)
else:
workflow.catch(event)
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__
class NamedEventDefinition(EventDefinition):
"""
Extend the base event class to provide a name for the event. Most throw/catch events
have names that will be used to identify the event.
:param name: the name of this event
"""
def __init__(self, name, **kwargs):
super(NamedEventDefinition, self).__init__(**kwargs)
self.name = name
def reset(self, my_task):
super(NamedEventDefinition, self).reset(my_task)
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name
class CancelEventDefinition(EventDefinition):
"""
Cancel events are only handled by the outer workflow, as they can only be used inside
transaction subprocesses.
"""
def __init__(self, **kwargs):
super(CancelEventDefinition, self).__init__(**kwargs)
self.internal = False
class ErrorEventDefinition(NamedEventDefinition):
"""
Error events can occur only in subprocesses and as subprocess boundary events. They're
matched by code rather than name.
"""
def __init__(self, name, error_code=None, **kwargs):
super(ErrorEventDefinition, self).__init__(name,**kwargs)
self.error_code = error_code
self.internal = False
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__ and self.error_code in [ None, other.error_code ]
class EscalationEventDefinition(NamedEventDefinition):
"""
Escalation events have names, though they don't seem to be used for anything. Instead
the spec says that the escalation code should be matched.
"""
def __init__(self, name, escalation_code=None, **kwargs):
"""
Constructor.
:param escalation_code: The escalation code this event should
react to. If None then all escalations will activate this event.
"""
super(EscalationEventDefinition, self).__init__(name, **kwargs)
self.escalation_code = escalation_code
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__ and self.escalation_code in [ None, other.escalation_code ]
class CorrelationProperty:
"""Rules for generating a correlation key when a message is sent or received."""
def __init__(self, name, retrieval_expression, correlation_keys, expected_value=None):
self.name = name # This is the property name
self.retrieval_expression = retrieval_expression # This is how it's generated
self.correlation_keys = correlation_keys # These are the keys it's used by
class MessageEventDefinition(NamedEventDefinition):
"""The default message event."""
def __init__(self, name, correlation_properties=None, **kwargs):
super().__init__(name, **kwargs)
self.correlation_properties = correlation_properties or []
self.payload = None
self.internal = False
def catch(self, my_task, event_definition=None):
self.update_internal_data(my_task, event_definition)
super(MessageEventDefinition, self).catch(my_task, event_definition)
def throw(self, my_task):
# We can't update our own payload, because if this task is reached again
# we have to evaluate it again so we have to create a new event
event = MessageEventDefinition(self.name, self.correlation_properties)
# Generating a payload unfortunately needs to be handled using custom extensions
# However, there needs to be something to apply the correlations to in the
# standard case and this is in line with the way Spiff works otherwise
event.payload = deepcopy(my_task.data)
correlations = self.get_correlations(my_task, event.payload)
my_task.workflow.correlations.update(correlations)
self._throw(event, my_task.workflow, my_task.workflow.outer_workflow, correlations)
def update_internal_data(self, my_task, event_definition):
my_task.internal_data[event_definition.name] = event_definition.payload
def update_task_data(self, my_task):
# I've added this method so that different message implementations can handle
# copying their message data into the task
payload = my_task.internal_data.get(self.name)
if payload is not None:
my_task.set_data(**payload)
def get_correlations(self, task, payload):
correlation_keys = {}
for property in self.correlation_properties:
for key in property.correlation_keys:
if key not in correlation_keys:
correlation_keys[key] = {}
try:
correlation_keys[key][property.name] = task.workflow.script_engine._evaluate(property.retrieval_expression, payload)
except WorkflowException as we:
we.add_note(
f"Failed to evaluate correlation property '{property.name}'"
f" invalid expression '{property.retrieval_expression}'")
we.task_spec = task.task_spec
raise we
return correlation_keys
def conversation(self):
"""An event may have many correlation properties, this figures out
which conversation exists across all of them, or return None if they
do not share a topic. """
conversation = None
if len(self.correlation_properties) > 0:
for prop in self.correlation_properties:
for key in prop.correlation_keys:
conversation = key
for prop in self.correlation_properties:
if conversation not in prop.correlation_keys:
break
return conversation
return None
class NoneEventDefinition(EventDefinition):
"""
This class defines behavior for NoneEvents. We override throw to do nothing.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.internal, self.external = False, False
def throw(self, my_task):
"""It's a 'none' event, so nothing to throw."""
pass
def reset(self, my_task):
"""It's a 'none' event, so nothing to reset."""
pass
class SignalEventDefinition(NamedEventDefinition):
"""The SignalEventDefinition is the implementation of event definition used for Signal Events."""
def __init__(self, name, **kwargs):
super().__init__(name, **kwargs)
class TerminateEventDefinition(EventDefinition):
"""The TerminateEventDefinition is the implementation of event definition used for Termination Events."""
def __init__(self, **kwargs):
super(TerminateEventDefinition, self).__init__(**kwargs)
self.external = False
class TimerEventDefinition(EventDefinition):
def __init__(self, name, expression, **kwargs):
"""
Constructor.
:param name: The description of the timer.
:param expression: An ISO 8601 datetime or interval expression.
"""
super().__init__(**kwargs)
self.name = name
self.expression = expression
@staticmethod
def get_datetime(expression):
dt = datetime.fromisoformat(expression)
if dt.tzinfo is None:
dt = datetime.combine(dt.date(), dt.time(), LOCALTZ)
return dt.astimezone(timezone.utc)
@staticmethod
def get_timedelta_from_start(parsed_duration, start=None):
start = start or datetime.now(timezone.utc)
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(int(months)):
year, month = start.year + idx // 12, start.month + idx % 12
days += monthrange(year, month)[1]
year, month = start.year + months // 12, start.month + months % 12
days += (months - int(months)) * monthrange(year, month)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def get_timedelta_from_end(parsed_duration, end):
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(1, int(months) + 1):
year = end.year - (1 + (idx - end.month) // 12)
month = 1 + (end.month - idx - 1) % 12
days += monthrange(year, month)[1]
days += (months - int(months)) * monthrange(
end.year - (1 + (int(months)- end.month) // 12),
1 + (end.month - months - 1) % 12)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def parse_iso_duration(expression):
# Based on https://en.wikipedia.org/wiki/ISO_8601#Time_intervals
parsed, expr_t, current = {}, False, expression.lower().strip('p').replace(',', '.')
for designator in ['years', 'months', 'weeks', 'days', 't', 'hours', 'minutes', 'seconds']:
value = current.split(designator[0], 1)
if len(value) == 2:
duration, remainder = value
if duration.isdigit():
parsed[designator] = int(duration)
elif duration.replace('.', '').isdigit() and not remainder:
parsed[designator] = float(duration)
if designator in parsed or designator == 't':
current = remainder
if designator == 't':
expr_t = True
date_specs, time_specs = ['years', 'months', 'days'], ['hours', 'minutes', 'seconds']
parsed_t = len([d for d in parsed if d in time_specs]) > 0
if len(current) or parsed_t != expr_t or ('weeks' in parsed and any(v for v in parsed if v in date_specs)):
raise Exception('Invalid duration')
# The actual timedelta will have to be computed based on a start or end date, to account for
# month lengths, leap days, etc. This returns a dict of the parsed elements
return parsed
@staticmethod
def parse_iso_week(expression):
# https://en.wikipedia.org/wiki/ISO_8601#Week_dates
m = re.match(r'(\d{4})W(\d{2})(\d)(T.+)?', expression.upper().replace('-', ''))
year, month, day, ts = m.groups()
ds = datetime.fromisocalendar(int(year), int(month), int(day)).strftime('%Y-%m-%d')
return TimerEventDefinition.get_datetime(ds + (ts or ''))
@staticmethod
def parse_time_or_duration(expression):
if expression.upper().startswith('P'):
return TimerEventDefinition.parse_iso_duration(expression)
elif 'W' in expression.upper():
return TimerEventDefinition.parse_iso_week(expression)
else:
return TimerEventDefinition.get_datetime(expression)
@staticmethod
def parse_iso_recurring_interval(expression):
components = expression.upper().replace('--', '/').strip('R').split('/')
cycles = int(components[0]) if components[0] else -1
start_or_duration = TimerEventDefinition.parse_time_or_duration(components[1])
if len(components) == 3:
end_or_duration = TimerEventDefinition.parse_time_or_duration(components[2])
else:
end_or_duration = None
if isinstance(start_or_duration, datetime):
# Start time + interval duration
start = start_or_duration
duration = TimerEventDefinition.get_timedelta_from_start(end_or_duration, start_or_duration)
elif isinstance(end_or_duration, datetime):
# End time + interval duration
duration = TimerEventDefinition.get_timedelta_from_end(start_or_duration, end_or_duration)
start = end_or_duration - duration
elif end_or_duration is None:
# Just an interval duration, assume a start time of now
start = datetime.now(timezone.utc)
duration = TimeDateEventDefinition.get_timedelta_from_start(start_or_duration, start)
else:
raise Exception("Invalid recurring interval")
return cycles, start, duration
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name
class TimeDateEventDefinition(TimerEventDefinition):
"""A Timer event represented by a specific date/time."""
def has_fired(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
event_value = my_task.workflow.script_engine.evaluate(my_task, self.expression)
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.parse_time_or_duration(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def timer_value(self, my_task):
return my_task._get_internal_data('event_value')
class DurationTimerEventDefinition(TimerEventDefinition):
"""A timer event represented by a duration"""
def has_fired(self, my_task):
event_value = my_task._get_internal_data("event_value")
if event_value is None:
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
parsed_duration = TimerEventDefinition.parse_iso_duration(expression)
event_value = (datetime.now(timezone.utc) + TimerEventDefinition.get_timedelta_from_start(parsed_duration)).isoformat()
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.get_datetime(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def timer_value(self, my_task):
return my_task._get_internal_data("event_value")
class CycleTimerEventDefinition(TimerEventDefinition):
def cycle_complete(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
# Don't necessarily like this, but it's a lot more straightforward than trying to only create
# a child task on loop iterations after the first
my_task._drop_children()
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(expression)
event_value = {'cycles': cycles, 'next': start.isoformat(), 'duration': duration.total_seconds()}
# When the next timer event passes, return True to allow the parent task to generate another child
# Use event fired to indicate that this timer has completed all cycles and the task can be completed
ready = False
if event_value['cycles'] != 0:
next_event = datetime.fromisoformat(event_value['next'])
if next_event < datetime.now(timezone.utc):
event_value['next'] = (next_event + timedelta(seconds=event_value['duration'])).isoformat()
event_value['cycles'] -= 1
ready = True
else:
my_task.internal_data.pop('event_value', None)
my_task.internal_data['event_fired'] = True
my_task._set_internal_data(event_value=event_value)
return ready
def timer_value(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is not None and event_value['cycles'] != 0:
return event_value['next']
class MultipleEventDefinition(EventDefinition):
def __init__(self, event_definitions=None, parallel=False, **kwargs):
super().__init__(**kwargs)
self.event_definitions = event_definitions or []
self.parallel = parallel
def has_fired(self, my_task):
seen_events = my_task.internal_data.get('seen_events', [])
for event in self.event_definitions:
if isinstance(event, TimerEventDefinition):
child = [c for c in my_task.children if c.task_spec.event_definition == event]
child[0].task_spec._update_hook(child[0])
if event.has_fired(child[0]):
seen_events.append(event)
if self.parallel:
# Parallel multiple need to match all events
return all(event in seen_events for event in self.event_definitions)
else:
return len(seen_events) > 0
def catch(self, my_task, event_definition=None):
event_definition.catch(my_task, event_definition)
seen_events = my_task.internal_data.get('seen_events', []) + [event_definition]
my_task._set_internal_data(seen_events=seen_events)
def reset(self, my_task):
my_task.internal_data.pop('seen_events', None)
super().reset(my_task)
def __eq__(self, other):
# This event can catch any of the events associated with it
for event in self.event_definitions:
if event == other:
return True
return False
def throw(self, my_task):
# Multiple events throw all associated events when they fire
for event_definition in self.event_definitions:
self._throw(
event=event_definition,
workflow=my_task.workflow,
outer_workflow=my_task.workflow.outer_workflow
)

View File

@ -0,0 +1,43 @@
from SpiffWorkflow.bpmn.event import BpmnEvent, PendingBpmnEvent
class EventDefinition(object):
"""
This is the base class for Event Definitions. It implements the default throw/catch
behavior for events.
Default throw behavior is to wrap this definition in a BpmnEvent and hand it
to the top-level workflow's catch method.
Default catch behavior is to set the event to fired.
"""
def __init__(self, name=None, description=None):
self.name = name
self.description = description
def has_fired(self, my_task):
return my_task._get_internal_data('event_fired', False)
def catches(self, my_task, event):
return self == event.event_definition
def catch(self, my_task, event=None):
my_task._set_internal_data(event_fired=True)
def throw(self, my_task):
event = BpmnEvent(self)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
"""This method allows events with payloads mrege them into the task"""
pass
def reset(self, my_task):
my_task._set_internal_data(event_fired=False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__)
def __eq__(self, other):
return self.__class__ is other.__class__
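Two hedged checks on the defaults (passing None for the task is only safe here because details() ignores it):

assert EventDefinition() == EventDefinition()           # matching is by class identity
pending = EventDefinition(name='tick').details(None)    # 'tick' is an invented name
assert (pending.name, pending.event_type) == ('tick', 'EventDefinition')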

View File

@ -0,0 +1,76 @@
from copy import deepcopy
from SpiffWorkflow.bpmn.event import BpmnEvent, PendingBpmnEvent
from .base import EventDefinition
class ItemAwareEventDefinition(EventDefinition):
def __init__(self, name, description=None):
super().__init__(name, description)
def catch(self, my_task, event=None):
my_task.internal_data[self.name] = event.payload
super().catch(my_task, event)
def throw(self, my_task):
payload = deepcopy(my_task.data)
event = BpmnEvent(self, payload=payload)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
payload = my_task.internal_data.get(self.name)
if payload is not None:
my_task.set_data(**payload)
def reset(self, my_task):
my_task.internal_data.pop(self.name, None)
super().reset(my_task)
class ErrorEventDefinition(ItemAwareEventDefinition):
"""
Error events can occur only in subprocesses and as subprocess boundary events. They're
matched by code rather than name.
"""
def __init__(self, name, code=None, **kwargs):
super(ErrorEventDefinition, self).__init__(name, **kwargs)
self.code = code
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, self.code)
def __eq__(self, other):
return super().__eq__(other) and self.code in [None, other.code]
class EscalationEventDefinition(ItemAwareEventDefinition):
"""
Escalation events have names, though they don't seem to be used for anything. Instead
the spec says that the escalation code should be matched.
"""
def __init__(self, name, code=None, **kwargs):
"""
Constructor.
:param code: The escalation code this event should
react to. If None then all escalations will activate this event.
"""
super(EscalationEventDefinition, self).__init__(name, **kwargs)
self.code = code
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, self.code)
def __eq__(self, other):
return super().__eq__(other) and self.code in [None, other.code]
class SignalEventDefinition(ItemAwareEventDefinition):
"""The SignalEventDefinition is the implementation of event definition used for Signal Events."""
def __init__(self, name, **kwargs):
super().__init__(name, **kwargs)
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
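One hedged check worth spelling out (codes invented): a definition with code=None acts as a catch-all, so matching is deliberately one-directional:

catch_all = ErrorEventDefinition('err')               # code=None
specific = ErrorEventDefinition('err', code='E42')
assert catch_all == specific                          # None is in [None, 'E42']
assert not (specific == catch_all)                    # 'E42' is not in [None, None]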

View File

@ -0,0 +1,71 @@
from copy import deepcopy
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.bpmn.event import BpmnEvent, PendingBpmnEvent
from .base import EventDefinition
class CorrelationProperty:
"""Rules for generating a correlation key when a message is sent or received."""
def __init__(self, name, retrieval_expression, correlation_keys):
self.name = name # This is the property name
self.retrieval_expression = retrieval_expression # This is how it's generated
self.correlation_keys = correlation_keys # These are the keys it's used by
class MessageEventDefinition(EventDefinition):
"""The default message event."""
def __init__(self, name, correlation_properties=None, **kwargs):
super().__init__(name, **kwargs)
self.correlation_properties = correlation_properties or []
def catches(self, my_task, event):
correlations = my_task.workflow.correlations
if len(self.correlation_properties) == 0 or not correlations:
# If we are not checking correlations (e.g. in many older workflows) or this is the first message, this is True
correlated = True
else:
# Otherwise we have to check to make sure any existing keys match
correlated = all([event.correlations.get(key) == correlations.get(key) for key in event.correlations ])
return self == event.event_definition and correlated
def catch(self, my_task, event=None):
self.update_internal_data(my_task, event)
super().catch(my_task, event)
def throw(self, my_task):
payload = deepcopy(my_task.data)
correlations = self.get_correlations(my_task, payload)
my_task.workflow.correlations.update(correlations)
event = BpmnEvent(self, payload=payload, correlations=correlations)
my_task.workflow.top_workflow.catch(event)
def update_internal_data(self, my_task, event):
my_task.internal_data[event.event_definition.name] = event.payload
def update_task_data(self, my_task):
# I've added this method so that different message implementations can handle
# copying their message data into the task
payload = my_task.internal_data.get(self.name)
if payload is not None:
my_task.set_data(**payload)
def get_correlations(self, task, payload):
correlations = {}
for property in self.correlation_properties:
for key in property.correlation_keys:
if key not in correlations:
correlations[key] = {}
try:
correlations[key][property.name] = task.workflow.script_engine._evaluate(property.retrieval_expression, payload)
except WorkflowException:
# Just ignore missing keys. The dictionaries have to match exactly
pass
return correlations
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, self.correlation_properties)
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
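A hedged sketch of the structure get_correlations builds (expression and key names invented; real values come from the workflow's script engine):

prop = CorrelationProperty('order_id', 'order["id"]', ['order_key'])
msg = MessageEventDefinition('OrderMessage', correlation_properties=[prop])
# For a payload like {'order': {'id': 42}}, get_correlations groups the evaluated
# retrieval expressions by correlation key, roughly: {'order_key': {'order_id': 42}}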

View File

@ -0,0 +1,50 @@
from .timer import TimerEventDefinition, EventDefinition
class MultipleEventDefinition(EventDefinition):
def __init__(self, event_definitions=None, parallel=False, **kwargs):
super().__init__(**kwargs)
self.event_definitions = event_definitions or []
self.parallel = parallel
def has_fired(self, my_task):
event_definitions = list(self.event_definitions)
seen_events = my_task.internal_data.get('seen_events', [])
for event_definition in self.event_definitions:
if isinstance(event_definition, TimerEventDefinition):
child = [c for c in my_task.children if c.task_spec.event_definition == event_definition]
child[0].task_spec._update_hook(child[0])
if event_definition.has_fired(child[0]) and event_definition in event_definitions:
event_definitions.remove(event_definition)
else:
for event in seen_events:
if event_definition.catches(my_task, event) and event_definition in event_definitions:
event_definitions.remove(event_definition)
if self.parallel:
# Parallel multiple need to match all events
return len(event_definitions) == 0
else:
return len(seen_events) > 0
def catch(self, my_task, event=None):
event.event_definition.catch(my_task, event)
seen_events = my_task.internal_data.get('seen_events', []) + [event]
my_task._set_internal_data(seen_events=seen_events)
def reset(self, my_task):
my_task.internal_data.pop('seen_events', None)
super().reset(my_task)
def __eq__(self, other):
# This event can catch any of the events associated with it
for event in self.event_definitions:
if event == other:
return True
return False
def throw(self, my_task):
# Multiple events throw all associated events when they fire
for event_definition in self.event_definitions:
event_definition.throw(my_task)
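A hedged construction sketch (signal names invented): with parallel=True every listed definition must be seen before has_fired is true; with parallel=False the first seen event suffices:

from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import SignalEventDefinition

multi = MultipleEventDefinition(
    [SignalEventDefinition('a'), SignalEventDefinition('b')],
    parallel=True,
)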

View File

@ -0,0 +1,39 @@
from SpiffWorkflow.bpmn.event import BpmnEvent
from .base import EventDefinition
class NoneEventDefinition(EventDefinition):
"""This class defines behavior for NoneEvents. We override throw to do nothing."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
def throw(self, my_task):
"""It's a 'none' event, so nothing to throw."""
pass
def reset(self, my_task):
"""It's a 'none' event, so nothing to reset."""
pass
class CancelEventDefinition(EventDefinition):
"""Cancel events are only handled by the outerworkflow, as they can only be used inside of transaction subprocesses."""
def __init__(self, **kwargs):
super(CancelEventDefinition, self).__init__(**kwargs)
def throw(self, my_task):
event = BpmnEvent(self, target=my_task.workflow.parent_workflow)
my_task.workflow.top_workflow.catch(event)
class TerminateEventDefinition(EventDefinition):
"""The TerminateEventDefinition is the implementation of event definition used for Termination Events."""
def __init__(self, **kwargs):
super(TerminateEventDefinition, self).__init__(**kwargs)
def throw(self, my_task):
event = BpmnEvent(my_task.task_spec.event_definition, target=my_task.workflow)
my_task.workflow.top_workflow.catch(event)

View File

@ -0,0 +1,203 @@
import re
from datetime import datetime, timedelta, timezone
from calendar import monthrange
from time import timezone as tzoffset, altzone as dstoffset, daylight as isdst
from SpiffWorkflow.bpmn.event import PendingBpmnEvent
from .base import EventDefinition
seconds_from_utc = dstoffset if isdst else tzoffset
LOCALTZ = timezone(timedelta(seconds=-1 * seconds_from_utc))
class TimerEventDefinition(EventDefinition):
def __init__(self, name, expression, **kwargs):
"""
Constructor.
:param name: The description of the timer.
:param expression: An ISO 8601 datetime or interval expression.
"""
super().__init__(**kwargs)
self.name = name
self.expression = expression
@staticmethod
def get_datetime(expression):
dt = datetime.fromisoformat(expression)
if dt.tzinfo is None:
dt = datetime.combine(dt.date(), dt.time(), LOCALTZ)
return dt.astimezone(timezone.utc)
@staticmethod
def get_timedelta_from_start(parsed_duration, start=None):
start = start or datetime.now(timezone.utc)
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(int(months)):
year, month = start.year + idx // 12, start.month + idx % 12
days += monthrange(year, month)[1]
year, month = start.year + months // 12, start.month + months % 12
days += (months - int(months)) * monthrange(year, month)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def get_timedelta_from_end(parsed_duration, end):
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(1, int(months) + 1):
year = end.year - (1 + (idx - end.month) // 12)
month = 1 + (end.month - idx - 1) % 12
days += monthrange(year, month)[1]
days += (months - int(months)) * monthrange(
end.year - (1 + (int(months)- end.month) // 12),
1 + (end.month - months - 1) % 12)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def parse_iso_duration(expression):
# Based on https://en.wikipedia.org/wiki/ISO_8601#Time_intervals
parsed, expr_t, current = {}, False, expression.lower().strip('p').replace(',', '.')
for designator in ['years', 'months', 'weeks', 'days', 't', 'hours', 'minutes', 'seconds']:
value = current.split(designator[0], 1)
if len(value) == 2:
duration, remainder = value
if duration.isdigit():
parsed[designator] = int(duration)
elif duration.replace('.', '').isdigit() and not remainder:
parsed[designator] = float(duration)
if designator in parsed or designator == 't':
current = remainder
if designator == 't':
expr_t = True
date_specs, time_specs = ['years', 'months', 'days'], ['hours', 'minutes', 'seconds']
parsed_t = len([d for d in parsed if d in time_specs]) > 0
if len(current) or parsed_t != expr_t or ('weeks' in parsed and any(v for v in parsed if v in date_specs)):
raise Exception('Invalid duration')
# The actual timedelta will have to be computed based on a start or end date, to account for
# months lengths, leap days, etc. This returns a dict of the parsed elements
return parsed
@staticmethod
def parse_iso_week(expression):
# https://en.wikipedia.org/wiki/ISO_8601#Week_dates
m = re.match(r'(\d{4})W(\d{2})(\d)(T.+)?', expression.upper().replace('-', ''))
        year, week, day, ts = m.groups()
        ds = datetime.fromisocalendar(int(year), int(week), int(day)).strftime('%Y-%m-%d')
return TimerEventDefinition.get_datetime(ds + (ts or ''))
@staticmethod
def parse_time_or_duration(expression):
if expression.upper().startswith('P'):
return TimerEventDefinition.parse_iso_duration(expression)
elif 'W' in expression.upper():
return TimerEventDefinition.parse_iso_week(expression)
else:
return TimerEventDefinition.get_datetime(expression)
@staticmethod
def parse_iso_recurring_interval(expression):
components = expression.upper().replace('--', '/').strip('R').split('/')
cycles = int(components[0]) if components[0] else -1
start_or_duration = TimerEventDefinition.parse_time_or_duration(components[1])
if len(components) == 3:
end_or_duration = TimerEventDefinition.parse_time_or_duration(components[2])
else:
end_or_duration = None
if isinstance(start_or_duration, datetime):
# Start time + interval duration
start = start_or_duration
duration = TimerEventDefinition.get_timedelta_from_start(end_or_duration, start_or_duration)
elif isinstance(end_or_duration, datetime):
# End time + interval duration
duration = TimerEventDefinition.get_timedelta_from_end(start_or_duration, end_or_duration)
start = end_or_duration - duration
elif end_or_duration is None:
# Just an interval duration, assume a start time of now
start = datetime.now(timezone.utc)
            duration = TimerEventDefinition.get_timedelta_from_start(start_or_duration, start)
else:
raise Exception("Invalid recurring interval")
return cycles, start, duration
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
class TimeDateEventDefinition(TimerEventDefinition):
"""A Timer event represented by a specific date/time."""
def has_fired(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
event_value = my_task.workflow.script_engine.evaluate(my_task, self.expression)
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.parse_time_or_duration(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, my_task._get_internal_data('event_value'))
class DurationTimerEventDefinition(TimerEventDefinition):
"""A timer event represented by a duration"""
def has_fired(self, my_task):
event_value = my_task._get_internal_data("event_value")
if event_value is None:
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
parsed_duration = TimerEventDefinition.parse_iso_duration(expression)
event_value = (datetime.now(timezone.utc) + TimerEventDefinition.get_timedelta_from_start(parsed_duration)).isoformat()
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.get_datetime(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, my_task._get_internal_data('event_value'))
class CycleTimerEventDefinition(TimerEventDefinition):
def cycle_complete(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(expression)
event_value = {'cycles': cycles, 'next': start.isoformat(), 'duration': duration.total_seconds()}
# When the next timer event passes, return True to allow the parent task to generate another child
# Use event fired to indicate that this timer has completed all cycles and the task can be completed
ready = False
if event_value['cycles'] != 0:
next_event = datetime.fromisoformat(event_value['next'])
if next_event < datetime.now(timezone.utc):
event_value['next'] = (next_event + timedelta(seconds=event_value['duration'])).isoformat()
event_value['cycles'] -= 1
ready = True
else:
my_task.internal_data.pop('event_value', None)
my_task.internal_data['event_fired'] = True
my_task._set_internal_data(event_value=event_value)
return ready
def details(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is not None and event_value['cycles'] != 0:
event_value = event_value['next']
return PendingBpmnEvent(self.name, self.__class__.__name__, event_value)
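
The parsing helpers above are all static methods, so their behavior is easy to check in isolation. A minimal sketch of how the three timer flavors consume them (the expression values are illustrative):

from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinition

# A duration parses to a dict of components; the concrete timedelta is computed
# relative to a start date so month lengths and leap days are accounted for.
parsed = TimerEventDefinition.parse_iso_duration('P1DT12H')    # {'days': 1, 'hours': 12}
delta = TimerEventDefinition.get_timedelta_from_start(parsed)  # timedelta(days=1, hours=12)

# A fixed date parses to an aware datetime, normalized to UTC.
when = TimerEventDefinition.parse_time_or_duration('2024-01-01T00:00:00+00:00')

# A recurring interval yields (cycles, start, duration); 'R3/.../P1D' means
# three one-day cycles beginning at the given instant.
cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(
    'R3/2024-01-01T00:00:00+00:00/P1D')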

View File

@ -18,8 +18,8 @@
# 02110-1301 USA
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.event_definitions.simple import TerminateEventDefinition, CancelEventDefinition
from .event_types import ThrowingEvent
from ...event_definitions import TerminateEventDefinition, CancelEventDefinition
class EndEvent(ThrowingEvent):
@ -49,7 +49,7 @@ class EndEvent(ThrowingEvent):
# We are finished. Set the workflow data and cancel all tasks
my_task.workflow.set_data(**my_task.data)
for task in my_task.workflow.get_tasks(TaskState.NOT_FINISHED_MASK, workflow=my_task.workflow):
for task in my_task.workflow.get_tasks(TaskState.NOT_FINISHED_MASK):
task.cancel()
elif isinstance(self.event_definition, CancelEventDefinition):

View File

@ -19,7 +19,9 @@
import time
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.specs.base import TaskSpec
from ...event_definitions import MessageEventDefinition, NoneEventDefinition, CycleTimerEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.simple import NoneEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.timer import CycleTimerEventDefinition
class CatchingEvent(TaskSpec):
@ -31,21 +33,18 @@ class CatchingEvent(TaskSpec):
:param event_definition: the EventDefinition that we must wait for.
"""
super(CatchingEvent, self).__init__(wf_spec, bpmn_id, **kwargs)
super().__init__(wf_spec, bpmn_id, **kwargs)
self.event_definition = event_definition
def catches(self, my_task, event_definition, correlations=None):
if self.event_definition == event_definition:
return all([correlations.get(key) == my_task.workflow.correlations.get(key) for key in correlations ])
else:
return False
def catches(self, my_task, event):
return my_task.task_spec.event_definition.catches(my_task, event)
def catch(self, my_task, event_definition):
def catch(self, my_task, event):
"""
Catch is called by the workflow when the task has matched an event
definition, at which point we can update our task's state.
"""
self.event_definition.catch(my_task, event_definition)
self.event_definition.catch(my_task, event)
my_task.last_update_time = time.time()
my_task._set_state(TaskState.WAITING)
@ -70,10 +69,13 @@ class CatchingEvent(TaskSpec):
def _run_hook(self, my_task):
if isinstance(self.event_definition, MessageEventDefinition):
self.event_definition.update_task_data(my_task)
self.event_definition.update_task_data(my_task)
self.event_definition.reset(my_task)
return super(CatchingEvent, self)._run_hook(my_task)
return super()._run_hook(my_task)
def _predict_hook(self, my_task):
if not isinstance(self.event_definition, CycleTimerEventDefinition):
super()._predict_hook(my_task)
class ThrowingEvent(TaskSpec):
@ -85,10 +87,10 @@ class ThrowingEvent(TaskSpec):
:param event_definition: the EventDefinition to be thrown.
"""
super(ThrowingEvent, self).__init__(wf_spec, bpmn_id, **kwargs)
super().__init__(wf_spec, bpmn_id, **kwargs)
self.event_definition = event_definition
def _run_hook(self, my_task):
super(ThrowingEvent, self)._run_hook(my_task)
super()._run_hook(my_task)
self.event_definition.throw(my_task)
return True
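
With this change the event object itself, rather than a bare event definition plus a correlations dict, flows through the whole catch path. A minimal sketch of the dispatch sequence, assuming `task` is a catching task and `event` a matching BpmnEvent (hypothetical driver code, not part of the diff):

if task.task_spec.catches(task, event):
    # Stores the event in the task's internal data and moves it to WAITING;
    # the payload is applied later, when the task actually runs.
    task.task_spec.catch(task, event)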

View File

@ -46,9 +46,9 @@ class BoundaryEvent(CatchingEvent):
super(BoundaryEvent, self).__init__(wf_spec, bpmn_id, event_definition, **kwargs)
self.cancel_activity = cancel_activity
def catches(self, my_task, event_definition, correlations=None):
def catches(self, my_task, event):
# Boundary events should only be caught while waiting
return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING
return my_task.state == TaskState.WAITING and super().catches(my_task, event)
class EventBasedGateway(CatchingEvent):
@ -57,7 +57,6 @@ class EventBasedGateway(CatchingEvent):
my_task._sync_children(self.outputs, state=TaskState.MAYBE)
def _on_ready_hook(self, my_task):
seen_events = my_task.internal_data.get('seen_events', [])
for child in my_task.children:
if child.task_spec.event_definition not in seen_events:
if not child.internal_data.get('event_fired'):
child.cancel()

View File

@ -24,9 +24,9 @@ from .event_types import CatchingEvent
class StartEvent(CatchingEvent):
"""Task Spec for a bpmn:startEvent node with an optional event definition."""
def catch(self, my_task, event_definition):
def catch(self, my_task, event):
# We might need to revisit a start event after it completes or
# if it got cancelled so we'll still catch messages even if we're finished
if my_task.state == TaskState.COMPLETED or my_task.state == TaskState.CANCELLED:
my_task.workflow.reset_from_task_id(my_task.id)
super(StartEvent, self).catch(my_task, event_definition)
super(StartEvent, self).catch(my_task, event)

View File

@ -69,14 +69,19 @@ class InclusiveGateway(MultiChoice, UnstructuredJoin):
UnstructuredJoin.test(self)
def _check_threshold_unstructured(self, my_task, force=False):
# Look at the tree to find all places where this task is used.
tasks = my_task.workflow.get_tasks_from_spec_name(self.name)
completed_inputs, waiting_tasks = self._get_inputs_with_tokens(my_task)
uncompleted_inputs = [i for i in self.inputs if i not in completed_inputs]
# Look up which tasks have parents completed.
completed_inputs = set([ task.parent.task_spec for task in tasks if task.parent.state == TaskState.COMPLETED ])
        # We only have to complete a task once for it to count, even if it's on multiple paths
for task in waiting_tasks:
if task.task_spec in completed_inputs:
waiting_tasks.remove(task)
# Find waiting tasks
# Exclude tasks whose specs have already been completed
# A spec only has to complete once, even if on multiple paths
waiting_tasks = []
for task in tasks:
if task.parent._has_state(TaskState.DEFINITE_MASK) and task.parent.task_spec not in completed_inputs:
waiting_tasks.append(task.parent)
if force:
# If force is true, complete the task
@ -85,9 +90,9 @@ class InclusiveGateway(MultiChoice, UnstructuredJoin):
# If we have waiting tasks, we're obviously not done
complete = False
else:
# Handle the case where there are paths from active tasks that must go through uncompleted inputs
tasks = my_task.workflow.get_tasks(TaskState.READY | TaskState.WAITING, workflow=my_task.workflow)
sources = [t.task_spec for t in tasks]
# Handle the case where there are paths from active tasks that must go through waiting inputs
waiting_inputs = [i for i in self.inputs if i not in completed_inputs]
sources = [t.task_spec for t in my_task.workflow.get_tasks(TaskState.READY | TaskState.WAITING)]
# This will go back through a task spec's ancestors and return the source, if applicable
def check(spec):
@ -100,9 +105,9 @@ class InclusiveGateway(MultiChoice, UnstructuredJoin):
if source is not None:
sources.remove(source)
# Now check the rest of the uncompleted inputs and see if they can be reached from any of the remaining tasks
# Now check the rest of the waiting inputs and see if they can be reached from any of the remaining tasks
unfinished_paths = []
for spec in uncompleted_inputs:
for spec in waiting_inputs:
if check(spec) is not None:
unfinished_paths.append(spec)
break

View File

@ -75,6 +75,7 @@ class StandardLoopTask(LoopTask):
my_task._set_state(TaskState.WAITING)
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
child = my_task._add_child(task_spec, TaskState.WAITING)
child.triggered = True
child.data = deepcopy(my_task.data)
def child_completed_action(self, my_task, child):
@ -128,6 +129,7 @@ class MultiInstanceTask(LoopTask):
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
child = my_task._add_child(task_spec, TaskState.WAITING)
child.triggered = True
child.data = deepcopy(my_task.data)
if self.input_item is not None:
child.data[self.input_item.bpmn_id] = deepcopy(item)

View File

@ -17,6 +17,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.task import TaskState
from .unstructured_join import UnstructuredJoin
@ -40,7 +41,28 @@ class ParallelGateway(UnstructuredJoin):
Essentially, this means that we must wait until we have a completed parent
task on each incoming sequence.
"""
def _check_threshold_unstructured(self, my_task, force=False):
completed_inputs, waiting_tasks = self._get_inputs_with_tokens(my_task)
return force or len(completed_inputs) >= len(self.inputs), waiting_tasks
tasks = my_task.workflow.get_tasks_from_spec_name(self.name)
# Look up which tasks have parents completed.
waiting_tasks = []
waiting_inputs = set(self.inputs)
def remove_ancestor(task):
            # This traces a task's parents until it finds a spec in the set of waiting inputs
if task.task_spec in waiting_inputs:
waiting_inputs.remove(task.task_spec)
elif task.parent is not None:
remove_ancestor(task.parent)
for task in tasks:
if task.parent.state == TaskState.COMPLETED and task.parent.task_spec in waiting_inputs:
waiting_inputs.remove(task.parent.task_spec)
# Do not wait for descendants of this task
elif task._is_descendant_of(my_task):
remove_ancestor(task)
# Ignore predicted tasks; we don't care about anything not definite
elif task.parent._has_state(TaskState.DEFINITE_MASK):
waiting_tasks.append(task.parent)
return force or len(waiting_inputs) == 0, waiting_tasks

View File

@ -21,9 +21,6 @@ from copy import deepcopy
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.bpmn.specs.control import _BoundaryEventParent
from SpiffWorkflow.bpmn.specs.mixins.events.intermediate_event import BoundaryEvent
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
@ -46,8 +43,7 @@ class SubWorkflowTask(TaskSpec):
self.update_data(my_task, subworkflow)
def _update_hook(self, my_task):
wf = my_task.workflow._get_outermost_workflow(my_task)
subprocess = wf.subprocesses.get(my_task.id)
subprocess = my_task.workflow.top_workflow.subprocesses.get(my_task.id)
if subprocess is None:
super()._update_hook(my_task)
self.create_workflow(my_task)
@ -57,7 +53,7 @@ class SubWorkflowTask(TaskSpec):
return subprocess.is_completed()
def _on_cancel(self, my_task):
subworkflow = my_task.workflow.get_subprocess(my_task)
subworkflow = my_task.workflow.top_workflow.get_subprocess(my_task)
if subworkflow is not None:
subworkflow.cancel()
@ -68,18 +64,18 @@ class SubWorkflowTask(TaskSpec):
        # But our data management is already hopelessly messed up and in dire need of reconsideration
if len(subworkflow.spec.data_objects) > 0:
subworkflow.data = my_task.workflow.data
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
start = subworkflow.get_tasks_from_spec_name('Start')
start[0].set_data(**my_task.data)
def update_data(self, my_task, subworkflow):
my_task.data = deepcopy(subworkflow.last_task.data)
def create_workflow(self, my_task):
subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name)
subworkflow = my_task.workflow.top_workflow.create_subprocess(my_task, self.spec)
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
def start_workflow(self, my_task):
subworkflow = my_task.workflow.get_subprocess(my_task)
subworkflow = my_task.workflow.top_workflow.get_subprocess(my_task)
self.copy_data(my_task, subworkflow)
for child in subworkflow.task_tree.children:
child.task_spec._update(child)
@ -93,7 +89,7 @@ class CallActivity(SubWorkflowTask):
def copy_data(self, my_task, subworkflow):
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
start = subworkflow.get_tasks_from_spec_name('Start')
if subworkflow.spec.io_specification is None or len(subworkflow.spec.io_specification.data_inputs) == 0:
# Copy all task data into start task if no inputs specified
start[0].set_data(**my_task.data)
@ -114,7 +110,7 @@ class CallActivity(SubWorkflowTask):
# Copy all workflow data if no outputs are specified
my_task.data = deepcopy(subworkflow.last_task.data)
else:
end = subworkflow.get_tasks_from_spec_name('End', workflow=subworkflow)
end = subworkflow.get_tasks_from_spec_name('End')
# Otherwise only copy data with the specified names
for var in subworkflow.spec.io_specification.data_outputs:
if var.bpmn_id not in end[0].data:
@ -130,23 +126,3 @@ class TransactionSubprocess(SubWorkflowTask):
def __init__(self, wf_spec, bpmn_id, subworkflow_spec, **kwargs):
super(TransactionSubprocess, self).__init__(wf_spec, bpmn_id, subworkflow_spec, True, **kwargs)
def _on_complete_hook(self, my_task):
# It is possible that a transaction could end by throwing an event caught by a boundary event attached to it
# In that case both the subprocess and the boundary event become ready and whichever one gets executed
# first will cancel the other.
# So here I'm checking whether this has happened and cancelling this task in that case.
# I really hate this fix, so I'm only putting it in transactions because that's where I'm having the problem,
# but it's likely to be a general issue that we miraculously haven't run up against.
        # We desperately need to get rid of this BoundaryEventParent BS.
parent = my_task.parent
if isinstance(parent.task_spec, _BoundaryEventParent) and len(
[t for t in parent.children if
isinstance(t.task_spec, BoundaryEvent) and
t.task_spec.cancel_activity and
t.state==TaskState.READY
]):
my_task._drop_children()
my_task._set_state(TaskState.CANCELLED)
else:
super()._on_complete_hook(my_task)

View File

@ -26,22 +26,6 @@ class UnstructuredJoin(Join):
A helper subclass of Join that makes it work in a slightly friendlier way
for the BPMN style threading
"""
def _get_inputs_with_tokens(self, my_task):
# Look at the tree to find all places where this task is used.
tasks = [ t for t in my_task.workflow.get_tasks_from_spec_name(self.name) if t.workflow == my_task.workflow ]
# Look up which tasks have parents completed.
waiting_tasks = []
completed_inputs = set()
for task in tasks:
if task.parent.state == TaskState.COMPLETED:
completed_inputs.add(task.parent.task_spec)
# Ignore predicted tasks; we don't care about anything not definite
elif task.parent._has_state(TaskState.READY | TaskState.FUTURE | TaskState.WAITING):
waiting_tasks.append(task.parent)
return completed_inputs, waiting_tasks
def _do_join(self, my_task):
split_task = self._get_split_task(my_task)

View File

@ -17,34 +17,48 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import copy
from SpiffWorkflow.task import TaskState, Task
from SpiffWorkflow.workflow import Workflow
from SpiffWorkflow.exceptions import WorkflowException, TaskNotFoundException
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException
from SpiffWorkflow.bpmn.specs.mixins.events.event_types import CatchingEvent
from SpiffWorkflow.bpmn.specs.mixins.events.start_event import StartEvent
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import CallActivity
from SpiffWorkflow.bpmn.specs.event_definitions import (
MessageEventDefinition,
MultipleEventDefinition,
NamedEventDefinition,
TimerEventDefinition,
)
from SpiffWorkflow.bpmn.specs.control import _BoundaryEventParent
from SpiffWorkflow.bpmn.specs.control import BoundaryEventSplit
from .PythonScriptEngine import PythonScriptEngine
class BpmnMessage:
class BpmnSubWorkflow(Workflow):
def __init__(self, correlations, name, payload):
def __init__(self, spec, parent_task_id, top_workflow, **kwargs):
super().__init__(spec, **kwargs)
self.parent_task_id = parent_task_id
self.top_workflow = top_workflow
self.correlations = {}
self.correlations = correlations or {}
self.name = name
self.payload = payload
@property
def script_engine(self):
return self.top_workflow.script_engine
@property
def parent_workflow(self):
task = self.top_workflow.get_task_from_id(self.parent_task_id)
return task.workflow
@property
def depth(self):
current, depth = self, 0
while current.parent_workflow is not None:
depth += 1
current = current.parent_workflow
return depth
    def get_task_from_id(self, task_id):
        # Return None rather than raising when the task is not in this
        # subworkflow, so the top-level workflow can keep searching elsewhere.
        try:
            return super().get_task_from_id(task_id)
        except TaskNotFoundException:
            pass
class BpmnWorkflow(Workflow):
@ -53,7 +67,7 @@ class BpmnWorkflow(Workflow):
Spiff Workflow class with a few extra methods and attributes.
"""
def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None, **kwargs):
def __init__(self, spec, subprocess_specs=None, script_engine=None, **kwargs):
"""
Constructor.
@ -61,271 +75,56 @@ class BpmnWorkflow(Workflow):
need a specialised version. Defaults to the script engine of the top
most workflow, or to the PythonScriptEngine if none is provided.
"""
super(BpmnWorkflow, self).__init__(top_level_spec, **kwargs)
self.name = name or top_level_spec.name
self.subprocess_specs = subprocess_specs or {}
self.subprocesses = {}
self.bpmn_messages = []
self.bpmn_events = []
self.correlations = {}
super(BpmnWorkflow, self).__init__(spec, **kwargs)
self.__script_engine = script_engine or PythonScriptEngine()
@property
def script_engine(self):
# The outermost script engine always takes precedence.
# All call activities, sub-workflows and DMNs should use the
# workflow engine of the outermost workflow.
return self._get_outermost_workflow().__script_engine
return self.__script_engine
@script_engine.setter
def script_engine(self, engine):
self.__script_engine = engine
def create_subprocess(self, my_task, spec_name, name):
# This creates a subprocess for an existing task
workflow = self._get_outermost_workflow(my_task)
subprocess = BpmnWorkflow(
workflow.subprocess_specs[spec_name], name=name,
script_engine=self.script_engine,
parent=my_task.workflow)
workflow.subprocesses[my_task.id] = subprocess
return subprocess
@property
def top_workflow(self):
return self
def delete_subprocess(self, my_task):
workflow = self._get_outermost_workflow(my_task)
if my_task.id in workflow.subprocesses:
del workflow.subprocesses[my_task.id]
@property
def parent_task_id(self):
return None
@property
def parent_workflow(self):
return None
@property
def depth(self):
return 0
def get_subprocess(self, my_task):
workflow = self._get_outermost_workflow(my_task)
return workflow.subprocesses.get(my_task.id)
def connect_subprocess(self, spec_name, name):
        # This creates a new task associated with a process when an event that kicks off a process is received
# I need to know what class is being used to create new processes in this case, and this seems slightly
# less bad than adding yet another argument. Still sucks though.
# TODO: Make collaborations a class rather than trying to shoehorn them into a process.
for spec in self.spec.task_specs.values():
if isinstance(spec, CallActivity):
spec_class = spec.__class__
break
else:
# Default to the mixin class, which will probably fail in many cases.
spec_class = CallActivity
new = spec_class(self.spec, name, spec_name)
self.spec.start.connect(new)
task = Task(self, new)
start = self.get_tasks_from_spec_name('Start', workflow=self)[0]
start.children.append(task)
task.parent = start
# This (indirectly) calls create_subprocess
task.task_spec._update(task)
return self.subprocesses[task.id]
def _get_outermost_workflow(self, task=None):
workflow = task.workflow if task is not None else self
while workflow != workflow.outer_workflow:
workflow = workflow.outer_workflow
return workflow
def _get_or_create_subprocess(self, task_spec, wf_spec):
if isinstance(task_spec.event_definition, MultipleEventDefinition):
for sp in self.subprocesses.values():
start = sp.get_tasks_from_spec_name(task_spec.name)
if len(start) and start[0].state == TaskState.WAITING:
return sp
return self.connect_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}')
def catch(self, event_definition, correlations=None):
"""
Tasks can always catch events, regardless of their state. The
event information is stored in the tasks internal data and processed
when the task is reached in the workflow. If a task should only
        receive messages while it is running (e.g. a boundary event), the task
should call the event_definition's reset method before executing to
clear out a stale message.
We might be catching an event that was thrown from some other part of
our own workflow, and it needs to continue out, but if it originated
externally, we should not pass it on.
:param event_definition: the thrown event
"""
# Start a subprocess for known specs with start events that catch this
# This is totally hypocritical of me given how I've argued that specs should
# be immutable, but I see no other way of doing this.
for name, spec in self.subprocess_specs.items():
for task_spec in list(spec.task_specs.values()):
if isinstance(task_spec, StartEvent) and task_spec.event_definition == event_definition:
subprocess = self._get_or_create_subprocess(task_spec, spec)
subprocess.correlations.update(correlations or {})
# We need to get all the tasks that catch an event before completing any of them
# in order to prevent the scenario where multiple boundary events catch the
# same event and the first executed cancels the rest
tasks = [ t for t in self.get_catching_tasks() if t.task_spec.catches(t, event_definition, correlations or {}) ]
for task in tasks:
task.task_spec.catch(task, event_definition)
# Move any tasks that received message to READY
self.refresh_waiting_tasks()
# Figure out if we need to create an external message
if len(tasks) == 0 and isinstance(event_definition, MessageEventDefinition):
self.bpmn_messages.append(
BpmnMessage(correlations, event_definition.name, event_definition.payload))
def get_bpmn_messages(self):
messages = self.bpmn_messages
self.bpmn_messages = []
return messages
def catch_bpmn_message(self, name, payload):
"""Allows this workflow to catch an externally generated bpmn message.
Raises an error if this workflow is not waiting on the given message."""
event_definition = MessageEventDefinition(name)
event_definition.payload = payload
        # There should be one and only one task that can accept the message
# (messages are one to one, not one to many)
tasks = [t for t in self.get_waiting_tasks() if t.task_spec.event_definition == event_definition]
if len(tasks) == 0:
raise WorkflowException(
f"This process is not waiting on a message named '{event_definition.name}'")
if len(tasks) > 1:
raise WorkflowException(
f"This process has multiple tasks waiting on the same message '{event_definition.name}', which is not supported. ")
task = tasks[0]
conversation = task.task_spec.event_definition.conversation()
if not conversation:
raise WorkflowTaskException(
"The waiting task and message payload can not be matched to any correlation key (conversation topic). "
"And is therefor unable to respond to the given message.", task)
updated_props = self._correlate(conversation, payload, task)
task.task_spec.catch(task, event_definition)
self.refresh_waiting_tasks()
self.correlations[conversation] = updated_props
def _correlate(self, conversation, payload, task):
"""Assures that the provided payload correlates to the given
        task's event definition and this workflow's own correlation
        properties. Returns an updated property list if successful."""
receive_event = task.task_spec.event_definition
current_props = self.correlations.get(conversation, {})
updated_props = copy.copy(current_props)
for prop in receive_event.correlation_properties:
try:
new_val = self.script_engine._evaluate(
prop.retrieval_expression, payload
)
except Exception as e:
raise WorkflowTaskException("Unable to accept the BPMN message. "
"The payload must contain "
f"'{prop.retrieval_expression}'", task, e)
if prop.name in current_props and \
new_val != updated_props[prop.name]:
raise WorkflowTaskException("Unable to accept the BPMN message. "
"The payload does not match. Expected "
f"'{prop.retrieval_expression}' to equal "
f"{current_props[prop.name]}.", task)
else:
updated_props[prop.name] = new_val
return updated_props
def waiting_events(self):
        # Ultimately I'd like to add an event class so that EventDefinitions would not do double duty as both specs
# and instantiations, and this method would return that. However, I think that's beyond the scope of the
# current request.
events = []
for task in [t for t in self.get_waiting_tasks() if isinstance(t.task_spec, CatchingEvent)]:
event_definition = task.task_spec.event_definition
value = None
if isinstance(event_definition, TimerEventDefinition):
value = event_definition.timer_value(task)
elif isinstance(event_definition, MessageEventDefinition):
value = event_definition.correlation_properties
events.append({
'event_type': event_definition.__class__.__name__,
'name': event_definition.name if isinstance(event_definition, NamedEventDefinition) else None,
'value': value
})
return events
def do_engine_steps(self, exit_at = None, will_complete_task=None, did_complete_task=None):
"""
Execute any READY tasks that are engine specific (for example, gateways
or script tasks). This is done in a loop, so it will keep completing
those tasks until there are only READY User tasks, or WAITING tasks
left.
:param exit_at: After executing a task with a name matching this param return the task object
:param will_complete_task: Callback that will be called prior to completing a task
:param did_complete_task: Callback that will be called after completing a task
"""
engine_steps = list([t for t in self.get_tasks(TaskState.READY) if not t.task_spec.manual])
while engine_steps:
for task in engine_steps:
if will_complete_task is not None:
will_complete_task(task)
task.run()
if did_complete_task is not None:
did_complete_task(task)
if task.task_spec.name == exit_at:
return task
engine_steps = list([t for t in self.get_tasks(TaskState.READY) if not t.task_spec.manual])
def refresh_waiting_tasks(self,
will_refresh_task=None,
did_refresh_task=None):
"""
Refresh the state of all WAITING tasks. This will, for example, update
Catching Timer Events whose waiting time has passed.
:param will_refresh_task: Callback that will be called prior to refreshing a task
:param did_refresh_task: Callback that will be called after refreshing a task
"""
for my_task in self.get_tasks(TaskState.WAITING):
if will_refresh_task is not None:
will_refresh_task(my_task)
# This seems redundant, but the state could have been updated by another waiting task and no longer be waiting.
# Someday, I would like to get rid of this method, and also do_engine_steps
if my_task.state == TaskState.WAITING:
my_task.task_spec._update(my_task)
if did_refresh_task is not None:
did_refresh_task(my_task)
def get_tasks(self, state=TaskState.ANY_MASK, workflow=None):
tasks = []
wf = workflow or self
for task in Workflow.get_tasks_iterator(wf):
subprocess = self.subprocesses.get(task.id)
if task._has_state(state):
tasks.append(task)
if subprocess is not None:
tasks.extend(self.get_tasks(state, subprocess))
return tasks
def get_tasks_from_spec_name(self, name, workflow=None):
return [t for t in self.get_tasks(workflow=workflow) if t.task_spec.name == name]
def get_tasks(self, state=TaskState.ANY_MASK, workflow=None):
# Now that I've revisited and had to ask myself what the hell was I doing, I realize I should comment this
tasks = []
top = self._get_outermost_workflow()
# I think it makes more sense to start with the current workflow, which is probably going to be the top
# most of the time anyway
wf = workflow or self
        # We can't filter the iterator on the state because we have to descend into subprocesses, and the subprocess
        # task will almost surely be in a different state than the tasks we want
for task in Workflow.get_tasks_iterator(wf):
subprocess = top.subprocesses.get(task.id)
if task._has_state(state):
tasks.append(task)
if subprocess is not None:
tasks.extend(subprocess.get_tasks(state, subprocess))
return tasks
def get_task_from_id(self, task_id, workflow=None):
for task in self.get_tasks(workflow=workflow):
if task.id == task_id:
return task
raise TaskNotFoundException(f'A task with the given task_id ({task_id}) was not found', task_spec=self.spec)
def get_ready_user_tasks(self, lane=None, workflow=None):
"""Returns a list of User Tasks that are READY for user action"""
if lane is not None:
return [t for t in self.get_tasks(TaskState.READY, workflow)
if t.task_spec.manual and t.task_spec.lane == lane]
return [t for t in self.get_tasks(TaskState.READY, workflow) if t.task_spec.manual and t.task_spec.lane == lane]
else:
return [t for t in self.get_tasks(TaskState.READY, workflow) if t.task_spec.manual]
@ -336,40 +135,162 @@ class BpmnWorkflow(Workflow):
def get_catching_tasks(self, workflow=None):
return [task for task in self.get_tasks(workflow=workflow) if isinstance(task.task_spec, CatchingEvent)]
def reset_from_task_id(self, task_id, data=None):
"""Override method from base class, and assures that if the task
being reset has a boundary event parent, we reset that parent and
run it rather than resetting to the current task. This assures
our boundary events are set to the correct state."""
def create_subprocess(self, my_task, spec_name):
# This creates a subprocess for an existing task
subprocess = BpmnSubWorkflow(
self.subprocess_specs[spec_name],
parent_task_id=my_task.id,
top_workflow=self)
self.subprocesses[my_task.id] = subprocess
return subprocess
def get_subprocess(self, my_task):
return self.subprocesses.get(my_task.id)
def delete_subprocess(self, my_task):
subprocess = self.subprocesses.get(my_task.id)
tasks = subprocess.get_tasks()
for sp in [c for c in self.subprocesses.values() if c.parent_workflow == subprocess]:
tasks.extend(self.delete_subprocess(self.get_task_from_id(sp.parent_task_id)))
del self.subprocesses[my_task.id]
return tasks
def get_active_subprocesses(self):
return [sp for sp in self.subprocesses.values() if not sp.is_completed()]
def catch(self, event):
"""
Tasks can always catch events, regardless of their state. The event information is stored in the task's
internal data and processed when the task is reached in the workflow. If a task should only receive messages
        while it is running (e.g. a boundary event), the task should call the event_definition's reset method before
executing to clear out a stale message.
:param event: the thrown event
"""
if event.target is None:
self.update_collaboration(event)
tasks = [t for t in self.get_catching_tasks() if t.task_spec.catches(t, event)]
# Figure out if we need to create an external event
if len(tasks) == 0:
self.bpmn_events.append(event)
else:
catches = lambda t: isinstance(t.task_spec, CatchingEvent) and t.task_spec.catches(t, event)
tasks = [t for t in event.target.get_tasks_iterator(TaskState.NOT_FINISHED_MASK) if catches(t)]
for task in tasks:
task.task_spec.catch(task, event)
self.refresh_waiting_tasks()
def send_event(self, event):
"""Allows this workflow to catch an externally generated event."""
tasks = [t for t in self.get_catching_tasks() if t.task_spec.catches(t, event)]
if len(tasks) == 0:
raise WorkflowException(f"This process is not waiting for {event.event_definition.name}")
for task in tasks:
task.task_spec.catch(task, event)
self.refresh_waiting_tasks()
def get_events(self):
"""Returns the list of events that cannot be handled from within this workflow."""
events = self.bpmn_events
self.bpmn_events = []
return events
def waiting_events(self):
return [t.task_spec.event_definition.details(t) for t in self.get_waiting_tasks()
if isinstance(t.task_spec, CatchingEvent)]
def do_engine_steps(self, will_complete_task=None, did_complete_task=None):
"""
Execute any READY tasks that are engine specific (for example, gateways
or script tasks). This is done in a loop, so it will keep completing
those tasks until there are only READY User tasks, or WAITING tasks
left.
:param will_complete_task: Callback that will be called prior to completing a task
:param did_complete_task: Callback that will be called after completing a task
"""
def update_workflow(wf):
count = 0
# Wanted to use the iterator method here, but at least this is a shorter list
for task in wf.get_tasks(TaskState.READY):
if not task.task_spec.manual:
if will_complete_task is not None:
will_complete_task(task)
task.run()
count += 1
if did_complete_task is not None:
did_complete_task(task)
return count
active_subprocesses = self.get_active_subprocesses()
for subprocess in sorted(active_subprocesses, key=lambda v: v.depth, reverse=True):
count = None
while count is None or count > 0:
count = update_workflow(subprocess)
if subprocess.parent_task_id is not None:
task = self.get_task_from_id(subprocess.parent_task_id)
task.task_spec._update(task)
count = update_workflow(self)
if count > 0 or len(self.get_active_subprocesses()) > len(active_subprocesses):
self.do_engine_steps(will_complete_task, did_complete_task)
def refresh_waiting_tasks(self, will_refresh_task=None, did_refresh_task=None):
"""
Refresh the state of all WAITING tasks. This will, for example, update
Catching Timer Events whose waiting time has passed.
:param will_refresh_task: Callback that will be called prior to refreshing a task
:param did_refresh_task: Callback that will be called after refreshing a task
"""
def update_task(task):
if will_refresh_task is not None:
will_refresh_task(task)
task.task_spec._update(task)
if did_refresh_task is not None:
did_refresh_task(task)
for subprocess in sorted(self.get_active_subprocesses(), key=lambda v: v.depth, reverse=True):
for task in subprocess.get_tasks_iterator(TaskState.WAITING):
update_task(task)
for task in self.get_tasks_iterator(TaskState.WAITING):
update_task(task)
def get_task_from_id(self, task_id):
for subprocess in self.subprocesses.values():
task = subprocess.get_task_from_id(task_id)
if task is not None:
return task
return super().get_task_from_id(task_id)
def reset_from_task_id(self, task_id, data=None, remove_subprocess=True):
task = self.get_task_from_id(task_id)
run_task_at_end = False
if isinstance(task.parent.task_spec, _BoundaryEventParent):
if isinstance(task.parent.task_spec, BoundaryEventSplit):
task = task.parent
            run_task_at_end = True  # we jumped up one level, so execute it so we end on the task that was requested
descendants = super().reset_from_task_id(task_id, data)
descendant_ids = [t.id for t in descendants]
top = self._get_outermost_workflow()
descendants = []
# Since recursive deletion of subprocesses requires access to the tasks, we have to delete any subprocesses first
        # We also need different behavior for the case where we explicitly reset to a subprocess (in which case we delete it)
# vs resetting inside (where we leave it and reset the tasks that descend from it)
for item in task:
if item == task and not remove_subprocess:
continue
if item.id in self.subprocesses:
descendants.extend(self.delete_subprocess(item))
descendants.extend(super().reset_from_task_id(task.id, data))
delete, reset = [], []
for sp_id, sp in top.subprocesses.items():
if sp_id in descendant_ids:
delete.append(sp_id)
delete.extend([t.id for t in sp.get_tasks() if t.id in top.subprocesses])
if task in sp.get_tasks():
reset.append(sp_id)
# Remove any subprocesses for removed tasks
for sp_id in delete:
del top.subprocesses[sp_id]
# Reset any containing subprocesses
for sp_id in reset:
descendants.extend(self.reset_from_task_id(sp_id))
sp_task = self.get_task_from_id(sp_id)
sp_task.state = TaskState.WAITING
if task.workflow.parent_task_id is not None:
sp_task = self.get_task_from_id(task.workflow.parent_task_id)
descendants.extend(self.reset_from_task_id(sp_task.id, remove_subprocess=False))
sp_task._set_state(TaskState.WAITING)
if run_task_at_end:
task.run()
@ -381,9 +302,8 @@ class BpmnWorkflow(Workflow):
wf = workflow or self
cancelled = Workflow.cancel(wf)
cancelled_ids = [t.id for t in cancelled]
top = self._get_outermost_workflow()
to_cancel = []
for sp_id, sp in top.subprocesses.items():
for sp_id, sp in self.subprocesses.items():
if sp_id in cancelled_ids:
to_cancel.append(sp)
@ -391,3 +311,41 @@ class BpmnWorkflow(Workflow):
cancelled.extend(self.cancel(sp))
return cancelled
def update_collaboration(self, event):
def get_or_create_subprocess(task_spec, wf_spec):
for sp in self.subprocesses.values():
start = sp.get_tasks_from_spec_name(task_spec.name)
if len(start) and start[0].state == TaskState.WAITING:
return sp
            # This creates a new task associated with a process when an event that kicks off a process is received
# I need to know what class is being used to create new processes in this case, and this seems slightly
# less bad than adding yet another argument. Still sucks though.
# TODO: Make collaborations a class rather than trying to shoehorn them into a process.
for spec in self.spec.task_specs.values():
if isinstance(spec, CallActivity):
spec_class = spec.__class__
break
else:
# Default to the mixin class, which will probably fail in many cases.
spec_class = CallActivity
new = spec_class(self.spec, f'{wf_spec.name}_{len(self.subprocesses)}', wf_spec.name)
self.spec.start.connect(new)
task = Task(self, new)
start = self.get_tasks_from_spec_name('Start', workflow=self)[0]
start.children.append(task)
task.parent = start
# This (indirectly) calls create_subprocess
task.task_spec._update(task)
return self.subprocesses[task.id]
# Start a subprocess for known specs with start events that catch this
for spec in self.subprocess_specs.values():
for task_spec in spec.task_specs.values():
if isinstance(task_spec, StartEvent) and task_spec.event_definition == event.event_definition:
subprocess = get_or_create_subprocess(task_spec, spec)
subprocess.correlations.update(event.correlations)
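
The rewritten BpmnWorkflow now owns all subprocesses and all external event traffic. A minimal usage sketch, assuming `spec` and `subprocess_specs` came from a BpmnParser and `external_event` is a BpmnEvent built by the caller (the names are illustrative, not part of the diff):

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

workflow = BpmnWorkflow(spec, subprocess_specs)
workflow.do_engine_steps()

# Events no task in this workflow could catch are queued for the caller.
for event in workflow.get_events():
    print(event.event_definition.name, event.payload)

# Catching tasks report what they are waiting for as PendingBpmnEvents.
for pending in workflow.waiting_events():
    print(pending.name, pending.event_type, pending.value)

# An externally generated event is delivered with send_event, which raises
# a WorkflowException if nothing is waiting for it.
workflow.send_event(external_event)
workflow.refresh_waiting_tasks()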

View File

@ -29,7 +29,7 @@ class MessageEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
dct['payload'] = event_definition.payload
dct['expression'] = event_definition.expression
dct['result_var'] = event_definition.result_var
return dct

View File

@ -17,7 +17,8 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
class MessageEventDefinition(MessageEventDefinition):
"""
@ -29,34 +30,25 @@ class MessageEventDefinition(MessageEventDefinition):
# this should be revisited: for one thing, we're relying on some Camunda-specific
# properties.
def __init__(self, name, correlation_properties=None, payload=None, result_var=None, **kwargs):
def __init__(self, name, correlation_properties=None, expression=None, result_var=None, **kwargs):
super(MessageEventDefinition, self).__init__(name, correlation_properties, **kwargs)
self.payload = payload
self.expression = expression
self.result_var = result_var
# The BPMN spec says that Messages should not be used within a process; however
        # our Camunda workflows depend on it
self.internal = True
def throw(self, my_task):
# We need to evaluate the message payload in the context of this task
result = my_task.workflow.script_engine.evaluate(my_task, self.payload)
# We can't update our own payload, because if this task is reached again
# we have to evaluate it again so we have to create a new event
event = MessageEventDefinition(self.name, payload=result, result_var=self.result_var)
self._throw(event, my_task.workflow, my_task.workflow.outer_workflow)
def update_internal_data(self, my_task, event_definition):
if event_definition.result_var is None:
result_var = f'{my_task.task_spec.name}_Response'
else:
result_var = event_definition.result_var
# Prevent this from conflicting
my_task.internal_data[self.name] = {
'payload': event_definition.payload,
'result_var': result_var
result = my_task.workflow.script_engine.evaluate(my_task, self.expression)
payload = {
'payload': result,
'result_var': self.result_var
}
event = BpmnEvent(self, payload=payload)
my_task.workflow.top_workflow.catch(event)
def update_internal_data(self, my_task, event):
if event.payload.get('result_var') is None:
event.payload['result_var'] = f'{my_task.task_spec.name}_Response'
my_task.internal_data[self.name] = event.payload
def update_task_data(self, my_task):
event_data = my_task.internal_data.get(self.name)
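
In the Spiff variant, the evaluated expression now travels inside the event payload together with result_var, and the catching side fills in a default of '<task name>_Response' when none was given. A minimal sketch of the payload shape (the message name and expression are illustrative assumptions):

from SpiffWorkflow.spiff.specs.event_definitions import MessageEventDefinition

msg = MessageEventDefinition('order_placed', expression='order_id', result_var='order_response')
# When thrown, the expression is evaluated in the throwing task's context and wrapped as:
#   BpmnEvent(msg, payload={'payload': <result>, 'result_var': 'order_response'})
# With result_var=None, the catcher stores the result under '<task name>_Response'.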

View File

@ -505,12 +505,11 @@ class DictionarySerializer(Serializer):
workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs)
# Re-connect parents and update states if necessary
tasklist = workflow.get_tasks()
root = workflow.get_tasks_from_spec_name('Root')[0]
update_state = root.state != TaskState.COMPLETED
for task in tasklist:
for task in workflow.get_tasks_iterator():
if task.parent is not None:
task.parent = workflow.get_task_from_id(task.parent, tasklist)
task.parent = workflow.get_task_from_id(task.parent)
if update_state:
if task.state == 32:
task.state = TaskState.COMPLETED
@ -518,8 +517,7 @@ class DictionarySerializer(Serializer):
task.state = TaskState.CANCELLED
if workflow.last_task is not None:
workflow.last_task = workflow.get_task_from_id(s_state['last_task'],tasklist)
workflow.update_task_mapping()
workflow.last_task = workflow.get_task_from_id(s_state['last_task'])
return workflow
@ -531,7 +529,6 @@ class DictionarySerializer(Serializer):
" internal_data to store the subworkflow).")
s_state = dict()
s_state['id'] = task.id
s_state['workflow_name'] = task.workflow.name
s_state['parent'] = task.parent.id if task.parent is not None else None
if not skip_children:
s_state['children'] = [self.serialize_task(child) for child in task.children]
@ -548,7 +545,7 @@ class DictionarySerializer(Serializer):
old_spec_name = s_state['task_spec']
if old_spec_name in ignored_specs:
return None
task_spec = workflow.get_task_spec_from_name(old_spec_name)
task_spec = workflow.spec.get_task_spec_from_name(old_spec_name)
if task_spec is None:
raise MissingSpecError("Unknown task spec: " + old_spec_name)
task = Task(workflow, task_spec)

View File

@ -656,9 +656,6 @@ class XmlSerializer(Serializer):
if workflow.last_task is not None:
SubElement(elem, 'last-task').text = str(workflow.last_task.id)
# outer_workflow
# SubElement(elem, 'outer-workflow').text = workflow.outer_workflow.id
if workflow.success:
SubElement(elem, 'success')
task_tree_elem = SubElement(elem, 'task-tree')
@ -674,10 +671,6 @@ class XmlSerializer(Serializer):
workflow.data = self.deserialize_value_map(elem.find('data'))
workflow.success = elem.find('success') is not None
# outer_workflow
# workflow.outer_workflow =
# find_workflow_by_id(remap_workflow_id(elem['outer_workflow']))
task_tree_elem = elem.find('task-tree')
workflow.task_tree = self.deserialize_task(workflow, task_tree_elem[0])
@ -731,7 +724,7 @@ class XmlSerializer(Serializer):
assert isinstance(workflow, Workflow)
task_spec_name = elem.findtext('spec')
task_spec = workflow.get_task_spec_from_name(task_spec_name)
task_spec = workflow.spec.get_task_spec_from_name(task_spec_name)
task = Task(workflow, task_spec)
task.id = elem.findtext('id')
# The parent is later resolved by the workflow deserializer

View File

@ -32,7 +32,7 @@ class CancelTask(Trigger):
def _run_hook(self, my_task):
for task_name in self.context:
cancel_tasks = my_task.workflow.get_task_spec_from_name(task_name)
cancel_tasks = my_task.workflow.spec.get_task_spec_from_name(task_name)
for cancel_task in my_task._get_root()._find_any(cancel_tasks):
cancel_task.cancel()
return True

View File

@ -57,7 +57,7 @@ class Choose(Trigger):
self.choice = choice is not None and choice or []
def _run_hook(self, my_task):
context = my_task.workflow.get_task_spec_from_name(self.context)
context = my_task.workflow.spec.get_task_spec_from_name(self.context)
triggered = []
for task in my_task.workflow.task_tree:
if task.thread_id != my_task.thread_id:

View File

@ -64,10 +64,10 @@ class ExclusiveChoice(MultiChoice):
def _run_hook(self, my_task):
output = self._wf_spec.get_task_spec_from_name(self.default_task_spec)
output = my_task.workflow.spec.get_task_spec_from_name(self.default_task_spec)
for condition, spec_name in self.cond_task_specs:
if condition is not None and condition._matches(my_task):
output = self._wf_spec.get_task_spec_from_name(spec_name)
output = my_task.workflow.spec.get_task_spec_from_name(spec_name)
break
if output is None:

View File

@ -54,7 +54,7 @@ class Gate(TaskSpec):
def _update_hook(self, my_task):
super()._update_hook(my_task)
context_task = my_task.workflow.get_task_spec_from_name(self.context)
context_task = my_task.workflow.spec.get_task_spec_from_name(self.context)
root_task = my_task.workflow.task_tree
for task in root_task._find_any(context_task):
if task.thread_id != my_task.thread_id:

View File

@ -139,7 +139,7 @@ class Join(TaskSpec):
# We are looking for all task instances that must be joined.
# We limit our search by starting at the split point.
if self.split_task:
task_spec = my_task.workflow.get_task_spec_from_name(self.split_task)
task_spec = my_task.workflow.spec.get_task_spec_from_name(self.split_task)
split_task = my_task._find_ancestor(task_spec)
else:
split_task = my_task.workflow.task_tree

View File

@ -95,9 +95,9 @@ class MultiChoice(TaskSpec):
if self.choice is not None and output not in self.choice:
continue
if condition is None:
unconditional.append(self._wf_spec.get_task_spec_from_name(output))
unconditional.append(my_task.workflow.spec.get_task_spec_from_name(output))
else:
conditional.append(self._wf_spec.get_task_spec_from_name(output))
conditional.append(my_task.workflow.spec.get_task_spec_from_name(output))
state = TaskState.MAYBE if my_task.state == TaskState.MAYBE else TaskState.LIKELY
my_task._sync_children(unconditional, state)
for spec in conditional:
@ -109,7 +109,7 @@ class MultiChoice(TaskSpec):
if self.choice is not None and output not in self.choice:
continue
if condition is None or condition._matches(my_task):
outputs.append(self._wf_spec.get_task_spec_from_name(output))
outputs.append(my_task.workflow.spec.get_task_spec_from_name(output))
return outputs
def _run_hook(self, my_task):

View File

@ -98,8 +98,7 @@ class SubWorkflow(TaskSpec):
with open(file_name) as fp:
xml = etree.parse(fp).getroot()
wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file_name)
outer_workflow = my_task.workflow.outer_workflow
subworkflow = Workflow(wf_spec, parent=outer_workflow)
subworkflow = Workflow(wf_spec)
my_task._sync_children(self.outputs, TaskState.FUTURE)
for child in subworkflow.task_tree.children:
my_task.children.insert(0, child)

View File

@ -105,7 +105,7 @@ class ThreadMerge(Join):
my_task._set_state(TaskState.WAITING)
return
split_task_spec = my_task.workflow.get_task_spec_from_name(self.split_task)
split_task_spec = my_task.workflow.spec.get_task_spec_from_name(self.split_task)
split_task = my_task._find_ancestor(split_task_spec)
# Find the inbound task that was completed last.

View File

@ -84,7 +84,7 @@ class Trigger(TaskSpec):
times = int(valueof(my_task, self.times, 1)) + self.queued
for i in range(times):
for task_name in self.context:
task_spec = my_task.workflow.get_task_spec_from_name(task_name)
task_spec = my_task.workflow.spec.get_task_spec_from_name(task_name)
task_spec._on_trigger(my_task)
self.queued = 0
return True

View File

@ -18,10 +18,20 @@
# 02110-1301 USA
from SpiffWorkflow.bpmn.parser.event_parsers import EventDefinitionParser, ReceiveTaskParser
from SpiffWorkflow.bpmn.parser.event_parsers import StartEventParser, EndEventParser, \
IntermediateCatchEventParser, IntermediateThrowEventParser, BoundaryEventParser, \
SendTaskParser
from SpiffWorkflow.spiff.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.parser.event_parsers import (
StartEventParser,
EndEventParser,
IntermediateCatchEventParser,
IntermediateThrowEventParser,
BoundaryEventParser,
SendTaskParser,
)
from SpiffWorkflow.spiff.specs.event_definitions import (
MessageEventDefinition,
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
)
from SpiffWorkflow.bpmn.parser.util import one
from SpiffWorkflow.spiff.parser.task_spec import SpiffTaskParser
@ -47,6 +57,50 @@ class SpiffEventDefinitionParser(SpiffTaskParser, EventDefinitionParser):
message_var=extensions.get('messageVariable')
)
def parse_signal_event(self, signal_event):
"""Parse a Spiff signal event"""
signal_ref = signal_event.get('signalRef')
if signal_ref is not None:
signal = one(self.doc_xpath(f'.//bpmn:signal[@id="{signal_ref}"]'))
name = signal.get('name')
extensions = self.parse_extensions(signal)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = signal_event.getparent().get('name')
expression, variable = None, None
return SignalEventDefinition(name, expression=expression, variable=variable)
def parse_error_event(self, error_event):
"""Parse a Spiff error event"""
error_ref = error_event.get('errorRef')
if error_ref is not None:
error = one(self.doc_xpath(f'.//bpmn:error[@id="{error_ref}"]'))
name = error.get('name')
code = error.get('errorCode')
extensions = self.parse_extensions(error)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = error_event.getparent().get('name')
code, expression, variable = None, None, None
return ErrorEventDefinition(name, expression=expression, variable=variable, code=code)
def parse_escalation_event(self, escalation_event):
"""Parse a Spiff error event"""
escalation_ref = escalation_event.get('escalationRef')
if escalation_ref is not None:
escalation = one(self.doc_xpath(f'.//bpmn:escalation[@id="{escalation_ref}"]'))
name = escalation.get('name')
code = escalation.get('escalationCode')
extensions = self.parse_extensions(escalation)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = escalation_event.getparent().get('name')
code, expression, variable = None, None, None
return EscalationEventDefinition(name, expression=expression, variable=variable, code=code)
class SpiffStartEventParser(SpiffEventDefinitionParser, StartEventParser):
def create_task(self):

View File

@ -3,6 +3,8 @@
<xsd:element name="instructionsForEndUser" type="xsd:string"/>
<xsd:element name="messagePayload" type="xsd:string"/>
<xsd:element name="messageVariable" type="xsd:string"/>
<xsd:element name="payloadExpression" type="xsd:string"/>
<xsd:element name="variableName" type="xsd:string"/>
<xsd:element name="preScript" type="xsd:string"/>
<xsd:element name="postScript" type="xsd:string"/>
<xsd:element name="properties">

View File

@ -30,7 +30,8 @@ from SpiffWorkflow.bpmn.serializer.task_spec import (
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
BoundaryEventSplitConverter,
BoundaryEventJoinConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,
@ -53,8 +54,19 @@ from .task_spec import (
BusinessRuleTaskConverter,
)
from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter
from .event_definition import MessageEventDefinitionConverter
from SpiffWorkflow.bpmn.serializer.event_definition import (
MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter,
SignalEventDefinitionConverter as DefaultSignalEventDefinitionConverter,
ErrorEventDefinitionConverter as DefaultErrorEventDefinitionConverter,
EscalationEventDefinitionConverter as DefaultEscalationEventDefinitionConverter,
)
from .event_definition import (
MessageEventDefinitionConverter,
SignalEventDefinitionConverter,
ErrorEventDefinitionConverter,
EscalationEventDefinitionConverter,
)
SPIFF_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG)
SPIFF_SPEC_CONFIG['task_specs'] = [
@ -67,7 +79,8 @@ SPIFF_SPEC_CONFIG['task_specs'] = [
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
BoundaryEventSplitConverter,
BoundaryEventJoinConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,
@ -87,4 +100,10 @@ SPIFF_SPEC_CONFIG['task_specs'] = [
BusinessRuleTaskConverter
]
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultSignalEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultErrorEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultEscalationEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(SignalEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(ErrorEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(EscalationEventDefinitionConverter)
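With all four default converters swapped for their Spiff counterparts, the resulting config is handed to the serializer the same way as the default one. A hedged sketch, assuming the serializer's configure_workflow_spec_converter entry point and the module path (neither is shown in this diff):
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # path assumed
# Build a converter registry from the Spiff config, then construct the serializer with it.
registry = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
serializer = BpmnWorkflowSerializer(registry)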

View File

@ -19,7 +19,12 @@
from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter
from SpiffWorkflow.spiff.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.spiff.specs.event_definitions import (
MessageEventDefinition,
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
)
class MessageEventDefinitionConverter(EventDefinitionConverter):
@ -37,3 +42,38 @@ class MessageEventDefinitionConverter(EventDefinitionConverter):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
event_definition = super().from_dict(dct)
return event_definition
class ItemAwareEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['expression'] = event_definition.expression
dct['variable'] = event_definition.variable
return dct
class SignalEventDefinitionConverter(ItemAwareEventDefinitionConverter):
def __init__(self, registry):
super().__init__(SignalEventDefinition, registry)
class ErrorEventDefinitionConverter(ItemAwareEventDefinitionConverter):
def __init__(self, registry):
super().__init__(ErrorEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['code'] = event_definition.code
return dct
class EscalationEventDefinitionConverter(ItemAwareEventDefinitionConverter):
def __init__(self, registry):
super().__init__(EscalationEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['code'] = event_definition.code
return dct
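A sketch of the round trip these converters implement. The registry argument is whatever converter registry the serializer normally supplies; DictionaryConverter and both import paths are assumptions here, and the field names follow the to_dict methods above:
from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter  # path assumed
from SpiffWorkflow.spiff.serializer.event_definition import ErrorEventDefinitionConverter  # path assumed
from SpiffWorkflow.spiff.specs.event_definitions import ErrorEventDefinition

converter = ErrorEventDefinitionConverter(DictionaryConverter())
dct = converter.to_dict(ErrorEventDefinition('err_1', expression='result', variable='result_data', code='E1'))
# dct now carries 'expression', 'variable', and 'code' alongside the base fields,
# and converter.from_dict(dct) rebuilds an equivalent definition.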

View File

@ -17,29 +17,68 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import (
ItemAwareEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
SignalEventDefinition,
)
from SpiffWorkflow.bpmn.event import BpmnEvent
class MessageEventDefinition(MessageEventDefinition):
def __init__(self, name, correlation_properties=None, expression=None, message_var=None, **kwargs):
super(MessageEventDefinition, self).__init__(name, correlation_properties, **kwargs)
self.expression = expression
self.message_var = message_var
self.internal = False
def throw(self, my_task):
# We can't update our own payload: if this task is reached again, the
# expression must be re-evaluated, so a new event is created each time
event = MessageEventDefinition(self.name, self.correlation_properties, self.expression, self.message_var)
event.payload = my_task.workflow.script_engine.evaluate(my_task, self.expression)
correlations = self.get_correlations(my_task, event.payload)
payload = my_task.workflow.script_engine.evaluate(my_task, self.expression)
correlations = self.get_correlations(my_task, payload)
event = BpmnEvent(self, payload=payload, correlations=correlations)
my_task.workflow.correlations.update(correlations)
self._throw(event, my_task.workflow, my_task.workflow.outer_workflow, correlations)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
my_task.data[self.message_var] = my_task.internal_data[self.name]
if self.message_var is not None:
my_task.data[self.message_var] = my_task.internal_data.pop(self.name)
def reset(self, my_task):
my_task.internal_data.pop(self.message_var, None)
super(MessageEventDefinition, self).reset(my_task)
class SpiffItemAwareEventDefinition(ItemAwareEventDefinition):
def __init__(self, name, expression=None, variable=None, **kwargs):
super().__init__(name, **kwargs)
self.expression = expression
self.variable = variable
def throw(self, my_task):
if self.expression is not None:
payload = my_task.workflow.script_engine.evaluate(my_task, self.expression)
else:
payload = None
event = BpmnEvent(self, payload=payload)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
if self.variable is not None:
my_task.data[self.variable] = my_task.internal_data.pop(self.name, None)
def reset(self, my_task):
my_task.internal_data.pop(self.name, None)
super().reset(my_task)
class SignalEventDefinition(SpiffItemAwareEventDefinition, SignalEventDefinition):
pass
class ErrorEventDefinition(SpiffItemAwareEventDefinition, ErrorEventDefinition):
pass
class EscalationEventDefinition(SpiffItemAwareEventDefinition, EscalationEventDefinition):
pass
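Taken together, SpiffItemAwareEventDefinition gives all three subclasses the same data contract: evaluate the optional expression at throw time, ship the result to the top workflow as the BpmnEvent payload, and pop it into the named variable on the catching side. A minimal sketch with illustrative names:
from SpiffWorkflow.spiff.specs.event_definitions import SignalEventDefinition

definition = SignalEventDefinition('work_done', expression='result', variable='result_in')
# Per the implementation above, definition.throw(my_task) behaves like:
#     payload = my_task.workflow.script_engine.evaluate(my_task, 'result')
#     my_task.workflow.top_workflow.catch(BpmnEvent(definition, payload=payload))
# and once the catching task runs, update_task_data(my_task) does:
#     my_task.data['result_in'] = my_task.internal_data.pop('work_done', None)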

View File

@ -151,13 +151,11 @@ class Task(object):
raise StopIteration()
current = self.path[-1]
# Assure we don't recurse forever.
self.count += 1
if self.count > self.MAX_ITERATIONS:
raise WorkflowException("Task Iterator entered infinite recursion loop", task_spec=current)
# If the current task has children, the first child is the next
# item. If the current task is LIKELY, and predicted tasks are not
# specifically searched, we can ignore the children, because
@ -169,21 +167,22 @@ class Task(object):
ignore_task = is_predicted and not search_predicted
if current.children and not ignore_task:
self.path.append(current.children[0])
if (self.filter is not None and
current.state & self.filter == 0):
if (self.filter is not None and current.state & self.filter == 0):
return None
return current
# Ending up here, this task has no children. Crop the path until we
# reach a task that has unvisited children, or until we hit the
# end.
# reach a task that has unvisited children, or until we hit the end.
while True:
old_child = self.path.pop(-1)
if len(self.path) == 0:
break
# If this task has a sibling, choose it.
parent = self.path[-1]
# A task may have been updated while iterating over the workflow, in which case its children were dropped;
# skip cancelled parents so the index lookup below does not fail
if parent.state == TaskState.CANCELLED:
continue
pos = parent.children.index(old_child)
if len(parent.children) > pos + 1:
self.path.append(parent.children[pos + 1])
@ -259,7 +258,6 @@ class Task(object):
extra = dct or {}
extra.update({
'workflow_spec': self.workflow.spec.name,
'workflow_name': self.workflow.spec.description,
'task_spec': self.task_spec.name,
'task_name': self.task_spec.description,
'task_id': self.id,

View File

@ -29,14 +29,13 @@ logger = logging.getLogger('spiff')
class Workflow(object):
"""
The engine that executes a workflow.
It is essentially a facility for managing all branches.
A Workflow is also the place that holds the data of a running workflow.
"""
def __init__(self, workflow_spec, deserializing=False, **kwargs):
def __init__(self, workflow_spec, deserializing=False, parent=None):
"""
Constructor.
@ -47,11 +46,9 @@ class Workflow(object):
generating tasks twice (and associated problems with multiple
hierarchies of tasks)
"""
self.name = None
assert workflow_spec is not None
self.spec = workflow_spec
self.data = {}
self.outer_workflow = kwargs.get('parent', self)
self.locks = {}
self.last_task = None
if 'Root' in workflow_spec.task_specs:
@ -63,24 +60,20 @@ class Workflow(object):
self.task_tree = Task(self, root, state=TaskState.COMPLETED)
start = self.task_tree._add_child(self.spec.start, state=TaskState.FUTURE)
self.success = True
self.debug = False
# Events.
self.completed_event = Event()
if not deserializing:
self._predict()
if 'parent' not in kwargs:
if parent is None:
start.task_spec._update(start)
logger.info('Initialize', extra=self.log_info())
self.task_mapping = self._get_task_mapping()
def log_info(self, dct=None):
extra = dct or {}
extra.update({
'workflow_spec': self.spec.name,
'workflow_name': self.spec.description,
'task_spec': '-',
'task_type': None,
'task_id': None,
@ -95,8 +88,7 @@ class Workflow(object):
:rtype: bool
:return: Whether the workflow is completed.
"""
mask = TaskState.NOT_FINISHED_MASK
iter = Task.Iterator(self.task_tree, mask)
iter = Task.Iterator(self.task_tree, TaskState.NOT_FINISHED_MASK)
try:
next(iter)
except StopIteration:
@ -108,15 +100,11 @@ class Workflow(object):
for task in Workflow.get_tasks(self,TaskState.NOT_FINISHED_MASK):
task.task_spec._predict(task, mask=mask)
def _get_waiting_tasks(self):
waiting = Task.Iterator(self.task_tree, TaskState.WAITING)
return [w for w in waiting]
def _task_completed_notify(self, task):
if task.get_name() == 'End':
self.data.update(task.data)
# Update the state of every WAITING task.
for thetask in self._get_waiting_tasks():
# Update the state of every WAITING task
for thetask in self.get_tasks_iterator(TaskState.WAITING):
thetask.task_spec._update(thetask)
if self.completed_event.n_subscribers() == 0:
# Since is_completed() is expensive it makes sense to bail
@ -130,7 +118,7 @@ class Workflow(object):
self.locks[name] = mutex()
return self.locks[name]
def _get_task_mapping(self):
def get_task_mapping(self):
task_mapping = {}
for task in self.task_tree:
thread_task_mapping = task_mapping.get(task.thread_id, {})
@ -140,17 +128,8 @@ class Workflow(object):
task_mapping[task.thread_id] = thread_task_mapping
return task_mapping
def update_task_mapping(self):
"""
Update the workflow's task_mapping; call this method every time you
reconstruct a task instance.
"""
self.task_mapping = self._get_task_mapping()
def set_data(self, **kwargs):
"""
Defines the given attribute/value pairs.
"""
"""Defines the given attribute/value pairs."""
self.data.update(kwargs)
def get_data(self, name, default=None):
@ -183,27 +162,16 @@ class Workflow(object):
logger.info(f'Cancel with {len(cancel)} remaining', extra=self.log_info())
return cancel
def get_task_spec_from_name(self, name):
def get_tasks_iterator(self, state=TaskState.ANY_MASK):
"""
Returns the task spec with the given name.
Returns an iterator of Task objects with the given state.
:type name: str
:param name: The name of the task.
:rtype: TaskSpec
:returns: The task spec with the given name.
:type state: integer
:param state: A bitmask of states.
:rtype: Task.Iterator
:returns: An iterator over matching tasks.
"""
return self.spec.get_task_spec_from_name(name)
def get_tasks_from_spec_name(self, name):
"""
Returns all tasks whose spec has the given name.
:type name: str
:param name: The name of a task spec.
:rtype: list[Task]
:returns: A list of tasks that relate to the spec with the given name.
"""
return [task for task in self.get_tasks_iterator() if task.task_spec.name == name]
return Task.Iterator(self.task_tree, state)
def get_tasks(self, state=TaskState.ANY_MASK):
"""
@ -216,37 +184,32 @@ class Workflow(object):
"""
return [t for t in Task.Iterator(self.task_tree, state)]
def get_tasks_iterator(self, state=TaskState.ANY_MASK):
def get_tasks_from_spec_name(self, name):
"""
Returns an iterator of Task objects with the given state.
Returns all tasks whose spec has the given name.
:type state: integer
:param state: A bitmask of states.
:rtype: Task.Iterator
:returns: An iterator over matching tasks.
:type name: str
:param name: The name of a task spec.
:rtype: list[Task]
:returns: A list of tasks that relate to the spec with the given name.
"""
return Task.Iterator(self.task_tree, state)
return [task for task in self.get_tasks_iterator() if task.task_spec.name == name]
def get_task_from_id(self, task_id, tasklist=None):
def get_task_from_id(self, task_id):
"""
Returns the task with the given id.
:type task_id: integer
:param task_id: The id of a task.
:param tasklist: Optional cache of get_tasks for operations
where we are calling multiple times as when we
are deserializing the workflow
:rtype: Task
:returns: The task with the given id.
"""
if task_id is None:
raise WorkflowException('task_id is None', task_spec=self.spec)
tasklist = tasklist or self.task_tree
for task in self.task_tree:
if task.id == task_id:
return task
msg = 'A task with the given task_id (%s) was not found' % task_id
raise TaskNotFoundException(msg, task_spec=self.spec)
return task
raise TaskNotFoundException(f'A task with id {task_id} was not found', task_spec=self.spec)
def run_task_from_id(self, task_id):
"""
@ -267,6 +230,7 @@ class Workflow(object):
:param data: optionally set the task data
"""
task = self.get_task_from_id(task_id)
self.last_task = task.parent
return task.reset_token(data)
def run_next(self, pick_up=True, halt_on_manual=True):
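The surviving query methods all route through Task.Iterator now. A usage sketch under the reorganized interface; 'my_task' is an illustrative spec name:
from SpiffWorkflow.task import TaskState

def inspect(workflow):
    """Exercise the consolidated task queries on a running Workflow."""
    ready = workflow.get_tasks(TaskState.READY)                        # eager list
    waiting = list(workflow.get_tasks_iterator(TaskState.WAITING))     # lazy iterator, same filtering
    by_name = workflow.get_tasks_from_spec_name('my_task')             # filter on spec name
    # get_task_from_id raises TaskNotFoundException when no task matches.
    first = workflow.get_task_from_id(ready[0].id) if ready else None
    return ready, waiting, by_name, first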

View File

@ -46,8 +46,6 @@ class ApprovalsTest(BpmnWorkflowTestCase):
def testRunThroughHappy(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.do_next_named_step('Approvals.Manager_Approval__P_')
@ -57,15 +55,11 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Step1')
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testRunThroughHappyOtherOrders(self):
self.do_next_named_step('First_Approval_Wins.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.do_next_named_step('Approvals.Supervisor_Approval__P_')
@ -75,15 +69,11 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_named_step('Parallel_Approvals_SP.Step1')
self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testSaveRestore(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
@ -96,16 +86,12 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_exclusive_step('Parallel_Approvals_SP.Step1')
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testSaveRestoreWaiting(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.save_restore()
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.save_restore()
@ -122,8 +108,6 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.save_restore()
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.save_restore()
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')

View File

@ -76,7 +76,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
if (p.task_spec.name == parent_name or p.task_spec.bpmn_name == parent_name):
found = True
break
if p.parent is None and p.workflow != p.workflow.outer_workflow:
if p.parent is None and p.workflow != p.workflow.parent:
p = switch_workflow(p)
else:
p = p.parent
@ -117,13 +117,6 @@ class BpmnWorkflowTestCase(unittest.TestCase):
tasks[0].set_data(**set_attribs)
tasks[0].run()
def complete_subworkflow(self):
# A side effect of finer-grained control over task execution is that tasks require more explicit intervention
# to change states. Subworkflow tasks no longer go directly to ready when the subworkflow completes,
# so they may need to be explicitly refreshed to become ready, and then run.
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
def save_restore(self):
script_engine = self.workflow.script_engine
@ -140,7 +133,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
self.assertEqual(before_state, after_state)
self.workflow = after
self.workflow.script_engine = script_engine
def restore(self, state):
self.workflow = self.serializer.workflow_from_dict(state)

View File

@ -17,7 +17,6 @@ class CallActivityTest(BpmnWorkflowTestCase):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertDictEqual(self.workflow.data, {'pre_var': 'some string', 'my_var': 'World', 'my_other_var': 'Mike'})
def test_call_activity_has_same_script_engine(self):
@ -26,7 +25,6 @@ class CallActivityTest(BpmnWorkflowTestCase):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=CustomScriptEngine())
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
self.assertIsInstance(self.workflow.script_engine, CustomScriptEngine)
@ -42,7 +40,6 @@ class CallActivityTest(BpmnWorkflowTestCase):
# data should be removed in the final output as well.
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
self.assertNotIn('remove_this_var', self.workflow.last_task.data.keys())

View File

@ -1,5 +1,6 @@
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import CallActivity
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.task import TaskState
from .BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -46,30 +47,27 @@ class CollaborationTest(BpmnWorkflowTestCase):
start.data['lover_name'] = 'Peggy'
workflow.do_engine_steps()
# An external message should be created
messages = workflow.get_bpmn_messages()
messages = workflow.get_events()
self.assertEqual(len(messages), 1)
self.assertEqual(len(workflow.bpmn_messages), 0)
self.assertEqual(len(workflow.bpmn_events), 0)
receive = workflow.get_tasks_from_spec_name('EventReceiveLetter')[0]
# Waiting Events should contain details about what we are now waiting on.
events = workflow.waiting_events()
self.assertEqual(1, len(events))
self.assertEqual("MessageEventDefinition", events[0]['event_type'])
self.assertEqual("Love Letter Response", events[0]['name'])
self.assertEqual(['lover'], events[0]['value'][0].correlation_keys)
self.assertEqual('from_name', events[0]['value'][0].retrieval_expression)
self.assertEqual('lover_name', events[0]['value'][0].name)
self.assertEqual("MessageEventDefinition", events[0].event_type)
self.assertEqual("Love Letter Response", events[0].name)
self.assertEqual(['lover'], events[0].value[0].correlation_keys)
self.assertEqual('from_name', events[0].value[0].retrieval_expression)
self.assertEqual('lover_name', events[0].value[0].name)
# As shown above, the waiting event is looking for a payload with a
# 'from_name' that should be used to retrieve the lover's name.
new_message_payload = {'from_name': 'Peggy', 'other_nonsense': 1001}
workflow.catch_bpmn_message('Love Letter Response', new_message_payload)
message = BpmnEvent(
receive.task_spec.event_definition,
{'from_name': 'Peggy', 'other_nonsense': 1001}
)
workflow.send_event(message)
workflow.do_engine_steps()
# The external message created above should be caught
self.assertEqual(receive.state, TaskState.COMPLETED)
# Spiff extensions allow us to specify the destination of a workflow
# but base BPMN does not, and all keys are added directly to the
# task data.
self.assertEqual(workflow.last_task.data, {'from_name': 'Peggy', 'lover_name': 'Peggy', 'other_nonsense': 1001})
self.assertEqual(workflow.correlations, {'lover':{'lover_name':'Peggy'}})
self.assertEqual(workflow.is_completed(), True)
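The delivery pattern exercised above generalizes into a small helper; a hedged sketch assuming a single matching receive task:
from SpiffWorkflow.bpmn.event import BpmnEvent

def answer(workflow, spec_name, payload):
    """Deliver a payload to a waiting receive task by wrapping its event definition."""
    receive = workflow.get_tasks_from_spec_name(spec_name)[0]
    workflow.send_event(BpmnEvent(receive.task_spec.event_definition, payload))
    workflow.do_engine_steps()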

View File

@ -36,8 +36,6 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase):
if save_restore:
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.complete_subworkflow()
if save_restore:
self.save_restore()
data = self.workflow.last_task.data

View File

@ -20,7 +20,7 @@ class NestedProcessesTest(BpmnWorkflowTestCase):
self.complete_task('Action2', True)
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.complete_task('Action3', True)
self.complete_workflow()
self.assertTrue(self.workflow.is_completed())
def testResetToTop(self):
@ -36,7 +36,7 @@ class NestedProcessesTest(BpmnWorkflowTestCase):
self.complete_task('Action2')
self.complete_task('Action3')
self.complete_workflow()
self.assertTrue(self.workflow.is_completed())
def testResetToIntermediate(self):
@ -53,16 +53,27 @@ class NestedProcessesTest(BpmnWorkflowTestCase):
task.run()
self.complete_task('Action3')
self.complete_workflow()
self.assertTrue(self.workflow.is_completed())
def testResetToSubworkflow(self):
self.complete_task('Action1', True)
self.complete_task('Action2', True)
self.complete_task('Action3', True)
# "Nested level 1"
task = self.workflow.get_tasks_from_spec_name('sid-C014B4B9-889F-4EE9-9949-C89502C35CF0')[0]
self.workflow.reset_from_task_id(task.id)
self.workflow.do_engine_steps()
self.assertEqual(len(self.workflow.subprocesses), 1)
self.assertEqual(task.state, TaskState.WAITING)
self.complete_task('Action2', True)
self.complete_task('Action3', True)
self.assertTrue(self.workflow.is_completed())
def complete_task(self, name, save_restore=False):
self.do_next_named_step(name)
self.workflow.do_engine_steps()
if save_restore:
self.save_restore()
def complete_workflow(self):
self.complete_subworkflow()
self.complete_subworkflow()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))

View File

@ -46,3 +46,16 @@ class ParserTest(unittest.TestCase):
self.assertRaisesRegex(
ValidationException, "The process '\w+' was not found*",
self.parser.get_spec, "Process")
def testBoundaryEvent(self):
bpmn_file = os.path.join(os.path.dirname(__file__), 'data', 'boundary_event_split.bpmn')
self.parser.add_bpmn_file(bpmn_file)
spec = self.parser.get_spec('Process_0ymnx41')
gw1 = spec.task_specs.get('gw_1')
gw2 = spec.task_specs.get('gw_2')
task = spec.task_specs.get('task_2')
split_task = spec.task_specs.get(f'{task.name}.BoundaryEventSplit')
self.assertNotIn(task, gw1.outputs)
self.assertIn(split_task, gw1.outputs)
self.assertNotIn(task, gw2.outputs)
self.assertIn(split_task, gw2.outputs)
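The generated split spec is named after the task it guards. A hedged lookup helper; the '.BoundaryEventJoin' suffix mirrors the converter names earlier in this diff and is an assumption, since only the split naming appears in this test:
def boundary_specs(spec, task_name):
    """Fetch the generated split/join specs surrounding a task with boundary events."""
    split = spec.task_specs.get(f'{task_name}.BoundaryEventSplit')
    join = spec.task_specs.get(f'{task_name}.BoundaryEventJoin')  # suffix assumed
    return split, join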

View File

@ -24,6 +24,10 @@ class ProcessParserTest(unittest.TestCase):
parser = _process_parser("multiple_call_activities.bpmn", "Process_90mmqlw")
assert parser.called_element_ids() == ["Process_sypm122", "Process_diu8ta2", "Process_l14lar1"]
def testHandlesNestedCallActivity(self):
parser = _process_parser("nested_call_activity.bpmn", "Process_expand_call_activity")
assert parser.called_element_ids() == ["is_this_missing", "set_permissions_process"]
def testCanAddDmnFromString(self):
parser = BpmnDmnParser()
parser.add_dmn_str(EMPTY_DMN)

View File

@ -24,7 +24,7 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
state = self.serializer.serialize_json(self.workflow)
self.workflow = self.serializer.deserialize_json(state)
self.workflow.spec = spec
self.workflow.subprocesses = subprocesses
self.workflow.subprocess_specs = subprocesses
def testSaveRestore(self):
self.actualTest(True)
@ -38,14 +38,11 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
task = self.workflow.get_ready_user_tasks()[0]
self.save_restore()
top_level_task = self.workflow.get_tasks_from_spec_name('Task1')[0]
# top_level_task.reset_token({}, reset_data=True)
self.workflow.reset_from_task_id(top_level_task.id)
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(len(self.workflow.get_ready_user_tasks()), 1,
"There should only be one task in a ready state.")
self.assertEqual(len(self.workflow.get_ready_user_tasks()), 1, "There should only be one task in a ready state.")
self.assertEqual(task.get_name(), 'Task1')
def actualTest(self, save_restore=False):
self.workflow.do_engine_steps()
@ -73,7 +70,6 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
self.assertEqual(task.get_name(),'Subtask2A')
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'Task2')
task.run()

View File

@ -0,0 +1,108 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1oz74lp" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
<bpmn:process id="Process_0ymnx41" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_01pj69c</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:exclusiveGateway id="gw_1" default="Flow_1e2doc4">
<bpmn:incoming>Flow_01pj69c</bpmn:incoming>
<bpmn:outgoing>Flow_1imjpib</bpmn:outgoing>
<bpmn:outgoing>Flow_1e2doc4</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_01pj69c" sourceRef="StartEvent_1" targetRef="gw_1" />
<bpmn:exclusiveGateway id="gw_2" default="Flow_1baqf6p">
<bpmn:incoming>Flow_1imjpib</bpmn:incoming>
<bpmn:outgoing>Flow_1baqf6p</bpmn:outgoing>
<bpmn:outgoing>Flow_0a5mw5g</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_1imjpib" sourceRef="gw_1" targetRef="gw_2">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:task id="task_1">
<bpmn:incoming>Flow_1baqf6p</bpmn:incoming>
<bpmn:outgoing>Flow_0qrmpxi</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_1baqf6p" sourceRef="gw_2" targetRef="task_1" />
<bpmn:task id="task_2">
<bpmn:incoming>Flow_0a5mw5g</bpmn:incoming>
<bpmn:incoming>Flow_1e2doc4</bpmn:incoming>
<bpmn:outgoing>Flow_1or38ex</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_0a5mw5g" sourceRef="gw_2" targetRef="task_2">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1e2doc4" sourceRef="gw_1" targetRef="task_2" />
<bpmn:endEvent id="Event_0wmymez">
<bpmn:incoming>Flow_0qrmpxi</bpmn:incoming>
<bpmn:incoming>Flow_1or38ex</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0qrmpxi" sourceRef="task_1" targetRef="Event_0wmymez" />
<bpmn:sequenceFlow id="Flow_1or38ex" sourceRef="task_2" targetRef="Event_0wmymez" />
<bpmn:boundaryEvent id="Event_0sh7wyp" attachedToRef="task_2">
<bpmn:signalEventDefinition id="SignalEventDefinition_0aridnp" />
</bpmn:boundaryEvent>
<bpmn:boundaryEvent id="Event_00eeq9m" attachedToRef="task_1">
<bpmn:signalEventDefinition id="SignalEventDefinition_0rlpg5k" />
</bpmn:boundaryEvent>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0ymnx41">
<bpmndi:BPMNEdge id="Flow_01pj69c_di" bpmnElement="Flow_01pj69c">
<di:waypoint x="188" y="177" />
<di:waypoint x="395" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1imjpib_di" bpmnElement="Flow_1imjpib">
<di:waypoint x="445" y="177" />
<di:waypoint x="495" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1baqf6p_di" bpmnElement="Flow_1baqf6p">
<di:waypoint x="545" y="177" />
<di:waypoint x="600" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0a5mw5g_di" bpmnElement="Flow_0a5mw5g">
<di:waypoint x="520" y="202" />
<di:waypoint x="520" y="290" />
<di:waypoint x="600" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1e2doc4_di" bpmnElement="Flow_1e2doc4">
<di:waypoint x="420" y="202" />
<di:waypoint x="420" y="320" />
<di:waypoint x="600" y="320" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0qrmpxi_di" bpmnElement="Flow_0qrmpxi">
<di:waypoint x="700" y="177" />
<di:waypoint x="762" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1or38ex_di" bpmnElement="Flow_1or38ex">
<di:waypoint x="700" y="290" />
<di:waypoint x="731" y="290" />
<di:waypoint x="731" y="177" />
<di:waypoint x="762" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1vvx4a6_di" bpmnElement="gw_1" isMarkerVisible="true">
<dc:Bounds x="395" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0vqubcb_di" bpmnElement="gw_2" isMarkerVisible="true">
<dc:Bounds x="495" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10scr9s_di" bpmnElement="task_1">
<dc:Bounds x="600" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0rfxfcx_di" bpmnElement="task_2">
<dc:Bounds x="600" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0wmymez_di" bpmnElement="Event_0wmymez">
<dc:Bounds x="762" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0becpnz_di" bpmnElement="Event_0sh7wyp">
<dc:Bounds x="642" y="312" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0n62i6p_di" bpmnElement="Event_00eeq9m">
<dc:Bounds x="642" y="119" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,111 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1qnx3d3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:process id="main" isExecutable="true">
<bpmn:startEvent id="Event_0gayte6">
<bpmn:outgoing>Flow_1b4mvkp</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1b4mvkp" sourceRef="Event_0gayte6" targetRef="set" />
<bpmn:scriptTask id="set" name="Set x = 0">
<bpmn:incoming>Flow_1b4mvkp</bpmn:incoming>
<bpmn:outgoing>Flow_0igglvl</bpmn:outgoing>
<bpmn:script>x = 0</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0igglvl" sourceRef="set" targetRef="parallel" />
<bpmn:parallelGateway id="parallel">
<bpmn:incoming>Flow_0igglvl</bpmn:incoming>
<bpmn:incoming>Flow_0r5ndy6</bpmn:incoming>
<bpmn:outgoing>Flow_0ric3rl</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_0ric3rl" sourceRef="parallel" targetRef="increment" />
<bpmn:scriptTask id="increment" name="Increment x">
<bpmn:incoming>Flow_0ric3rl</bpmn:incoming>
<bpmn:outgoing>Flow_19u7g47</bpmn:outgoing>
<bpmn:script>x += 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:exclusiveGateway id="exclusive" default="Flow_0tubcbv">
<bpmn:incoming>Flow_19u7g47</bpmn:incoming>
<bpmn:outgoing>Flow_0tubcbv</bpmn:outgoing>
<bpmn:outgoing>Flow_1jejn7h</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_19u7g47" sourceRef="increment" targetRef="exclusive" />
<bpmn:endEvent id="Event_0nk70qa">
<bpmn:incoming>Flow_0tubcbv</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0tubcbv" sourceRef="exclusive" targetRef="Event_0nk70qa" />
<bpmn:sequenceFlow id="Flow_1jejn7h" name="x &#60; 2" sourceRef="exclusive" targetRef="task">
<bpmn:conditionExpression>x &lt; 2</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:task id="task" name="Task">
<bpmn:incoming>Flow_1jejn7h</bpmn:incoming>
<bpmn:outgoing>Flow_0r5ndy6</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_0r5ndy6" sourceRef="task" targetRef="parallel" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
<bpmndi:BPMNShape id="Event_0gayte6_di" bpmnElement="Event_0gayte6">
<dc:Bounds x="172" y="222" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_18u8kup_di" bpmnElement="set">
<dc:Bounds x="260" y="200" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ifyqd2_di" bpmnElement="parallel">
<dc:Bounds x="415" y="215" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="422" y="185" width="36" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0h7ytkx_di" bpmnElement="increment">
<dc:Bounds x="520" y="200" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1r9dzbn_di" bpmnElement="exclusive" isMarkerVisible="true">
<dc:Bounds x="675" y="215" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="677" y="185" width="46" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0nk70qa_di" bpmnElement="Event_0nk70qa">
<dc:Bounds x="782" y="222" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0o2cxkw_di" bpmnElement="task">
<dc:Bounds x="520" y="320" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1b4mvkp_di" bpmnElement="Flow_1b4mvkp">
<di:waypoint x="208" y="240" />
<di:waypoint x="260" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0igglvl_di" bpmnElement="Flow_0igglvl">
<di:waypoint x="360" y="240" />
<di:waypoint x="415" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ric3rl_di" bpmnElement="Flow_0ric3rl">
<di:waypoint x="465" y="240" />
<di:waypoint x="520" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_19u7g47_di" bpmnElement="Flow_19u7g47">
<di:waypoint x="620" y="240" />
<di:waypoint x="675" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0tubcbv_di" bpmnElement="Flow_0tubcbv">
<di:waypoint x="725" y="240" />
<di:waypoint x="782" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1jejn7h_di" bpmnElement="Flow_1jejn7h">
<di:waypoint x="700" y="265" />
<di:waypoint x="700" y="360" />
<di:waypoint x="620" y="360" />
<bpmndi:BPMNLabel>
<dc:Bounds x="707" y="339" width="25" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0r5ndy6_di" bpmnElement="Flow_0r5ndy6">
<di:waypoint x="520" y="360" />
<di:waypoint x="440" y="360" />
<di:waypoint x="440" y="265" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,201 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1larxsm" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
<bpmn:process id="SignalAndCancel" isExecutable="true">
<bpmn:startEvent id="Event_0sg9cla" name="Start">
<bpmn:outgoing>Flow_1f7we2y</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:scriptTask id="ON_SIGNAL" name="On Signal " camunda:resultVariable="test_message">
<bpmn:incoming>Flow_0mixvu9</bpmn:incoming>
<bpmn:outgoing>Flow_08knksn</bpmn:outgoing>
<bpmn:script>cancel="cancel_signal"</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="UserTaskOne" name="UserTaskOne" camunda:formKey="HowMany">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="how_many" label="How many?" type="long" defaultValue="1" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_02yqmm2</bpmn:incoming>
<bpmn:outgoing>Flow_1i2iik3</bpmn:outgoing>
</bpmn:userTask>
<bpmn:manualTask id="hello" name="Hello">
<bpmn:documentation>&lt;H1&gt;Hello&lt;/H1&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0ynr3ge</bpmn:incoming>
<bpmn:outgoing>Flow_02yqmm2</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="Event_0bs4tb7">
<bpmn:incoming>Flow_0xvajbj</bpmn:incoming>
</bpmn:endEvent>
<bpmn:manualTask id="Activity_0zko8f8" name="Good Bye">
<bpmn:documentation>&lt;H1&gt;Good Bye&lt;/H1&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0elk2a3</bpmn:incoming>
<bpmn:outgoing>Flow_0xvajbj</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:scriptTask id="ON_CANCEL" name="On Cancel" camunda:resultVariable="cancel_message">
<bpmn:documentation>&lt;H1&gt;Cancel Message&lt;/H1&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0zc9byy</bpmn:incoming>
<bpmn:outgoing>Flow_0zy1z3a</bpmn:outgoing>
<bpmn:script>cancel="cancel_event"</bpmn:script>
</bpmn:scriptTask>
<bpmn:transaction id="Activity_053djnm">
<bpmn:incoming>Flow_1i2iik3</bpmn:incoming>
<bpmn:outgoing>Flow_0elk2a3</bpmn:outgoing>
<bpmn:startEvent id="Event_0wo2ff1">
<bpmn:outgoing>Flow_0syqz17</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:userTask id="UserTaskTwo" name="UserTaskTwo" camunda:formKey="FormModify">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="modify" label="Modify Data?" type="boolean" defaultValue="True" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0syqz17</bpmn:incoming>
<bpmn:outgoing>Flow_0m0ilsi</bpmn:outgoing>
</bpmn:userTask>
<bpmn:endEvent id="Event_01m9nxq">
<bpmn:incoming>Flow_0m0ilsi</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0m0ilsi" sourceRef="UserTaskTwo" targetRef="Event_01m9nxq" />
<bpmn:sequenceFlow id="Flow_0syqz17" sourceRef="Event_0wo2ff1" targetRef="UserTaskTwo" />
</bpmn:transaction>
<bpmn:boundaryEvent id="Event_0dhiirm" name="TokenReset" attachedToRef="UserTaskOne">
<bpmn:outgoing>Flow_0mixvu9</bpmn:outgoing>
<bpmn:signalEventDefinition id="SignalEventDefinition_0fwhq9v" signalRef="Signal_1eo0jkr" />
</bpmn:boundaryEvent>
<bpmn:boundaryEvent id="Event_CANCEL" attachedToRef="Activity_053djnm">
<bpmn:outgoing>Flow_0zc9byy</bpmn:outgoing>
<bpmn:cancelEventDefinition id="CancelEventDefinition_1hi8rs6" />
</bpmn:boundaryEvent>
<bpmn:sequenceFlow id="Flow_1f7we2y" sourceRef="Event_0sg9cla" targetRef="Activity_1tajk3e" />
<bpmn:sequenceFlow id="Flow_0mixvu9" name="TestMessageFlow" sourceRef="Event_0dhiirm" targetRef="ON_SIGNAL" />
<bpmn:sequenceFlow id="Flow_02yqmm2" sourceRef="hello" targetRef="UserTaskOne" />
<bpmn:sequenceFlow id="Flow_1i2iik3" sourceRef="UserTaskOne" targetRef="Activity_053djnm" />
<bpmn:sequenceFlow id="Flow_0xvajbj" sourceRef="Activity_0zko8f8" targetRef="Event_0bs4tb7" />
<bpmn:sequenceFlow id="Flow_0elk2a3" sourceRef="Activity_053djnm" targetRef="Activity_0zko8f8" />
<bpmn:sequenceFlow id="Flow_0zc9byy" sourceRef="Event_CANCEL" targetRef="ON_CANCEL" />
<bpmn:endEvent id="Event_0a1dppg">
<bpmn:incoming>Flow_08knksn</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_08knksn" sourceRef="ON_SIGNAL" targetRef="Event_0a1dppg" />
<bpmn:endEvent id="Event_1d65clv">
<bpmn:incoming>Flow_0zy1z3a</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0zy1z3a" sourceRef="ON_CANCEL" targetRef="Event_1d65clv" />
<bpmn:scriptTask id="Activity_1tajk3e" name="setup" camunda:resultVariable="test_message">
<bpmn:incoming>Flow_1f7we2y</bpmn:incoming>
<bpmn:outgoing>Flow_0ynr3ge</bpmn:outgoing>
<bpmn:script>cancel="none"</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0ynr3ge" sourceRef="Activity_1tajk3e" targetRef="hello" />
</bpmn:process>
<bpmn:signal id="Signal_1eo0jkr" name="cancel" />
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="SignalAndCancel">
<bpmndi:BPMNEdge id="Flow_1f7we2y_di" bpmnElement="Flow_1f7we2y">
<di:waypoint x="188" y="180" />
<di:waypoint x="210" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mixvu9_di" bpmnElement="Flow_0mixvu9">
<di:waypoint x="560" y="238" />
<di:waypoint x="560" y="331" />
<bpmndi:BPMNLabel>
<dc:Bounds x="465" y="273" width="89" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_02yqmm2_di" bpmnElement="Flow_02yqmm2">
<di:waypoint x="460" y="180" />
<di:waypoint x="510" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1i2iik3_di" bpmnElement="Flow_1i2iik3">
<di:waypoint x="610" y="180" />
<di:waypoint x="679" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0xvajbj_di" bpmnElement="Flow_0xvajbj">
<di:waypoint x="1259" y="180" />
<di:waypoint x="1381" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0elk2a3_di" bpmnElement="Flow_0elk2a3">
<di:waypoint x="1029" y="180" />
<di:waypoint x="1159" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0zc9byy_di" bpmnElement="Flow_0zc9byy">
<di:waypoint x="869" y="298" />
<di:waypoint x="869" y="331" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08knksn_di" bpmnElement="Flow_08knksn">
<di:waypoint x="610" y="371" />
<di:waypoint x="662" y="371" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0zy1z3a_di" bpmnElement="Flow_0zy1z3a">
<di:waypoint x="919" y="371" />
<di:waypoint x="972" y="371" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ynr3ge_di" bpmnElement="Flow_0ynr3ge">
<di:waypoint x="310" y="180" />
<di:waypoint x="360" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_130b520_di" bpmnElement="UserTaskOne">
<dc:Bounds x="510" y="140" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0bs4tb7_di" bpmnElement="Event_0bs4tb7">
<dc:Bounds x="1381" y="162" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0zko8f8_di" bpmnElement="Activity_0zko8f8">
<dc:Bounds x="1159" y="140" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_09cvrck_di" bpmnElement="ON_CANCEL">
<dc:Bounds x="819" y="331" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1d65clv_di" bpmnElement="Event_1d65clv">
<dc:Bounds x="972" y="353" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0x9874h_di" bpmnElement="ON_SIGNAL">
<dc:Bounds x="510" y="331" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0sg9cla_di" bpmnElement="Event_0sg9cla">
<dc:Bounds x="152" y="162" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="158" y="205" width="24" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1tajk3e_di" bpmnElement="Activity_1tajk3e">
<dc:Bounds x="210" y="140" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_06wus2t_di" bpmnElement="hello">
<dc:Bounds x="360" y="140" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0a1dppg_di" bpmnElement="Event_0a1dppg">
<dc:Bounds x="662" y="353" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_053djnm_di" bpmnElement="Activity_053djnm" isExpanded="true">
<dc:Bounds x="679" y="80" width="350" height="200" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0m0ilsi_di" bpmnElement="Flow_0m0ilsi">
<di:waypoint x="899" y="180" />
<di:waypoint x="951" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0syqz17_di" bpmnElement="Flow_0syqz17">
<di:waypoint x="755" y="180" />
<di:waypoint x="799" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_0wo2ff1_di" bpmnElement="Event_0wo2ff1">
<dc:Bounds x="719" y="162" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1f1fveb_di" bpmnElement="UserTaskTwo">
<dc:Bounds x="799" y="140" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_01m9nxq_di" bpmnElement="Event_01m9nxq">
<dc:Bounds x="951" y="162" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0dhiirm_di" bpmnElement="Event_0dhiirm">
<dc:Bounds x="542" y="202" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="580" y="233" width="59" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1sc7ju9_di" bpmnElement="Event_CANCEL">
<dc:Bounds x="851" y="262" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,115 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_expand_call_activity" isExecutable="true">
<bpmn:subProcess id="Activity_0jqrbg8">
<bpmn:incoming>Flow_0rrlqcy</bpmn:incoming>
<bpmn:outgoing>Flow_1x7ckx0</bpmn:outgoing>
<bpmn:startEvent id="Event_1gj4k6s">
<bpmn:outgoing>Flow_09wo95v</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_09wo95v" sourceRef="Event_1gj4k6s" targetRef="Activity_0f25k4g" />
<bpmn:endEvent id="Event_12ep3rp">
<bpmn:incoming>Flow_0fbenj5</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0fbenj5" sourceRef="Activity_0f25k4g" targetRef="Event_12ep3rp" />
<bpmn:callActivity id="Activity_0f25k4g" name="missing?" calledElement="is_this_missing">
<bpmn:incoming>Flow_09wo95v</bpmn:incoming>
<bpmn:outgoing>Flow_0fbenj5</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:subProcess>
<bpmn:endEvent id="Event_0woz4y5">
<bpmn:incoming>Flow_0q4zwqq</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1x7ckx0" sourceRef="Activity_0jqrbg8" targetRef="Activity_1tglo3a" />
<bpmn:startEvent id="Event_0l2o0kx">
<bpmn:outgoing>Flow_0rrlqcy</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0rrlqcy" sourceRef="Event_0l2o0kx" targetRef="Activity_0jqrbg8" />
<bpmn:sequenceFlow id="Flow_0q4zwqq" sourceRef="Activity_1tglo3a" targetRef="Event_0woz4y5" />
<bpmn:subProcess id="Activity_1tglo3a" name="missing?">
<bpmn:incoming>Flow_1x7ckx0</bpmn:incoming>
<bpmn:outgoing>Flow_0q4zwqq</bpmn:outgoing>
<bpmn:startEvent id="Event_003dbci">
<bpmn:outgoing>Flow_1gg1sal</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1gg1sal" sourceRef="Event_003dbci" targetRef="Activity_00etcsd" />
<bpmn:endEvent id="Event_0e6rn1x">
<bpmn:incoming>Flow_151l7v5</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_151l7v5" sourceRef="Activity_00etcsd" targetRef="Event_0e6rn1x" />
<bpmn:callActivity id="Activity_00etcsd" name="also missing?" calledElement="set_permissions_process">
<bpmn:incoming>Flow_1gg1sal</bpmn:incoming>
<bpmn:outgoing>Flow_151l7v5</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:subProcess>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_expand_call_activity">
<bpmndi:BPMNShape id="Event_0woz4y5_di" bpmnElement="Event_0woz4y5">
<dc:Bounds x="422" y="-128" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0jqrbg8_di" bpmnElement="Activity_0jqrbg8" isExpanded="true">
<dc:Bounds x="-170" y="-210" width="350" height="200" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1gj4k6s_di" bpmnElement="Event_1gj4k6s">
<dc:Bounds x="-130" y="-128" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_12ep3rp_di" bpmnElement="Event_12ep3rp">
<dc:Bounds x="122" y="-128" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10rwvqv_di" bpmnElement="Activity_0f25k4g">
<dc:Bounds x="-40" y="-150" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_09wo95v_di" bpmnElement="Flow_09wo95v">
<di:waypoint x="-94" y="-110" />
<di:waypoint x="-40" y="-110" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0fbenj5_di" bpmnElement="Flow_0fbenj5">
<di:waypoint x="60" y="-110" />
<di:waypoint x="122" y="-110" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_0l2o0kx_di" bpmnElement="Event_0l2o0kx">
<dc:Bounds x="-328" y="-128" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1jcx1cz_di" bpmnElement="Activity_1tglo3a">
<dc:Bounds x="240" y="-150" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1x7ckx0_di" bpmnElement="Flow_1x7ckx0">
<di:waypoint x="180" y="-110" />
<di:waypoint x="240" y="-110" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0rrlqcy_di" bpmnElement="Flow_0rrlqcy">
<di:waypoint x="-292" y="-110" />
<di:waypoint x="-170" y="-110" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0q4zwqq_di" bpmnElement="Flow_0q4zwqq">
<di:waypoint x="340" y="-110" />
<di:waypoint x="422" y="-110" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
<bpmndi:BPMNDiagram id="BPMNDiagram_16455gw">
<bpmndi:BPMNPlane id="BPMNPlane_0yuaj40" bpmnElement="Activity_1tglo3a">
<bpmndi:BPMNShape id="Event_003dbci_di" bpmnElement="Event_003dbci">
<dc:Bounds x="312" y="162" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0e6rn1x_di" bpmnElement="Event_0e6rn1x">
<dc:Bounds x="552" y="162" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_00bloq9_di" bpmnElement="Activity_00etcsd">
<dc:Bounds x="400" y="140" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1gg1sal_di" bpmnElement="Flow_1gg1sal">
<di:waypoint x="348" y="180" />
<di:waypoint x="400" y="180" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_151l7v5_di" bpmnElement="Flow_151l7v5">
<di:waypoint x="500" y="180" />
<di:waypoint x="552" y="180" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -48,7 +48,6 @@ class ActionManagementTest(BpmnWorkflowTestCase):
self.do_next_named_step("Complete Work", choice="Done")
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
@ -74,7 +73,7 @@ class ActionManagementTest(BpmnWorkflowTestCase):
time.sleep(self.FINISH_TIME_DELTA)
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(3, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertNotEqual('Finish Time', self.workflow.get_tasks(TaskState.WAITING)[0].task_spec.bpmn_name)
overdue_escalation_task = [
@ -86,8 +85,6 @@ class ActionManagementTest(BpmnWorkflowTestCase):
self.do_next_named_step("Complete Work", choice="Done")
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
def testRunThroughCancel(self):

View File

@ -80,7 +80,6 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase):
for task in self.workflow.get_tasks(TaskState.READY):
task.set_data(should_escalate=False)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.workflow.run_all()
self.assertEqual(True, self.workflow.is_completed())

View File

@ -1,9 +1,10 @@
from datetime import timedelta
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from SpiffWorkflow.task import TaskState
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -29,7 +30,7 @@ class EventBasedGatewayTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.script_engine = self.script_engine
self.assertEqual(len(waiting_tasks), 2)
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('message_1'), {}))
self.workflow.do_engine_steps()
self.workflow.refresh_waiting_tasks()
self.assertEqual(self.workflow.is_completed(), True)
@ -42,8 +43,8 @@ class EventBasedGatewayTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
waiting_tasks = self.workflow.get_waiting_tasks()
self.assertEqual(len(waiting_tasks), 2)
timer_event = waiting_tasks[0].task_spec.event_definition.event_definitions[-1]
self.workflow.catch(timer_event)
timer_event_definition = waiting_tasks[0].task_spec.event_definition.event_definitions[-1]
self.workflow.catch(BpmnEvent(timer_event_definition))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.assertEqual(self.workflow.is_completed(), True)
@ -55,7 +56,7 @@ class EventBasedGatewayTest(BpmnWorkflowTestCase):
spec, subprocess = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main')
workflow = BpmnWorkflow(spec)
workflow.do_engine_steps()
workflow.catch(MessageEventDefinition('message_1'))
workflow.catch(MessageEventDefinition('message_2'))
workflow.catch(BpmnEvent(MessageEventDefinition('message_1'), {}))
workflow.catch(BpmnEvent(MessageEventDefinition('message_2'), {}))
workflow.refresh_waiting_tasks()
workflow.do_engine_steps()
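Across these tests the change is mechanical: workflow.catch no longer accepts a bare event definition, only a BpmnEvent wrapper. A small helper capturing the new calling convention:
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition

def deliver(workflow, message_name, payload=None):
    """Wrap a message definition in a BpmnEvent before handing it to catch()."""
    workflow.catch(BpmnEvent(MessageEventDefinition(message_name), payload or {}))
    workflow.refresh_waiting_tasks()
    workflow.do_engine_steps()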

View File

@ -2,7 +2,8 @@
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
@ -29,7 +30,6 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Ack Subprocess Done')
@ -50,7 +50,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.save_restore()

View File

@ -4,7 +4,8 @@ import unittest
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
@ -33,7 +34,6 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.save_restore()
@ -52,7 +52,7 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.save_restore()
self.workflow.do_engine_steps()
@ -64,7 +64,6 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
@ -80,7 +79,6 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.do_engine_steps()
@ -95,7 +93,7 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
@ -104,7 +102,6 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Acknowledge Interrupt Message')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))

View File

@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
@ -19,8 +20,7 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.save_restore()
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.save_restore()
@ -31,7 +31,6 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.save_restore()
@ -43,19 +42,18 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.save_restore()
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.save_restore()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
@ -71,14 +69,12 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughHappy(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
@ -87,7 +83,6 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.do_engine_steps()
@ -96,45 +91,42 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
def testRunThroughMessageInterrupt(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(3, len(self.workflow.get_tasks(TaskState.WAITING)))
self.do_next_named_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrder(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Do Something That Takes A Long Time')
@ -145,7 +137,6 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrderSaveAndRestore(self):
@ -160,11 +151,11 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Do Something That Takes A Long Time')
@ -177,5 +168,4 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))

View File

@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
@ -28,7 +29,6 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Ack Subprocess Done')
@ -49,11 +49,10 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Ack Subprocess Done')
@ -78,10 +77,9 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Acknowledge SP Parallel Message')
@ -106,7 +104,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.do_next_named_step('Acknowledge SP Parallel Message')
self.workflow.do_engine_steps()
@ -114,7 +112,6 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Ack Subprocess Done')

View File

@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
@ -22,15 +23,14 @@ class MessagesTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
self.assertEqual([], self.workflow.get_tasks(TaskState.READY))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Wrong Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Wrong Message'), {}))
self.assertEqual([], self.workflow.get_tasks(TaskState.READY))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual('Test Message', self.workflow.get_tasks(TaskState.READY)[0].task_spec.bpmn_name)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughSaveAndRestore(self):
@ -43,13 +43,12 @@ class MessagesTest(BpmnWorkflowTestCase):
self.assertEqual([], self.workflow.get_tasks(TaskState.READY))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(MessageEventDefinition('Wrong Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Wrong Message'), {}))
self.assertEqual([], self.workflow.get_tasks(TaskState.READY))
self.workflow.catch(MessageEventDefinition('Test Message'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))

View File

@ -1,5 +1,6 @@
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -28,7 +29,7 @@ class MultipleStartEventTest(BpmnWorkflowTestCase):
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('message_1'), {}))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
@ -62,7 +63,7 @@ class ParallelStartEventTest(BpmnWorkflowTestCase):
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('message_1'), {}))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
@ -71,11 +72,11 @@ class ParallelStartEventTest(BpmnWorkflowTestCase):
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_2'))
self.workflow.catch(BpmnEvent(MessageEventDefinition('message_2'), {}))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
# Now the first task should be ready
ready_tasks = self.workflow.get_ready_user_tasks()
self.assertEqual(len(ready_tasks), 1)
self.assertEqual(ready_tasks[0].task_spec.name, 'any_task')

View File

@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from SpiffWorkflow.bpmn.specs.event_definitions import CancelEventDefinition, SignalEventDefinition
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
class MultipleEventsTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec('multipleEvents.bpmn', 'SignalAndCancel')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual("hello", ready_tasks[0].get_name())
def test_cancel_does_nothing_if_no_one_is_listening(self,save_restore = False):
# Send cancel notifications to the workflow
self.workflow.catch(SignalEventDefinition('cancel')) # generate a cancel signal.
self.workflow.catch(CancelEventDefinition())
# Nothing should have happened.
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual("hello", ready_tasks[0].get_name())
def test_cancel_works_with_signal(self,save_restore = False):
task = self.workflow.get_tasks(TaskState.READY)[0]
# Move to User Task 1
self.workflow.run_task_from_id(task.id)
self.workflow.do_engine_steps()
task = self.workflow.get_tasks(TaskState.READY)[0]
self.assertEqual('UserTaskOne', task.get_name())
# Send cancel notifications to the workflow
self.workflow.catch(SignalEventDefinition('cancel')) # generate a cancel signal.
self.workflow.catch(CancelEventDefinition())
self.workflow.do_engine_steps()
# The cancel event should have been called.
self.assertEqual("cancel_signal", self.workflow.last_task.data['cancel'])
def test_cancel_works_with_cancel_Event(self,save_restore = False):
task = self.workflow.get_tasks(TaskState.READY)[0]
# Move to User Task 2
self.workflow.run_task_from_id(task.id)
self.workflow.do_engine_steps()
task = self.workflow.get_tasks(TaskState.READY)[0]
self.workflow.run_task_from_id(task.id)
self.workflow.do_engine_steps()
task = self.workflow.get_tasks(TaskState.READY)[0]
self.assertEqual('UserTaskTwo', task.get_name())
# Send cancel notifications to the workflow
self.workflow.catch(SignalEventDefinition('cancel')) # generate a cancel signal.
self.workflow.catch(CancelEventDefinition())
self.workflow.do_engine_steps()
# The cancel event should have been called.
self.assertEqual("cancel_event", self.workflow.last_task.data['cancel'])
def suite():
return unittest.TestLoader().loadTestsFromTestCase(MultipleEventsTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())

View File

@ -19,7 +19,6 @@ class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase):
if save_restore:
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(len(self.workflow.get_waiting_tasks()), 0)
self.assertEqual(self.workflow.is_completed(), True)
@ -45,5 +44,4 @@ class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase):
self.assertEqual(len(ready_tasks), 1)
ready_tasks[0].run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(), True)

View File

@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
import unittest
import datetime
import time
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -38,9 +36,10 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
starttime = datetime.datetime.now()
# test bpmn has a timeout of .2s; we should terminate loop before that.
# The subprocess will also wait
while len(self.workflow.get_waiting_tasks()) == 2 and loopcount < 10:
while event.state == TaskState.WAITING and loopcount < 10:
if save_restore:
self.save_restore()
event = self.workflow.get_tasks_from_spec_name('Event_0jyy8ao')[0]
time.sleep(0.1)
ready_tasks = self.workflow.get_tasks(TaskState.READY)
# There should be one ready task until the boundary event fires
@ -66,12 +65,5 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(), True)
self.assertEqual(self.workflow.last_task.data, {'work_done': 'Yes', 'delay_reason': 'Just Because'})
def suite():
return unittest.TestLoader().loadTestsFromTestCase(NITimerDurationTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
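The loop above no longer counts waiting tasks; it polls the state of the timer event task itself until the timer fires. A sketch of that pattern, assuming a spec containing a timer event whose task spec is named 'Event_0jyy8ao' (name taken from the hunk above), with an iteration cap so a broken timer cannot hang the test:

    import time
    from SpiffWorkflow.task import TaskState

    event = workflow.get_tasks_from_spec_name('Event_0jyy8ao')[0]
    loopcount = 0
    while event.state == TaskState.WAITING and loopcount < 10:
        time.sleep(0.1)
        workflow.refresh_waiting_tasks()   # re-evaluate timers against the clock
        event = workflow.get_tasks_from_spec_name('Event_0jyy8ao')[0]
        loopcount += 1
    workflow.do_engine_steps()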

View File

@ -1,7 +1,7 @@
import unittest
from datetime import datetime
from SpiffWorkflow.bpmn.specs.event_definitions import TimerEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinition
class TimeDurationParseTest(unittest.TestCase):
"Non-exhaustive ISO durations, but hopefully covers basic support"

View File

@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
import time
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@ -70,7 +67,3 @@ class TimerCycleTest(BpmnWorkflowTestCase):
self.assertEqual(timer.state, TaskState.COMPLETED)
self.assertEqual(counter, 2)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(TimerCycleTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())

View File

@ -22,7 +22,6 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
ready_tasks[0].update_data({'quantity': 2})
ready_tasks[0].run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertIn('value', self.workflow.last_task.data)
# Check that workflow and next task completed
@ -63,7 +62,8 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
# We should not have this task, as we followed the 'cancel branch'
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
self.assertEqual(len(print_task), 0)
self.assertEqual(len(print_task), 1)
self.assertEqual(print_task[0].state, TaskState.CANCELLED)
def testSubworkflowErrorCodeNone(self):
@ -94,7 +94,8 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
# Make sure this branch didn't get followed
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
self.assertEqual(len(print_task), 0)
self.assertEqual(len(print_task), 1)
self.assertEqual(print_task[0].state, TaskState.CANCELLED)
def testSubworkflowErrorCodeOne(self):
@ -119,4 +120,5 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
self.assertEqual(error_1_task.state, TaskState.COMPLETED)
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
self.assertEqual(len(print_task), 0)
self.assertEqual(len(print_task), 1)
self.assertEqual(print_task[0].state, TaskState.CANCELLED)
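The assertion changes above encode a behavioural shift: tasks on a cancelled branch are now kept in the task tree in state CANCELLED rather than being dropped, so the test finds exactly one such task and checks its state. A sketch of the new check, reusing the spec name from the hunk:

    from SpiffWorkflow.task import TaskState

    print_tasks = workflow.get_tasks_from_spec_name('Activity_Print_Data')
    assert len(print_tasks) == 1                        # task still present
    assert print_tasks[0].state == TaskState.CANCELLED  # but not executed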

View File

@ -0,0 +1,19 @@
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
class ParallelGatewayLoopInputTest(BpmnWorkflowTestCase):
def setUp(self):
spec, subprocess_specs = self.load_workflow_spec('gateway_loop_input.bpmn', 'main')
self.workflow = BpmnWorkflow(spec, subprocess_specs)
def test_loop_input(self):
self.workflow.do_engine_steps()
ready = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(len(ready), 1)
ready[0].run()
self.workflow.do_engine_steps()
self.assertTrue(self.workflow.is_completed())
self.assertDictEqual(self.workflow.data, { 'x': 2})

View File

@ -33,7 +33,6 @@ class ParallelMultipleSplitsTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
self.do_next_named_step('SP 3 - Yes Task')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.do_next_named_step('Done')
self.workflow.do_engine_steps()

View File

@ -162,7 +162,6 @@ class BpmnWorkflowSerializerTest(BaseTestCase):
self.assertIsInstance(w1, BpmnWorkflow)
self.assertIsInstance(w2, BpmnWorkflow)
self.assertEqual(w1.data, w2.data)
self.assertEqual(w1.name, w2.name)
for task in w1.get_ready_user_tasks():
w2_task = w2.get_task_from_id(task.id)
self.assertIsNotNone(w2_task)

View File

@ -11,8 +11,6 @@ from .BaseTestCase import BaseTestCase
class Version_1_0_Test(BaseTestCase):
SERIALIZER_VERSION = "1.2"
def test_convert_subprocess(self):
# The serialization used here comes from NestedSubprocessTest saved at line 25 with version 1.0
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.0.json')
@ -92,3 +90,46 @@ class Version_1_1_Test(BaseTestCase):
ready_tasks[0].run()
ready_tasks = wf.get_tasks(TaskState.READY)
self.assertTrue(wf.is_completed())
class Version1_2_Test(BaseTestCase):
def test_remove_boundary_events(self):
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.2-boundary-events.json')
wf = self.serializer.deserialize_json(open(fn).read())
ready_tasks = wf.get_tasks(TaskState.READY)
ready_tasks[0].update_data({'value': 'asdf'})
ready_tasks[0].run()
wf.do_engine_steps()
ready_tasks = wf.get_tasks(TaskState.READY)
ready_tasks[0].update_data({'quantity': 2})
ready_tasks[0].run()
wf.do_engine_steps()
self.assertIn('value', wf.last_task.data)
# Check that workflow and next task completed
subprocess = wf.get_tasks_from_spec_name('Subprocess')[0]
self.assertEqual(subprocess.state, TaskState.COMPLETED)
print_task = wf.get_tasks_from_spec_name("Activity_Print_Data")[0]
self.assertEqual(print_task.state, TaskState.COMPLETED)
# Check that the boundary events were cancelled
cancel_task = wf.get_tasks_from_spec_name("Catch_Cancel_Event")[0]
self.assertEqual(cancel_task.state, TaskState.CANCELLED)
error_1_task = wf.get_tasks_from_spec_name("Catch_Error_1")[0]
self.assertEqual(error_1_task.state, TaskState.CANCELLED)
error_none_task = wf.get_tasks_from_spec_name("Catch_Error_None")[0]
self.assertEqual(error_none_task.state, TaskState.CANCELLED)
def test_remove_noninterrupting_boundary_events(self):
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.2-boundary-events-noninterrupting.json')
wf = self.serializer.deserialize_json(open(fn).read())
wf.get_tasks_from_spec_name('sid-D3365C47-2FAE-4D17-98F4-E68B345E18CE')[0].run()
wf.do_engine_steps()
self.assertEqual(1, len(wf.get_tasks(TaskState.READY)))
self.assertEqual(3, len(wf.get_tasks(TaskState.WAITING)))
wf.get_tasks_from_spec_name('sid-6FBBB56D-00CD-4C2B-9345-486986BB4992')[0].run()
wf.do_engine_steps()
self.assertTrue(wf.is_completed())

View File

@ -41,7 +41,6 @@ class CallActivityMessageTest(BaseTestCase):
current_task.update_data(step[1])
current_task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
if save_restore:
self.save_restore()
ready_tasks = self.workflow.get_tasks(TaskState.READY)

View File

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.camunda.specs.event_definitions import MessageEventDefinition
from .BaseTestCase import BaseTestCase
@ -19,19 +19,19 @@ class ExternalMessageBoundaryTest(BaseTestCase):
def testThroughSaveRestore(self):
self.actual_test(save_restore=True)
def actual_test(self,save_restore = False):
def actual_test(self, save_restore=False):
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(1, len(ready_tasks),'Expected to have only one ready task')
self.workflow.catch(MessageEventDefinition('Interrupt', payload='SomethingImportant', result_var='interrupt_var'))
self.workflow.catch(BpmnEvent(
MessageEventDefinition('Interrupt'),
{'result_var': 'interrupt_var', 'payload': 'SomethingImportant'}
))
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(2,len(ready_tasks),'Expected to have two ready tasks')
# Because the thread just dies and doesn't lead to a task, we expect
# the data to die with it.
# item 1 should be at 'Pause'
self.assertEqual('Pause',ready_tasks[1].task_spec.bpmn_name)
self.assertEqual('SomethingImportant', ready_tasks[1].data['interrupt_var'])
@ -40,11 +40,11 @@ class ExternalMessageBoundaryTest(BaseTestCase):
self.assertEqual(False, ready_tasks[0].data['caughtinterrupt'])
ready_tasks[1].run()
self.workflow.do_engine_steps()
# What appears to happen here: when we hit the reset, it updates last_task
# and appends the data to whatever happened there, so it makes sense that
# we see the extra variables set in 'Pause'. If we had continued from
# 'meaningless task' instead, those variables would not have been added.
self.workflow.catch(MessageEventDefinition('reset', payload='SomethingDrastic', result_var='reset_var'))
self.workflow.catch(BpmnEvent(
MessageEventDefinition('reset'),
{'result_var': 'reset_var', 'payload': 'SomethingDrastic'}
))
ready_tasks = self.workflow.get_tasks(TaskState.READY)
# The user activity was cancelled and we should continue from the boundary event
self.assertEqual(2, len(ready_tasks), 'Expected to have two ready tasks')
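For the Camunda message events, payload and result_var are no longer keyword arguments on MessageEventDefinition; they travel in the BpmnEvent payload dict instead. A sketch, reusing the event and variable names from this hunk:

    from SpiffWorkflow.bpmn.event import BpmnEvent
    from SpiffWorkflow.camunda.specs.event_definitions import MessageEventDefinition

    workflow.catch(BpmnEvent(
        MessageEventDefinition('Interrupt'),
        {'result_var': 'interrupt_var', 'payload': 'SomethingImportant'},
    ))
    workflow.do_engine_steps()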

View File

@ -1,7 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
import time
from datetime import timedelta
from SpiffWorkflow.task import TaskState
@ -26,8 +22,6 @@ class MessageBoundaryTest(BaseTestCase):
def testThroughSaveRestore(self):
self.actual_test(save_restore=True)
def actual_test(self,save_restore = False):
steps = [('Activity_Interrupt', {'interrupt_task':'No'}),
('Activity_Interrupt', {'interrupt_task': 'No'}),
@ -54,9 +48,3 @@ class MessageBoundaryTest(BaseTestCase):
self.workflow.do_engine_steps()
self.assertEqual(self.workflow.is_completed(),True,'Expected the workflow to be complete at this point')
def suite():
return unittest.TestLoader().loadTestsFromTestCase(MessageBoundaryTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())

View File

@ -82,7 +82,6 @@ class NIMessageBoundaryTest(BaseTestCase):
task.data['work_completed'] = 'Lots of Stuff'
self.workflow.run_task_from_id(task.id)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(),True)
self.assertEqual(self.workflow.last_task.data,{'Event_InterruptBoundary_Response': 'Youre late!',
'flag_task': 'Yes',

View File

@ -77,7 +77,6 @@ class ResetTokenTestSubProcess(BaseTestCase):
task.update_data({step['formvar']: step['answer']})
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
if save_restore:
self.save_restore()

View File

@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
import unittest
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from .BaseTestCase import BaseTestCase
__author__ = 'kellym'
@ -62,9 +59,3 @@ class StartMessageTest(BaseTestCase):
'ApprovalResult': 'Yes',
'Done': 'OK!'
})
def suite():
return unittest.TestLoader().loadTestsFromTestCase(StartMessageTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())

View File

@ -33,7 +33,6 @@ class SubWorkflowTest(BaseTestCase):
task.update_data({"Field"+answer: answer})
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
if save_restore:
self.save_restore()

View File

@ -18,7 +18,7 @@ class CamundaExtensionsTest(BaseTestCase):
def assertMyExtension(self):
"""Assure that we have a very specific extension on specific task."""
task = self.workflow.get_task_spec_from_name("Task_User_Select_Type")
task = self.workflow.spec.get_task_spec_from_name("Task_User_Select_Type")
self.assertIsNotNone(task)
self.assertTrue(hasattr(task, 'extensions'))
self.assertTrue("my_extension" in task.extensions)

View File

@ -109,7 +109,7 @@ class TaskSpecTest(unittest.TestCase):
# Now refresh waiting tasks:
# Update the state of every WAITING task.
for thetask in self.workflow._get_waiting_tasks():
for thetask in [t for t in self.workflow.get_tasks(TaskState.WAITING)]:
thetask.task_spec._update(thetask)
self.do_next_unique_task('last')

View File

@ -2,16 +2,21 @@ import os
from lxml import etree
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
from SpiffWorkflow.dmn.parser.DMNParser import DMNParser, get_dmn_ns
class WorkflowSpec:
def __init__(self):
self.file = 'my_mock_file'
self.name = 'Mock Workflow Spec'
self.task_specs = {}
class Workflow:
def __init__(self, script_engine):
self.script_engine = script_engine
self.outer_workflow = self
self.spec = Box({'file': 'my_mock_file'})
self.parent = None
self.spec = WorkflowSpec()
self.top_workflow = self
class TaskSpec:
def __init__(self):

Some files were not shown because too many files have changed in this diff.