diff --git a/comparisons/comparison-argo.md b/comparisons/comparison-argo.md index 5be92f30..f2b9f53a 100644 --- a/comparisons/comparison-argo.md +++ b/comparisons/comparison-argo.md @@ -404,7 +404,7 @@ states: actions: - functionRef: gen-random-int-bash actionDataFilter: - dataResultsPath: "${ .results }" + results: "${ .results }" transition: print-message - name: print-message type: operation @@ -581,7 +581,7 @@ states: actions: - functionRef: flip-coin-function actionDataFilter: - dataResultsPath: "${ .flip.result }" + results: "${ .flip.result }" transition: show-flip-results - name: show-flip-results type: switch @@ -595,14 +595,14 @@ states: actions: - functionRef: echo actionDataFilter: - dataResultsPath: it was heads + results: it was heads end: true - name: show-results-tails type: operation actions: - functionRef: echo actionDataFilter: - dataResultsPath: it was tails + results: it was tails end: true ``` @@ -761,7 +761,7 @@ states: actions: - functionRef: flip-coin-function actionDataFilter: - dataResultsPath: "${ .steps.flip-coin.outputs.result }" + results: "${ .steps.flip-coin.outputs.result }" transition: flip-coin-check - name: flip-coin-check type: switch diff --git a/comparisons/comparison-brigade.md b/comparisons/comparison-brigade.md index 97903ea1..15c46f65 100644 --- a/comparisons/comparison-brigade.md +++ b/comparisons/comparison-brigade.md @@ -403,7 +403,7 @@ states: - eventRefs: - execEvent eventDataFilter: - dataOutputPath: "${ .event }" + results: "${ .event }" actions: - name: eventInfoAction functionRef: @@ -484,18 +484,18 @@ states: - eventRefs: - execEvent eventDataFilter: - dataOutputPath: "${ .event }" + data: "${ .event }" actions: - name: helloAction actionDataFilter: - dataResultsPath: "${ .helloResult }" + results: "${ .helloResult }" functionRef: refName: greetingFunction arguments: message: hello - name: worldAction actionDataFilter: - dataResultsPath: "${ .worldResults }" + results: "${ .worldResults }" functionRef: refName: greetingAction arguments: @@ -572,7 +572,7 @@ states: - execEvent actions: [] eventDataFilter: - dataOutputPath: "${ .execEvent }" + data: "${ .execEvent }" transition: nextState: NextEventState produceEvents: @@ -590,7 +590,7 @@ states: - eventRefs: - nextEvent eventDataFilter: - dataOutputPath: "${ .nextEvent }" + data: "${ .nextEvent }" actions: - name: consoleLogAction functionRef: diff --git a/comparisons/comparison-cadence.md b/comparisons/comparison-cadence.md index 6984ae5e..5b97debd 100644 --- a/comparisons/comparison-cadence.md +++ b/comparisons/comparison-cadence.md @@ -201,7 +201,7 @@ states: arguments: filename: "${ .file.name }" actionDataFilter: - dataResultsPath: "${ .processed }" + results: "${ .processed }" - functionRef: refName: uploadfunction arguments: diff --git a/comparisons/comparison-google-cloud-workflows.md b/comparisons/comparison-google-cloud-workflows.md index 08923919..b1350128 100644 --- a/comparisons/comparison-google-cloud-workflows.md +++ b/comparisons/comparison-google-cloud-workflows.md @@ -95,7 +95,7 @@ languages. "outputVar": "Hello ${ .firstname + \" \" + .lastname }" }, "stateDataFilter": { - "dataOutputPath": "${ .outputVar }" + "output": "${ .outputVar }" }, "end": true } @@ -206,7 +206,7 @@ instance is created. 
See the Serverless Workflow ["Workflow Data"](../specificat ] }, "stateDataFilter": { - "dataOutputPath": "${ .array | join(\"\") }" + "output": "${ .array | join(\"\") }" }, "end": true } diff --git a/examples/README.md b/examples/README.md index 07ccedce..85f83fdd 100644 --- a/examples/README.md +++ b/examples/README.md @@ -166,7 +166,7 @@ Which is added to the states data and becomes the workflow data output. } }, "actionDataFilter": { - "dataResultsPath": "${ .greeting }" + "results": "${ .greeting }" } } ], @@ -197,7 +197,7 @@ states: arguments: name: "${ .person.name }" actionDataFilter: - dataResultsPath: "${ .greeting }" + results: "${ .greeting }" end: true ``` @@ -243,20 +243,20 @@ Note that in the workflow definition you can see two filters defined. The event ```json { "eventDataFilter": { - "dataOutputPath": "${ .data.greet } " + "data": "${ .data.greet } " } } ``` -which is triggered when the greeting event is consumed. It extracts its "data.greet" of the event and -merges it with the states data. +which is triggered when the greeting event is consumed. It extracts its "data.greet" of the event data (payload) and +merges it with the state data. The second, a state data filter, which is defined on the event state itself: ```json { "stateDataFilter": { - "dataOutputPath": "${ .payload.greeting }" + "output": "${ .payload.greeting }" } } ``` @@ -310,7 +310,7 @@ filters what is selected to be the state data output which then becomes the work "onEvents": [{ "eventRefs": ["GreetingEvent"], "eventDataFilter": { - "dataOutputPath": "${ .data.greet }" + "data": "${ .data.greet }" }, "actions":[ { @@ -324,7 +324,7 @@ filters what is selected to be the state data output which then becomes the work ] }], "stateDataFilter": { - "dataOutputPath": "${ .payload.greeting }" + "output": "${ .payload.greeting }" }, "end": true } @@ -355,14 +355,14 @@ states: - eventRefs: - GreetingEvent eventDataFilter: - dataOutputPath: "${ .data.greet }" + data: "${ .data.greet }" actions: - functionRef: refName: greetingFunction arguments: name: "${ .greet.name }" stateDataFilter: - dataOutputPath: "${ .payload.greeting }" + output: "${ .payload.greeting }" end: true ``` @@ -438,7 +438,7 @@ result of the workflow execution. 
} ], "stateDataFilter": { - "dataOutputPath": "${ .results }" + "output": "${ .results }" }, "end": true } @@ -470,7 +470,7 @@ states: arguments: expression: "${ .singleexpression }" stateDataFilter: - dataOutputPath: "${ .results }" + output: "${ .results }" end: true ``` @@ -892,7 +892,7 @@ The data output of the workflow contains the information of the exception caught } ], "stateDataFilter": { - "dataOutputPath": "${ .exceptions }" + "output": "${ .exceptions }" }, "transition": "ApplyOrder", "onErrors": [ @@ -960,7 +960,7 @@ states: arguments: order: "${ .order }" stateDataFilter: - dataOutputPath: "${ .exceptions }" + output: "${ .exceptions }" transition: ApplyOrder onErrors: - error: Missing order id @@ -1060,7 +1060,7 @@ In the case job submission raises a runtime error, we transition to a SubFlow st } }, "actionDataFilter": { - "dataResultsPath": "${ .jobuid }" + "results": "${ .jobuid }" } } ], @@ -1071,7 +1071,7 @@ In the case job submission raises a runtime error, we transition to a SubFlow st } ], "stateDataFilter": { - "dataOutputPath": "${ .jobuid }" + "output": "${ .jobuid }" }, "transition": "WaitForCompletion'" }, @@ -1100,12 +1100,12 @@ In the case job submission raises a runtime error, we transition to a SubFlow st } }, "actionDataFilter": { - "dataResultsPath": "${ .jobstatus }" + "results": "${ .jobstatus }" } } ], "stateDataFilter": { - "dataOutputPath": "${ .jobstatus }" + "output": "${ .jobstatus }" }, "transition": "DetermineCompletion" }, @@ -1190,12 +1190,12 @@ states: arguments: name: "${ .job.name }" actionDataFilter: - dataResultsPath: "${ .jobuid }" + results: "${ .jobuid }" onErrors: - error: "*" transition: SubmitError stateDataFilter: - dataOutputPath: "${ .jobuid }" + output: "${ .jobuid }" transition: WaitForCompletion - name: SubmitError type: subflow @@ -1214,9 +1214,9 @@ states: arguments: name: "${ .jobuid }" actionDataFilter: - dataResultsPath: "${ .jobstatus }" + results: "${ .jobstatus }" stateDataFilter: - dataOutputPath: "${ .jobstatus }" + output: "${ .jobstatus }" transition: DetermineCompletion - name: DetermineCompletion type: switch @@ -2358,7 +2358,7 @@ For this example we assume that the workflow instance is started given the follo "resultEventRef": "VetAppointmentInfo" }, "actionDataFilter": { - "dataResultsPath": "${ .appointmentInfo }" + "results": "${ .appointmentInfo }" }, "timeout": "PT15M" } @@ -2395,7 +2395,7 @@ states: data: "${ .patientInfo }" resultEventRef: VetAppointmentInfo actionDataFilter: - dataResultsPath: "${ .appointmentInfo }" + results: "${ .appointmentInfo }" timeout: PT15M end: true ``` @@ -3126,7 +3126,7 @@ the data for an hour, send report, and so on. 
} ], "eventDataFilter": { - "dataOutputPath": "${ .readings }" + "data": "${ .readings }" } } ], @@ -3208,7 +3208,7 @@ states: - functionRef: refName: LogReading eventDataFilter: - dataOutputPath: "${ .readings }" + data: "${ .readings }" end: true - name: GenerateReport type: operation diff --git a/media/spec/event-data-filter-example1.png b/media/spec/event-data-filter-example1.png index dc3e64ef..5b076d31 100644 Binary files a/media/spec/event-data-filter-example1.png and b/media/spec/event-data-filter-example1.png differ diff --git a/media/spec/state-data-filter-example1.png b/media/spec/state-data-filter-example1.png index 4b3e7572..ace7f18e 100644 Binary files a/media/spec/state-data-filter-example1.png and b/media/spec/state-data-filter-example1.png differ diff --git a/media/spec/state-data-filter-example2.png b/media/spec/state-data-filter-example2.png index c0a44ed0..15cb2d81 100644 Binary files a/media/spec/state-data-filter-example2.png and b/media/spec/state-data-filter-example2.png differ diff --git a/media/spec/workflowdataflow.png b/media/spec/workflowdataflow.png new file mode 100644 index 00000000..ef1fd789 Binary files /dev/null and b/media/spec/workflowdataflow.png differ diff --git a/roadmap/README.md b/roadmap/README.md index 3e4ca486..057bbf87 100644 --- a/roadmap/README.md +++ b/roadmap/README.md @@ -38,6 +38,7 @@ _Status description:_ | ✔️| Replace JsonPath with jq | [spec doc](../specification.md) | | ✔️| Update start definition (move to top-level worklow param) | [spec doc](../specification.md) | | ✔️| Updated schedule definition | [spec doc](../specification.md) | +| ✔️| Update data filters | [spec doc](../specification.md) | | 🚩 | Workflow invocation bindings | | | 🚩 | CE Subscriptions & Discovery | | | 🚩 | Error types | [issue](https://github.com/serverlessworkflow/specification/issues/200) | diff --git a/schema/workflow.json b/schema/workflow.json index 03320f0e..bdfec7cb 100644 --- a/schema/workflow.json +++ b/schema/workflow.json @@ -265,6 +265,7 @@ } }, "eventDataFilter": { + "description": "Event data filter", "$ref": "#/definitions/eventdatafilter" } }, @@ -314,6 +315,7 @@ "description": "Time period to wait for function execution to complete" }, "actionDataFilter": { + "description": "Action data filter", "$ref": "#/definitions/actiondatafilter" } }, @@ -421,6 +423,7 @@ "description": "State end definition" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "timeDelay": { @@ -524,6 +527,7 @@ "description": "Time period to wait for incoming events (ISO 8601 format)" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "onErrors": { @@ -601,6 +605,7 @@ "description": "State end definition" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "actionMode": { @@ -712,6 +717,7 @@ "description": "State end definition" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "branches": { @@ -839,6 +845,7 @@ "description": "State type" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "eventConditions": { @@ -904,6 +911,7 @@ "description": "State type" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "dataConditions": { @@ -1058,10 +1066,10 @@ }, "condition": { "type": "string", - "description": "JsonPath expression evaluated against state data. 
True if results are not empty" + "description": "Workflow expression evaluated against state data. Must evaluate to true or false" }, "transition": { - "description": "Next transition of the workflow if there is valid matches", + "description": "Workflow transition if condition is evaluated to true", "$ref": "#/definitions/transition" } }, @@ -1083,11 +1091,11 @@ }, "condition": { "type": "string", - "description": "JsonPath expression evaluated against state data. True if results are not empty" + "description": "Workflow expression evaluated against state data. Must evaluate to true or false" }, "end": { "$ref": "#/definitions/end", - "description": "Explicit transition to end" + "description": "Workflow end definition" } }, "metadata": { @@ -1134,6 +1142,7 @@ "description": "SubFlow state repeat exec definition" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "onErrors": { @@ -1224,6 +1233,7 @@ "description": "JSON object which can be set as states data input and can be manipulated via filters" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "transition": { @@ -1303,11 +1313,11 @@ }, "inputCollection": { "type": "string", - "description": "JsonPath expression selecting an array element of the states data" + "description": "Workflow expression selecting an array element of the states data" }, "outputCollection": { "type": "string", - "description": "JsonPath expression specifying an array element of the states data to add the results of each iteration" + "description": "Workflow expression specifying an array element of the states data to add the results of each iteration" }, "iterationParam": { "type": "string", @@ -1335,6 +1345,7 @@ "description": "Unique Id of a workflow to be executed for each of the elements of inputCollection" }, "stateDataFilter": { + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "onErrors": { @@ -1455,11 +1466,11 @@ "description": "Time period to wait for incoming events (ISO 8601 format)" }, "eventDataFilter": { - "description": "Callback event data filter definition", + "description": "Event data filter", "$ref": "#/definitions/eventdatafilter" }, "stateDataFilter": { - "description": "State data filter definition", + "description": "State data filter", "$ref": "#/definitions/statedatafilter" }, "onErrors": { @@ -1665,13 +1676,13 @@ "statedatafilter": { "type": "object", "properties": { - "dataInputPath": { + "input": { "type": "string", - "description": "JsonPath definition that selects parts of the states data input" + "description": "Workflow expression to filter the state data input" }, - "dataOutputPath": { + "output": { "type": "string", - "description": "JsonPath definition that selects parts of the states data output" + "description": "Workflow expression that filters the state data output" } }, "required": [] @@ -1679,9 +1690,13 @@ "eventdatafilter": { "type": "object", "properties": { - "dataOutputPath": { + "data": { "type": "string", - "description": "JsonPath definition that selects parts of the event data, to be merged with the states data" + "description": "Workflow expression that filters of the event data (payload)" + }, + "toStateData": { + "type": "string", + "description": " Workflow expression that selects a state data element to which the event payload should be added/merged into. If not specified, denotes, the top-level state data element." 
} }, "required": [] @@ -1689,23 +1704,17 @@ "actiondatafilter": { "type": "object", "properties": { - "dataInputPath": { + "fromStateData": { "type": "string", - "description": "JsonPath definition that selects parts of the states data input to be the action data" + "description": "Workflow expression that selects state data that the state action can use" }, - "dataResultsPath": { + "results": { "type": "string", - "description": "JsonPath definition that selects parts of the actions data result, to be merged with the states data" - } - }, - "required": [] - }, - "errordatafilter": { - "type": "object", - "properties": { - "dataOutputPath": { + "description": "Workflow expression that filters the actions data results" + }, + "toStateData": { "type": "string", - "description": "JsonPath definition that selects parts of the error data, to be merged with the states data" + "description": "Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element" } }, "required": [] diff --git a/specification.md b/specification.md index 0dce181f..c1990ee0 100644 --- a/specification.md +++ b/specification.md @@ -157,406 +157,333 @@ The Serverless Workflow language is composed of: ### Workflow Data Serverless Workflow data is represented in [JSON](https://www.json.org/json-en.html) format. -Data flow during workflow execution is composed of: +Data flow and execution logic go hand in hand, meaning as workflow execution follows the workflow definition +logic, so does the workflow data: -- [Workfow data input](#Workflow-data-input) -- [Event data](#Event-data) -- [Action data](#Action-data) -- [Information passing between states](#Information-passing-Between-States) -- [State Data filtering](#State-data-filtering) -- [Workflow data output](#Workflow-data-output) +

+*Figure: Serverless Workflow Data Flow (media/spec/workflowdataflow.png)*

+ +The initial [Workfow data input](#Workflow-data-input) is passed to the workflow starting state as its data input. +When a state finishes its execution, [its data output is passed as data input to the next state](#Information-passing-Between-States) that should be executed. -### Workflow Functions +When workflow execution ends, the last executed workflow state's data output becomes the final [Workflow data output](#Workflow-data-output). -Workflow [functions](#Function-Definition) are reusable definitions for RESTful service invocations and/or expression evaluation. -They can be referenced by their domain-specific names inside workflow [states](#State-Definition). +States can filter their data inputs and outputs using [State Data filters](#State-data-filters). -Reference the following sections to learn more about workflow functions: -* [Using functions for RESTful service invocations](#Using-Functions-For-RESTful-Service-Invocations) -* [Using functions for RPC service invocation](#Using-Functions-For-RPC-Service-Invocations) -* [Using functions for expression evaluations](#Using-Functions-For-Expression-Evaluation) +States can also consume events as well as invoke services. These event payloads and service invocation results +can be filtered using [Event data filters](#Event-data-filters) and [Action data filters](#Action-data-filters). -### Using Functions For RESTful Service Invocations +Data filters use [workflow expressions](#Workflow-Expressions) for selecting and manipulating state data +input and output, action inputs and results, and event payloads. -[Functions](#Function-Definition) can be used to describe services and their operations that need to be invoked during -workflow execution. They can be referenced by states [action definitions](#Action-Definition)] to clearly -define when the service operations should be invoked during workflow execution, as well as the data parameters -passed to them if needed. +Multiple filters can be combined to gain high level of control of your workflow state data. You can find an example of that in +[this](#Using-multiple-data-filters) section. -Note that with Serverless Workflow we can also define service invocations via events. -To learn more about that, please reference the [event definitions](#Event-Definition) section, -as well as the [actions definitions](#Action-Definition) [eventRef](#EventRef-Definition) property. - -Because of an overall lack of a common way to describe different services and their operations, -many workflow languages typically chose to define custom function definitions. -This approach however often runs into issues such as lack of portability, limited capabilities, as well as -forcing non-workflow-specific information, such as service authentication, to be added inside the workflow language. +Data from consumed events,and action execution results are added/merged +to state data. Reference the [data merging section](#Data-Merging) to learn about the merging rules that should be applied. -To avoid these issues, the Serverless Workflow specification mandates that details about -RESTful services and their operations be described using the [OpenAPI Specification](https://www.openapis.org/) specification. -OpenAPI is a language-agnostic standard that describes discovery of RESTful services. -This allows Serverless Workflow language to describe RESTful services in a portable -way, as well as workflow runtimes to utilize OpenAPI tooling and APIs to invoke service operations. 
+#### Workflow Data Input -Here is an example function definition for a RESTful service operation. +The initial data input into a workflow instance. Must be a valid [JSON object](https://tools.ietf.org/html/rfc7159#section-4). +If no input is provided, the default data input should be an empty JSON object: ```json -{ -"functions": [ - { - "name": "sendOrderConfirmation", - "operation": "file://confirmationapi.json#sendOrderConfirmation" - } -] -} +{ } ``` -It can, as previously mentioned be referenced during workflow execution when invocation of this service is desired. -For example: +Workflow data input is passed to the workflow starting state as its data input. + +

+*Figure: Workflow data input*

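For example, a runtime could also start an instance with a non-empty input such as the following (a hypothetical order object, used here purely for illustration); this object then becomes the data input of the starting state:

```json
{
  "order": {
    "id": "1234",
    "quantity": 2
  }
}
```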
+ +#### Information Passing Between States + +States in a workflow can receive data (data input) and produce a data result (data output). The state's data input is typically the previous state's data output. +When a state completes its execution, its data output is passed to the state's data input it transitions to. +There are two rules to consider here: + +- If the state is the workflow starting state, its data input is the [workflow data input](#Workflow-data-input). +- When workflow execution ends, the data output of the last executed state becomes the [workflow data output](#Workflow-data-output). + +

+*Figure: Basic state data passing*

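As a minimal sketch that continues the hypothetical order input above (the state names and data are illustrative, not taken from the specification examples), if a state named `CheckInventory` finishes with the data output below and transitions to `ShipOrder`, then `ShipOrder` receives exactly this JSON as its data input:

```json
{
  "order": {
    "id": "1234",
    "quantity": 2
  },
  "inStock": true
}
```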
+ +#### Workflow data output + +Each workflow execution should produce a data output. +The workflow data output is the data output of the last executed workflow state. + +#### State data filters + +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| input | Workflow expression to filter the states data input | string | no | +| output | Workflow expression that filters the states data output | string | no | + +
Click to view example definition +

```json { -"states": [ - { - "name":"SendConfirmState", - "type":"operation", - "actions":[ - { - "functionRef": "sendOrderConfirmation" - }], - "end": true - }] + "stateDataFilter": { + "input": "${ .orders }", + "output": "${ .provisionedOrders }" + } } ``` -Note that the referenced function definition type in this case must be `rest` (default type). + -For more information about functions, reference the [Functions definitions](#Function-Definition) section. +```yaml +stateDataFilter: + input: "${ .orders }" + output: "${ .provisionedOrders }" +``` -### Using Functions For RPC Service Invocations +
-Similar to defining invocations of operations on RESTful services, you can also use the workflow -[functions definitions](#Function-Definition) that follow the remote procedure call (RPC) protocol. -For RPC invocations, the Serverless Workflow specification mandates that they are described using [gRPC](https://grpc.io/), -a widely used RPC system. -gRPC uses [Protocol Buffers](https://developers.google.com/protocol-buffers/docs/overview) to define messages, services, -and the methods on those services that can be invoked. +

-Let's look at an example of invoking a service method using RPC. For this example let's say we have the following -gRP prototocol buffer definition in a myuserservice.proto file: +State data filters can be used to filter the states data input and output. -```text -service UserService { - rpc AddUser(User) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/api/v1/users" - body: "*" - }; - } - rpc ListUsers(ListUsersRequest) returns (stream User) { - option (google.api.http) = { - get: "/api/v1/users" - }; - } - rpc ListUsersByRole(UserRole) returns (stream User) { - option (google.api.http) = { - get: "/api/v1/users/role" - }; - } - rpc UpdateUser(UpdateUserRequest) returns (User) { - option (google.api.http) = { - patch: "/api/v1/users/{user.id}" - body: "*" - }; +The state data filters `input` property expression is applied when the workflow transitions to the current state and receives its data input. +It can be used to select only data that is needed and disregard what is not needed. +If `input` is not defined or does not select any parts of the state's data input, its data input is not filtered. + +The state data filter `output` property expression is applied right before the state transitions to the next state defined. +It filters the state's data output to be passed as data input to the transitioning state. +If the current state is the workflow end state, the filtered state's data output becomes the workflow data output. +If `output` is not defined or does not select any parts of the state's data output, its data output is not filtered. + +Results of the `input` expression should become the state data. +Results of the `output` expression should become the state data output. + +For more information on this you can reference the [data merging](#Data-Merging) section. + +Let's take a look at some examples of state filters. For our examples let's say the data input to our state is as follows: + +```json +{ + "fruits": [ "apple", "orange", "pear" ], + "vegetables": [ + { + "veggieName": "potato", + "veggieLike": true + }, + { + "veggieName": "broccoli", + "veggieLike": false } + ] } ``` -In our workflow definition, we can then use function definitions: +For the first example, our state only cares about fruits data, and we want to disregard the vegetables. To do this +we can define a state filter: ```json { -"functions": [ - { - "name": "listUsers", - "operation": "file://myuserservice.proto#UserService#ListUsers", - "type": "rpc" + "stateDataFilter": { + "input": "${ {fruits: .fruits} }" } -] } ``` -Note that the `operation` property has the following format: -```text -## +The state data output then would include only the fruits data: + +```json +{ + "fruits": [ "apple", "orange", "pear"] +} ``` -Note that the referenced function definition type in this case must be `rpc`. +

+*Figure: State Data Filter Example (media/spec/state-data-filter-example1.png)*

-For more information about functions, reference the [Functions definitions](#Function-Definition) section. +For our second example, let's say that we are interested in the only vegetable "veggie-like". +Here we have two ways of filtering our data, depending on if actions within our state need access to all vegetables, or +only the ones that are "veggie-like". -### Using Functions For Expression Evaluation +The first way would be to use both "input", and "output": -In addition to defining RESTful and RPC services and their operations, workflow [functions definitions](#Function-Definition) -can also be used to define expressions that should be evaluated during workflow execution. +```json +{ + "stateDataFilter": { + "input": "${ {vegetables: .vegetables} }", + "output": "${ {vegetables: .vegetables[] | select(.veggieLike == true)} }" + } +} +``` -Defining expressions as part of function definitions has the benefit of being able to reference -them by their logical name through workflow states where expression evaluation is required, thus making them -reusable definitions. +The states data input filter selects all the vegetables from the main data input. Once all actions have performed, before the state transition +or workflow execution completion (if this is an end state), the "output" of the state filter selects only the vegetables which are "veggie like". -Expression expression functions must declare their `type` parameter to be `expression`. +

+*Figure: State Data Filter Example (media/spec/state-data-filter-example2.png)*

-Let's take at an example of such definitions: +The second way would be to directly filter only the "veggie like" vegetables with just the data input path: ```json { -"functions": [ - { - "name": "isAdult", - "operation": ".applicant | .age >= 18", - "type": "expression" - }, - { - "name": "isMinor", - "operation": ".applicant | .age < 18", - "type": "expression" + "stateDataFilter": { + "input": "${ {vegetables: .vegetables[] | select(.veggieLike == true)} }" } -] } ``` -Here we define two reusable expression functions. Expressions in Serverless Workflow -are evaluated against the workflow data. Note that different data filters play a big role as to which parts of the -workflow data are selected. Reference the -[State Data Filtering](#State-Data-Filtering) section for more information on this. +#### Action data filters -Our expression function definitions can now be referenced by workflow states when they need to be evaluated. For example: +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| fromStateData | Workflow expression that filters state data that can be used by the action | string | no | +| results | Workflow expression that filters the actions data results | string | no | +| toStateData | Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified denotes the top-level state data element | string | no | + +
Click to view example definition +

```json { -"states":[ - { - "name":"CheckApplicant", - "type":"switch", - "dataConditions": [ - { - "name": "Applicant is adult", - "condition": "${ fn:isAdult }", - "transition": "ApproveApplication" - }, - { - "name": "Applicant is minor", - "condition": "${ fn:isMinor }", - "transition": "RejectApplication" - } - ], - "default": { - "transition": "RejectApplication" - } + "actionDataFilter": { + "fromStateData": "${ .language }", + "results": "${ .results.greeting }", + "toStateData": "${ .finalgreeting }" } -] } ``` -Note that the used function definition type in this case must be `expression`. + -For more information about functions, reference the [Functions definitions](#Function-Definition) section. +```yaml +actionDataFilter: + fromStateData: "${ .language }" + results: "${ .results.greeting }" + toStateData: "${ .finalgreeting }" +``` -For more information about workflow expressions, reference the [Workflow Expressions](#Workflow-Expressions) section. +
-### Workflow Expressions +

-Workflow model parameters can use expressions to select/manipulate workflow and/or state data. +Action data filters can be used inside [Action definitions.](#Action-Definition) +Each action can define this filter which can: -Note that different data filters play a big role as to which parts of the states data are to be used when the expression is -evaluated. Reference the -[State Data Filtering](#State-Data-Filtering) section for more information about state data filters. +* Filter the state data to select only the data that can be used within function definition arguments using its `fromStateData` property. +* Filter the action results to select only the result data that should be added/merged back into the state data +using its `results` property. +* Select the part of state data which the action data results should be added/merged to +using the `toStateData` property. -By default, all workflow expressions should be defined using the [jq](https://stedolan.github.io/jq/) syntax. -You can find more information on jq in its [manual](https://stedolan.github.io/jq/manual/). +To give an example, let's say we have an action which returns a list of breads and pasta types. +For our workflow, we are only interested into breads and not the pasta. -Serverless Workflow does not mandate the use of jq and it's possible to use an expression language -of your choice with the restriction that a single one must be used for all expressions -in a workflow definition. If a different expression language needs to be used, make sure to set the workflow -`expressionLang` property to identify it to runtime implementations. - -Note that using a non-default expression language could lower the portability of your workflow definitions -across multiple container/cloud platforms. - -All workflow expressions in this document, [specification examples](examples/README.md) as well as [comparisons examples](comparisons/README.md) -are written using the default jq syntax. - -Workflow expressions have the following format: - -```text -${ expression } -``` - -Where `expression` can be either an in-line expression, or a reference to a -defined [expression function definition](#Using-Functions-For-Expression-Evaluation). - -To reference a defined [expression function definition](#Using-Functions-For-Expression-Evaluation) -the expression must have the following format, for example: - -```text -${ fn:myExprFuncName } -``` - -Where `fn` is the namespace of the defined expression functions and -`myExprName` is the unique expression function name. 
- -To show some expression examples, let's say we have the following state data: - -```json -{ - "applicant": { - "name": "John Doe", - "age" : 26, - "address" : { - "streetAddress": "Naist street", - "city" : "Nara", - "postalCode" : "630-0192" - }, - "phoneNumbers": [ - { - "type" : "iPhone", - "number": "0123-4567-8888" - }, - { - "type" : "home", - "number": "0123-4567-8910" - } - ] - } -} -``` - -In our workflow model we can define our reusable expression function: +Action results: ```json { -"functions": [ - { - "name": "IsAdultApplicant", - "operation": ".applicant | .age > 18", - "type": "expression" - } -] + "breads": ["baguette", "brioche", "rye"], + "pasta": [ "penne", "spaghetti", "ravioli"] } ``` -We will get back to this function definition in just a bit, but now let's take a look at using -an inline expression that sets an input parameter inside an action for example: +We can use an action data filter to filter only the breads data: ```json { -"actions": [ - { - "functionRef": { - "refName": "confirmApplicant", - "parameters": { - "applicantName": "${ .applicant.name }" - } - } +"actions":[ + { + "functionRef": "breadAndPastaTypesFunction", + "actionDataFilter": { + "results": "${ {breads: .breads} }" + } } -] -} -``` - -In this case our input parameter `applicantName` would be set to "John Doe". - -Expressions can also be used to select and manipulate state data, this is in particularly useful for -state data filters. -For example let's use another inline expression: - -```json -{ - "stateDataFilter": { - "dataOutputPath": "${ .applicant | {applicant: .name, contactInfo: { email: .email, phone: .phoneNumbers }} }" - } + ] } ``` -This would set the data output of the particular state to: +The `results` will filter the action results, which would then be: ```json { - "applicant": "John Doe", - "contactInfo": { - "email": "johndoe@something.com", - "phone": [ - { - "type": "iPhone", - "number": "0123-4567-8888" - }, - { - "type": "home", - "number": "0123-4567-8910" - } - ] - } + "breads": [ + "baguette", + "brioche", + "rye" + ] } ``` -[Switch state](#Switch-State) [conditions](#switch-state-dataconditions) require for expressions to be resolved to a boolean value (true / false). - -We can now get back to our previously defined "IsAdultApplicant" expression function and reference it: +Now let's take a look at a similar example (same expected action results) and assume our current state data is: ```json { - "dataConditions": [ { - "condition": "${ fn:IsAdultApplicant }", - "transition": "StartApplication" - }] + "itemsToBuyAtStore": [ + ] } ``` -As previously mentioned, expressions are evaluated against certain subsets of data. For example -the `parameters` param of the [functionRef definition](#FunctionRef-Definition) can evaluate expressions -only against the data that is available to the [action](#Action-Definition) it belongs to. -One thing to note here are the top-level [workflow definition](#Workflow-Definition) parameters. Expressions defined -in them can only be evaluated against the initial [workflow data input](#Workflow-Data-Input). 
- -For example let's say that we have a workflow data input of: +and have the following action definition: ```json { - "inputVersion" : "1.0.0" +"actions":[ + { + "functionRef": "breadAndPastaTypesFunction", + "actionDataFilter": { + "results": "${ [ .breads[0], .pasta[1] ] }", + "toStateData": "${ .itemsToBuyAtStore }" + } + } + ] } ``` -we can use this expression in the workflow "version" parameter: +In this case, our `results` select the first bread and the second element of the pasta array. +The `toStateData` expression then selects the `itemsToBuyAtStore` array of the state data to add/merge these results +into. With this, after our action executes the state data would be: ```json { - "id": "MySampleWorkflow", - "name": "Sample Workflow", - "version": "${ .inputVersion }" + "itemsToBuyAtStore": [ + "baguette", + "spaghetti" + ] } ``` -which would set the workflow version to "1.0.0". -Note that the workflow "id" property value is not allowed to use an expression. The workflow -definition "id" must be a constant value. - -### Workflow Definition +#### Event data filters | Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| id | Workflow unique identifier | string | yes | -| name | Workflow name | string | yes | -| description | Workflow description | string | no | -| version | Workflow version | string | no | -| [start](#Start-Definition) | Workflow start definition | string | yes | -| schemaVersion | Workflow schema version | string | no | -| expressionLang | Identifies the expression language used for workflow expressions. Default value is "jq" | string | no | -| [execTimeout](#ExecTimeout-Definition) | Defines the execution timeout for a workflow instance | object | no | -| keepActive | If "true", workflow instances is not terminated when there are no active execution paths. Instance can be terminated with "terminate end definition" or reaching defined "execTimeout" | boolean | no | -| [events](#Event-Definition) | Workflow event definitions. | array or string | no | -| [functions](#Function-Definition) | Workflow function definitions. Can be either inline function definitions (if array) or URI pointing to a resource containing json/yaml function definitions (if string) | array or string| no | -| [retries](#Retry-Definition) | Workflow retries definitions. Can be either inline retries definitions (if array) or URI pointing to a resource containing json/yaml retry definitions (if string) | array or string| no | -| [states](#State-Definition) | Workflow states | array | yes | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | +| --- | --- | --- | --- | +| data | Workflow expression that filters the event data (payload) | string | no | +| toStateData | Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified denotes the top-level state data element | string | no |
Click to view example definition

@@ -570,16 +497,10 @@ definition "id" must be a constant value. ```json -{ - "id": "sampleWorkflow", - "version": "1.0", - "name": "Sample Workflow", - "description": "Sample Workflow", - "start": "MyStartingState", - "states": [], - "functions": [], - "events": [], - "retries":[] +{ + "eventDataFilter": { + "data": "${ .data.results }" + } } ``` @@ -587,15 +508,8 @@ definition "id" must be a constant value. ```yaml -id: sampleWorkflow -version: '1.0' -name: Sample Workflow -description: Sample Workflow -start: MyStartingState -states: [] -functions: [] -events: [] -retries: [] +eventDataFilter: + data: "${ .data.results }" ``` @@ -604,472 +518,826 @@ retries: []

-Defines the top-level structure of a serverless workflow model. -Following figure describes the main workflow definition blocks. - -

-*Figure: Serverless Workflow Definitions Blocks*

+Event data filters can be used to filter consumed event payloads. +They can be used to: -The `id` property defines the unique workflow identifier. +* Filter the event payload to select only the data that should be added/merged into the state data +using its `data` property. +* Select the part of state data into which the event payload should be added/merged into +using the `toStateData` property. -The `name` property is the workflow logical name. +Allows event data to be filtered and added to or merged with the state data. All events have to be in the CloudEvents format +and event data filters can filter both context attributes and the event payload (data) using the `data` property. -The `description` property can be used to give further information about the workflow. +Here is an example using an event filter: -The `version` property can be used to provide a specific workflow version. +

+*Figure: Event Data Filter Example (media/spec/event-data-filter-example1.png)*

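To make the figure concrete, here is a minimal illustrative event data filter (a sketch; the `.data.results` selection mirrors the example definition above, while the `customerEvents` target element is hypothetical). It selects the `results` section of the consumed event's payload and merges it into the `customerEvents` element of the state data:

```json
{
  "eventDataFilter": {
    "data": "${ .data.results }",
    "toStateData": "${ .customerEvents }"
  }
}
```

If `toStateData` is not specified, the selected payload is merged into the top-level state data element, following the rules described in the [Data Merging](#Data-Merging) section.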
-The `start` property defines the workflow starting information. For more information see the [start definition](#Start-Definition) section. +#### Using multiple data filters -The `schemaVersion` property can be used to set the specific Serverless Workflow schema version to use -to validate this workflow markup. If not provided the latest released schema version is assumed. +As [Event states](#Event-State) can take advantage of all defined data filters. In the example below, we define +a workflow with a single event state and show how data filters can be combined. -The `expressionLang` property can be used to identify the expression language used for all expressions in -the workflow definition. The default value of this property is ["jq"](https://stedolan.github.io/jq/). -You should set this property if you chose to define [workflow expressions](#Workflow-Expressions) -with an expression language / syntax other than the default. +```json +{ + "id": "GreetCustomersWorkflow", + "name": "Greet Customers when they arrive", + "version": "1.0", + "start": "WaitForCustomerToArrive", + "states":[ + { + "name": "WaitForCustomerToArrive", + "type": "event", + "onEvents": [{ + "eventRefs": ["CustomerArrivesEvent"], + "eventDataFilter": { + "data": "${ .customer }", + "toStateData": "${ .customerInfo }" + }, + "actions":[ + { + "functionRef": { + "refName": "greetingFunction", + "arguments": { + "greeting": "${ .spanish } ", + "customerName": "${ .customerInfo.name } " + } + }, + "actionDataFilter": { + "fromStateData": "${ .hello }", + "results": "${ .greetingMessageResult }", + "toStateData": "${ .finalCustomerGreeting }" + } + } + ] + }], + "stateDataFilter": { + "input": "${ .greetings } ", + "output": "${ .finalCustomerGreeting }" + }, + "end": true + } + ], + "events": [{ + "name": "CustomerArrivesEvent", + "type": "customer-arrival-type", + "source": "customer-arrival-event-source" + }], + "functions": [{ + "name": "greetingFunction", + "operation": "http://my.api.org/myapi.json#greeting" + }] +} +``` -The `execTimeout` property is used to define execution timeout for a workflow instance. -For more information about this property and its use cases see the [execTimeout definition](#ExecTimeout-Definition) section. +The workflow data input when starting workflow execution is assumed to include greetings in different languages: -The `functions` property can be either an in-line [function](#Function-Definition) definition array, or an URI reference to -a resource containing an array of [functions](#Function-Definition) definition. -Referenced resource can be used by multiple workflow definitions. +```json +{ + "greetings": { + "hello": { + "english": "Hello", + "spanish": "Hola", + "german": "Hallo", + "russian": "Здравствуйте" + }, + "goodbye": { + "english": "Goodbye", + "spanish": "Adiós", + "german": "Auf Wiedersehen", + "russian": "Прощай" + } + } +} +``` +The workflow data input then becomes the data input of the starting workflow state. -Here is an example of using external resource for function definitions: +We also assume for this example that the CloudEvent that our event state consumes include the data (payload): -1. Workflow definition: ```json -{ - "id": "sampleWorkflow", - "version": "1.0", - "name": "Sample Workflow", - "description": "Sample Workflow", - "start": "MyStartingState", - "functions": "http://myhost:8080/functiondefs.json", - "states":[ - ... - ] +{ + "customer": { + "name": "John Michaels", + "address": "111 Some Street, SomeCity, SomeCountry", + "age": 40 + } } ``` -2. 
Function definitions resource +Here is a sample diagram showing our workflow, each numbered step on this diagram shows a certain defined point during +workflow execution at which data filters are invoked and correspond to the numbered items below. + +

+*Figure: Using Multiple Filters Example*

+ +**(1) Workflow execution starts**: Workflow data is passed to our "WaitForCustomerToArrive" event state as data input. +Workflow executes its starting state, namely the "WaitForCustomerToArrive" event state. + +The event state **stateDataFilter** is invoked to filter its data input. The filters "input" expression is evaluated and +selects only the "greetings" data. The rest of the state data input should be disregarded. + +At this point our state data should be: + ```json { - "functions": [ - { - "name":"HelloWorldFunction", - "operation":"file://myapi.json#helloWorld" - } - ] + "hello": { + "english": "Hello", + "spanish": "Hola", + "german": "Hallo", + "russian": "Здравствуйте" + }, + "goodbye": { + "english": "Goodbye", + "spanish": "Adiós", + "german": "Auf Wiedersehen", + "russian": "Прощай" + } } ``` -Referenced resource must conform to the specifications [Workflow Functions JSON Schema](schema/functions.json). - -The `events` property can be either an in-line [event](#Event-Definition) definition array, or an [URI](https://en.wikipedia.org/wiki/Uniform_Resource_Identifier) reference to -a resource containing an array of [event](#Event-Definition) definition. Referenced resource can be used by multiple workflow definitions. +**(2) CloudEvent of type "customer-arrival-type" is consumed**: Once the vent is consumed, the "eventDataFilter" is triggered. +Its "data" expression selects the "customer" object from the events data. The "toStateData" expression +says that we should add/merge this selected event data to the state data in its "customerInfo" property. If this property +exists it should be merged, if it does not exist, one should be created. -Here is an example of using external resource for event definitions: +At this point our state data contains: -1. Workflow definition: ```json -{ - "id": "sampleWorkflow", - "version": "1.0", - "name": "Sample Workflow", - "description": "Sample Workflow", - "start": "MyStartingState", - "events": "http://myhost:8080/eventsdefs.json", - "states":[ - ... - ] +{ + "hello": { + "english": "Hello", + "spanish": "Hola", + "german": "Hallo", + "russian": "Здравствуйте" + }, + "goodbye": { + "english": "Goodbye", + "spanish": "Adiós", + "german": "Auf Wiedersehen", + "russian": "Прощай" + }, + "customerInfo": { + "name": "John Michaels", + "address": "111 Some Street, SomeCity, SomeCountry", + "age": 40 + } } ``` -2. Event definitions resource +**(3) Event state performs its actions**: +Before the first action is executed, its actionDataFilter is invoked. Its "fromStateData" expression filters +the current state data to select from its data that should be available to action arguments. In this example +it selects the "hello" property from the current state data. +At this point the action is executed. +We assume that for this example "greetingFunction" returns: + ```json { - "events": [ - { - "name": "ApplicantInfo", - "type": "org.application.info", - "source": "applicationssource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - } - ] + "execInfo": { + "execTime": "10ms", + "failures": false + }, + "greetingMessageResult": "Hola John Michaels!" } ``` -Referenced resource must conform to the specifications [Workflow Events JSON Schema](schema/events.json). - -The `retries` property can be either an in-line [retry](#Retry-Definition) definition array, or an URI reference to -a resource containing an array of [retry](#Retry-Definition) definition. -Referenced resource can be used by multiple workflow definitions. 
For more information about -using and referencing retry definitions see the [Workflow Error Handling](#Workflow-Error-Handling) section. - -The `keepActive` property allows you to change the default behavior of workflow instances. -By default, as described in the [Core Concepts](#Core-Concepts) section, a workflow instance is terminated once there are no more -active execution paths, one of its active paths ends in a "terminate" [end definition](#End-Definition), or when -its [`execTimeout`](#ExecTimeout-Definition) time is reached. +After the action is executed, the actionDataFilter "results" expression is evaluated to filter the results returned from the action execution. In this case, we select only the "greetingMessageResult" element from the results. -Setting the `keepActive` property to "true" allows you change this default behavior in that a workflow instance -created from this workflow definition can only be terminated if one of its active paths ends in a "terminate" [end definition](#End-Definition), or when -its [`execTimeout`](#ExecTimeout-Definition) time is reached. -This allows you to explicitly model workflows where an instance should be kept alive, to collect (event) data for example. +The action filters "toStateData" expression then defines that we want to add/merge this action result to +state data under the "finalCustomerGreeting" element. -You can reference the [specification examples](#Examples) to see the `keepActive` property in action. +At this point, our state data contains: -#### ExecTimeout Definition +```json +{ + "hello": { + "english": "Hello", + "spanish": "Hola", + "german": "Hallo", + "russian": "Здравствуйте" + }, + "goodbye": { + "english": "Goodbye", + "spanish": "Adiós", + "german": "Auf Wiedersehen", + "russian": "Прощай" + }, + "customerInfo": { + "name": "John Michaels", + "address": "111 Some Street, SomeCity, SomeCountry", + "age": 40 + }, + "finalCustomerGreeting": "Hola John Michaels!" +} +``` -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| duration | Timeout duration (ISO 8601 duration format) | string | yes | -| interrupt | If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is stopped immediately. Default is `false` | boolean | no | -| runBefore | Name of a workflow state to be executed before workflow instance is terminated | string | no | +**(4) Event State Completes Execution**: -
Click to view example definition -

+When our event state finishes its execution, the states "stateDataFilter" "output" filter expression is executed +to filter the state data to create the final state data output. - - - - - - - - - -
+Because our event state is also an end state, its data output becomes the final [workflow data output](#Workflow-data-output). Namely: ```json -{ - "time": "PT2M", - "runBefore": "createandsendreport" +{ + "finalCustomerGreeting": "Hola John Michaels!" } ``` - - -```yaml -time: PT2M -runBefore: createandsendreport -``` +#### Data Merging -
+Consumed event data (payload) and action execution results should be merged into the state data. +Event and action data filters can be used to give more details about this operation. -

+By default (no data filters specified), when an event is consumed, its entire data section (payload) should be merged +to the state data. The merge should be applied to the entire state data JSON element. -The `duration` property defines the time duration of the execution timeout. Once a workflow instance is created, -and the amount of the defined time is reached, the workflow instance should be terminated. +In case of event and action filters, their "toStateData" property can be defined to select a specific element +of the state data with which the merge should be done against. If this element does not exist, a new one should +be created first. -The `interrupt` property defines if the currently running instance should be allowed to finish its current -execution flow before it needs to be terminated. If set to `true`, the current instance execution should stop immediately. +When merging, the state data element and the data (payload)/action result should have the same type, meaning +that you should not merge arrays with objects or objects with arrays etc. -The `runBefore` property defines a name of a workflow state to be executed before workflow instance is terminated. -States referenced by `runBefore` (as well as any other states that they transition to) must obey following rules: -* They should not have any incoming transitions (should not be part of the main workflow control-flow logic) -* They cannot be states marked for compensation (have their `usedForCompensation` property and set to `true`) -* If it is a single state, it must define an [end definition](#End-Definition), if it transitions to other states, -at last one must define it. -* They can transition only to states are also not part of the main control flow logic (and are not marked -for compensation). - -Runtime implementations should raise compile time / parsing exceptions if any of the rules mentioned above are -not obeyed in the workflow definition. +When merging elements of type object should be done by inserting all the key-value pairs from both objects into +a single combined object. If both objects contain a value for the same key, the object of the event data/action results +should "win". To give an example, let's say we have the following state data: -#### Function Definition +```json +{ + "customer": { + "name": "John", + "address": "1234 street", + "zip": "12345" + } +} +``` -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| name | Unique function name | string | yes | -| operation | If type is `rest`, #. If type is `rpc`, ##. If type is `expression`, defines the workflow expression. | string | no | -| type | Defines the function type. Is either `rest`, `rpc` or `expression`. Default is `rest` | enum | no | -| [metadata](#Workflow-Metadata) | Metadata information. Can be used to define custom function information | object | no | +and we have the following event payload that needs to be merged into the state data: -
Click to view example definition -

+```json +{ + "customer": { + "name": "John", + "zip": "54321" + } +} +``` - - - - - - - - - -
+After the merge the state data should be: ```json -{ - "name": "HelloWorldFunction", - "operation": "https://hellworldservice.api.com/api.json#helloWorld" +{ + "customer": { + "name": "John", + "address": "1234 street", + "zip": "54321" + } } ``` - +Merging array types should be done by concatenating them into a larger array including unique elements of both arrays. +To give an example, merging: -```yaml -name: HelloWorldFunction -operation: https://hellworldservice.api.com/api.json#helloWorld +```json +{ + "customers": [ + { + "name": "John", + "address": "1234 street", + "zip": "12345" + }, + { + "name": "Jane", + "address": "4321 street", + "zip": "54321" + } + ] +} ``` -
+into state data: -

+```json +{ + "customers": [ + { + "name": "Michael", + "address": "6789 street", + "zip": "6789" + } + ] +} +``` -The `name` property defines an unique name of the function definition. +should produce state data: -The `type` property defines the function type. Its value can be either `rest` or `expression`. Default value is `rest`. +```json +{ + "customers": [ + { + "name": "Michael", + "address": "6789 street", + "zip": "6789" + }, + { + "name": "John", + "address": "1234 street", + "zip": "12345" + }, + { + "name": "Jane", + "address": "4321 street", + "zip": "54321" + } + ] +} +``` -Depending on the function `type`, the `operation` property can be: -* If `type` is `rest`, a combination of the function/service OpenAPI definition document URI and the particular service operation that needs to be invoked, separated by a '#'. - For example `https://petstore.swagger.io/v2/swagger.json#getPetById`. -* If `type` is `rpc`, a combination of the gRPC proto document URI and the particular service name and service method name that needs to be invoked, separated by a '#'. -For example `file://myuserservice.proto#UserService#ListUsers`. -* If `type` is `expression`, defines the expression syntax. Take a look at the [workflow expressions section](#Workflow-Expressions) for more information on this. -The [`metadata`](#Workflow-Metadata) property allows users to define custom information to function definitions. -This allows you for example to define functions that describe of a command executions on a Docker image: +To give an example, merging: -```yaml -functions: -- name: whalesayimage - metadata: - image: docker/whalesay - command: cowsay +```json +{ + "customers": [ + { + "name": "John", + "address": "1234 street", + "zip": "12345" + }, + { + "name": "Jane", + "address": "4321 street", + "zip": "54321" + } + ] +} ``` -Note that using metadata for cases such as above heavily reduces the portability of your workflow markup. +into state data: -Function definitions themselves do not define data input parameters. Parameters can be -defined via the `parameters` property in [function definitions](#FunctionRef-Definition) inside [actions](#Action-Definition). +```json +{ + "customers": [ + { + "name": "Michael", + "address": "6789 street", + "zip": "6789" + } + ] +} +``` -#### Event Definition +should produce state data: -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| name | Unique event name | string | yes | -| source | CloudEvent source | string | yes if kind is set to "consumed", otherwise no | -| type | CloudEvent type | string | yes | -| kind | Defines the event is either `consumed` or `produced` by the workflow. Default is `consumed` | enum | no | -| [correlation](#Correlation-Definition) | Define event correlation rules for this event. Only used for consumed events | array | no | -| [metadata](#Workflow-Metadata) | Metadata information | object | no | +```json +{ + "customers": [ + { + "name": "Michael", + "address": "6789 street", + "zip": "6789" + }, + { + "name": "John", + "address": "1234 street", + "zip": "12345" + }, + { + "name": "Jane", + "address": "4321 street", + "zip": "54321" + } + ] +} +``` -
Click to view example definition -

- - - - - - - - - -
JSONYAML
+Merging number types should be done by overwriting the data from events data/action results into the merging element of the state data. +For example merging action results: ```json -{ - "name": "ApplicantInfo", - "type": "org.application.info", - "source": "applicationssource", - "kind": "consumed", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] +{ + "age": 30 } ``` - +into state data: -```yaml -name: ApplicantInfo -type: org.application.info -source: applicationssource -kind: consumed -correlation: -- contextAttributeName: applicantId +```json +{ + "age": 20 +} ``` -
+would produce state data: -

+```json +{ + "age": 30 +} +``` -Used to define events and their correlations. These events can be either consumed or produced during workflow execution as well -as can be used to [trigger function/service invocations](#EventRef-Definition). +Merging string types should be done by overwriting the data from events data/action results into the merging element of the state data. -The Serverless Workflow specification mandates that all events conform to the [CloudEvents](https://github.com/cloudevents/spec) specification. -This is to assure consistency and portability of the events format used. +Merging number types should be done by overwriting the data from events data/action results into the merging element of the state data. -The `name` property defines a single name of the event that is unique inside the workflow definition. This event name can be -then referenced within [function](#Function-Definition) and [state](#State-Definition) definitions. +### Workflow Functions -The `source` property matches this event definition with the [source](https://github.com/cloudevents/spec/blob/master/spec.md#source-1) -property of the CloudEvent required attributes. +Workflow [functions](#Function-Definition) are reusable definitions for RESTful service invocations and/or expression evaluation. +They can be referenced by their domain-specific names inside workflow [states](#State-Definition). -The `type` property matches this event definition with the [type](https://github.com/cloudevents/spec/blob/master/spec.md#type) property of the -CloudEvent required attributes. +Reference the following sections to learn more about workflow functions: +* [Using functions for RESTful service invocations](#Using-Functions-For-RESTful-Service-Invocations) +* [Using functions for RPC service invocation](#Using-Functions-For-RPC-Service-Invocations) +* [Using functions for expression evaluations](#Using-Functions-For-Expression-Evaluation) -The `kind` property defines this event as either `consumed` or `produced`. In terms of the workflow, this means it is either an event -that triggers workflow instance creation, or continuation of workflow instance execution (consumed), or an event -that the workflow instance creates during its execution (produced). -The default value (if not specified) of the `kind` property is `consumed`. -Note that for `produced` event definitions, implementations must provide the value of the CloudEvent source attribute. -In this case (i.e., when the `kind` property is set to `produced`), the `source` property of the event definition is not required. -Otherwise, (i.e., when the `kind` property is set to `consumed`), the `source` property must be defined in the event definition. +### Using Functions For RESTful Service Invocations -Event correlation plays a big role in large event-driven applications. Correlating one or more events with a particular workflow instance -can be done by defining the event correlation rules within the `correlation` property. -This property is an array of [correlation](#Correlation-Definition) definitions. -The CloudEvents specification allows users to add [Extension Context Attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) -and the correlation definitions can use these attributes to define clear matching event correlation rules. -Extension context attributes are not part of the event payload, so they are serialized the same way as other standard required attributes. 
-This means that the event payload does not have to be inspected by implementations in order to read and evaluate the defined correlation rules.

+[Functions](#Function-Definition) can be used to describe services and their operations that need to be invoked during
+workflow execution. They can be referenced by states [action definitions](#Action-Definition) to clearly
+define when the service operations should be invoked during workflow execution, as well as the data parameters
+passed to them if needed.
+Note that with Serverless Workflow, we can also define service invocations via events.
+To learn more about that, please reference the [event definitions](#Event-Definition) section,
+as well as the [action definition's](#Action-Definition) [eventRef](#EventRef-Definition) property.

-Let's take a look at an example. Here we have two events that have an extension context attribute called "patientId" (as well as "department", which
-will be used in further examples below):

+Because of an overall lack of a common way to describe different services and their operations,
+many workflow languages typically choose to define custom function definitions.
+This approach, however, often runs into issues such as lack of portability, limited capabilities, as well as
+forcing non-workflow-specific information, such as service authentication, to be added inside the workflow language.
+
+To avoid these issues, the Serverless Workflow specification mandates that details about
+RESTful services and their operations be described using the [OpenAPI Specification](https://www.openapis.org/).
+OpenAPI is a language-agnostic standard for describing RESTful services.
+This allows the Serverless Workflow language to describe RESTful services in a portable
+way, and allows workflow runtimes to utilize OpenAPI tooling and APIs to invoke service operations.
+
+Here is an example function definition for a RESTful service operation:

```json
{
-  "specversion" : "1.0",
-  "type" : "com.hospital.patient.heartRateMonitor",
-  "source" : "hospitalMonitorSystem",
-  "subject" : "HeartRateReading",
-  "id" : "A234-1234-1234",
-  "time" : "2020-01-05T17:31:00Z",
-  "patientId" : "PID-12345",
-  "department": "UrgentCare",
-  "data" : {
-    "value": "80bpm"
-  }
+"functions": [
+  {
+     "name": "sendOrderConfirmation",
+     "operation": "file://confirmationapi.json#sendOrderConfirmation"
+  }
+]
}
```
-
+It can, as previously mentioned, be referenced during workflow execution when the invocation of this service is desired.
+For example:

```json
{
-  "specversion" : "1.0",
-  "type" : "com.hospital.patient.bloodPressureMonitor",
-  "source" : "hospitalMonitorSystem",
-  "subject" : "BloodPressureReading",
-  "id" : "B234-1234-1234",
-  "time" : "2020-02-05T17:31:00Z",
-  "patientId" : "PID-12345",
-  "department": "UrgentCare",
-  "data" : {
-    "value": "110/70"
-  }
+"states": [
+  {
+      "name":"SendConfirmState",
+      "type":"operation",
+      "actions":[
+       {
+       "functionRef": "sendOrderConfirmation"
+      }],
+      "end": true
+  }]
}
```

-We can then define a correlation rule, through which all consumed events with the "hospitalMonitorSystem", and the "com.hospital.patient.heartRateMonitor"
-type that have the **same** value of the `patientId` property to be correlated to the created workflow instance:

+Note that the referenced function definition type in this case must be `rest` (default type).
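If the invoked operation also needs input, the action can use the object form of `functionRef` and select that input with a workflow expression. A minimal sketch, assuming the order id is present in the state data; the `arguments` property follows the [FunctionRef definition](#FunctionRef-Definition), and `orderId` is a hypothetical parameter of the referenced operation:

```json
{
   "functionRef": {
      "refName": "sendOrderConfirmation",
      "arguments": {
         "orderId": "${ .order.id }"
      }
   }
}
```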
-```json
-{
-"events": [
- {
-  "name": "HeartRateReadingEvent",
-  "source": "hospitalMonitorSystem",
-  "type": "com.hospital.patient.heartRateMonitor",
-  "kind": "consumed",
-  "correlation": [
-    {
-      "contextAttributeName": "patientId"
+For more information about functions, reference the [Functions definitions](#Function-Definition) section.
+
+### Using Functions For RPC Service Invocations
+
+Similar to defining invocations of operations on RESTful services, you can also use workflow
+[functions definitions](#Function-Definition) to invoke operations on services that follow the remote procedure call (RPC) protocol.
+For RPC invocations, the Serverless Workflow specification mandates that they are described using [gRPC](https://grpc.io/),
+a widely used RPC system.
+gRPC uses [Protocol Buffers](https://developers.google.com/protocol-buffers/docs/overview) to define messages, services,
+and the methods on those services that can be invoked.
+
+Let's look at an example of invoking a service method using RPC. For this example, let's say we have the following
+gRPC protocol buffer definition in a myuserservice.proto file:
+
+```text
+service UserService {
+    rpc AddUser(User) returns (google.protobuf.Empty) {
+        option (google.api.http) = {
+            post: "/api/v1/users"
+            body: "*"
+        };
+    }
+    rpc ListUsers(ListUsersRequest) returns (stream User) {
+        option (google.api.http) = {
+            get: "/api/v1/users"
+        };
+    }
+    rpc ListUsersByRole(UserRole) returns (stream User) {
+        option (google.api.http) = {
+            get: "/api/v1/users/role"
+        };
+    }
+    rpc UpdateUser(UpdateUserRequest) returns (User) {
+        option (google.api.http) = {
+            patch: "/api/v1/users/{user.id}"
+            body: "*"
+        };
+    }
 }
-  ]
-  }
-]
}
```
-If a workflow instance is created (e.g., via Event state) by consuming a "HeartRateReadingEvent" event, all other consumed events
-from the defined source and with the defined type that have the same "patientId" as the event that triggered the workflow instance
-should then also be associated with the same instance.
-
-You can also correlate multiple events together. In the following example, we assume that the workflow consumes two different event types,
-and we want to make sure that both are correlated, as in the above example, with the same "patientId":
-
+In our workflow definition, we can then use function definitions:

```json
{
-"events": [
- {
-  "name": "HeartRateReadingEvent",
-  "source": "hospitalMonitorSystem",
-  "type": "com.hospital.patient.heartRateMonitor",
-  "kind": "consumed",
-  "correlation": [
-    {
-      "contextAttributeName": "patientId"
-    }
-  ]
- },
- {
-  "name": "BloodPressureReadingEvent",
-  "source": "hospitalMonitorSystem",
-  "type": "com.hospital.patient.bloodPressureMonitor",
-  "kind": "consumed",
-  "correlation": [
-    {
-      "contextAttributeName": "patientId"
-    }
-  ]
+"functions": [
+  {
+     "name": "listUsers",
+     "operation": "file://myuserservice.proto#UserService#ListUsers",
+     "type": "rpc"
  }
]
}
```
-Event correlation can be based on equality (values of the defined "contextAttributeName" must be equal), but it can also be based
-on comparing it to custom defined values (string, or expression). For example:
+Note that the `operation` property has the following format:
+```text
+<URI_to_proto_file>#<Service_Name>#<Service_Method_Name>
+```
+
+Note that the referenced function definition type in this case must be `rpc`.
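As with RESTful functions, the RPC function can then be referenced by name from a state's actions. A minimal sketch using the `listUsers` definition above (the state name is illustrative):

```json
{
"states": [
  {
     "name": "ListCurrentUsers",
     "type": "operation",
     "actions": [
        {
           "functionRef": "listUsers"
        }
     ],
     "end": true
  }
]
}
```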
+
+For more information about functions, reference the [Functions definitions](#Function-Definition) section.
+
+### Using Functions For Expression Evaluation
+
+In addition to defining RESTful and RPC services and their operations, workflow [functions definitions](#Function-Definition)
+can also be used to define expressions that should be evaluated during workflow execution.
+
+Defining expressions as part of function definitions has the benefit of being able to reference
+them by their logical name through workflow states where expression evaluation is required, thus making them
+reusable definitions.
+
+Expression functions must declare their `type` parameter to be `expression`.
+
+Let's take a look at an example of such definitions:
+
+```json
+{
+"functions": [
+  {
+     "name": "isAdult",
+     "operation": ".applicant | .age >= 18",
+     "type": "expression"
+  },
+  {
+     "name": "isMinor",
+     "operation": ".applicant | .age < 18",
+     "type": "expression"
+  }
+]
+}
+```
+
+Here we define two reusable expression functions. Expressions in Serverless Workflow
+are evaluated against the workflow data. Note that different data filters play a big role as to which parts of the
+workflow data are selected. Reference the
+[State Data Filtering](#State-data-filters) section for more information on this.
+
+Our expression function definitions can now be referenced by workflow states when they need to be evaluated. For example:
+
+```json
+{
+"states":[
+  {
+     "name":"CheckApplicant",
+     "type":"switch",
+     "dataConditions": [
+        {
+          "name": "Applicant is adult",
+          "condition": "${ fn:isAdult }",
+          "transition": "ApproveApplication"
+        },
+        {
+          "name": "Applicant is minor",
+          "condition": "${ fn:isMinor }",
+          "transition": "RejectApplication"
+        }
+     ],
+     "default": {
+        "transition": "RejectApplication"
+     }
+  }
+]
+}
+```
+
+Note that the used function definition type in this case must be `expression`.
+
+For more information about functions, reference the [Functions definitions](#Function-Definition) section.
+
+For more information about workflow expressions, reference the [Workflow Expressions](#Workflow-Expressions) section.
+
+### Workflow Expressions
+
+Workflow model parameters can use expressions to select/manipulate workflow and/or state data.
+
+Note that different data filters play a big role as to which parts of the states data are to be used when the expression is
+evaluated. Reference the
+[State Data Filtering](#State-data-filters) section for more information about state data filters.
+
+By default, all workflow expressions should be defined using the [jq](https://stedolan.github.io/jq/) syntax.
+You can find more information on jq in its [manual](https://stedolan.github.io/jq/manual/).
+
+Serverless Workflow does not mandate the use of jq, and it is possible to use an expression language
+of your choice, with the restriction that a single one must be used for all expressions
+in a workflow definition. If a different expression language needs to be used, make sure to set the workflow
+`expressionLang` property to identify it to runtime implementations.
+
+Note that using a non-default expression language could lower the portability of your workflow definitions
+across multiple container/cloud platforms.
+
+All workflow expressions in this document, [specification examples](examples/README.md) as well as [comparisons examples](comparisons/README.md)
+are written using the default jq syntax.
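For illustration, a workflow definition that opts out of the default jq syntax would declare the language it uses via the `expressionLang` property. A minimal sketch, mirroring the sample workflow used elsewhere in this document; the "jsonpath" identifier is hypothetical and only meaningful if the targeted runtime supports it:

```json
{
   "id": "sampleWorkflow",
   "version": "1.0",
   "name": "Sample Workflow",
   "start": "MyStartingState",
   "expressionLang": "jsonpath",
   "states": []
}
```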
+ +Workflow expressions have the following format: + +```text +${ expression } +``` + +Where `expression` can be either an in-line expression, or a reference to a +defined [expression function definition](#Using-Functions-For-Expression-Evaluation). + +To reference a defined [expression function definition](#Using-Functions-For-Expression-Evaluation) +the expression must have the following format, for example: + +```text +${ fn:myExprFuncName } +``` + +Where `fn` is the namespace of the defined expression functions and +`myExprName` is the unique expression function name. + +To show some expression examples, let's say we have the following state data: + +```json +{ + "applicant": { + "name": "John Doe", + "age" : 26, + "address" : { + "streetAddress": "Naist street", + "city" : "Nara", + "postalCode" : "630-0192" + }, + "phoneNumbers": [ + { + "type" : "iPhone", + "number": "0123-4567-8888" + }, + { + "type" : "home", + "number": "0123-4567-8910" + } + ] + } +} +``` + +In our workflow model we can define our reusable expression function: + +```json +{ +"functions": [ + { + "name": "IsAdultApplicant", + "operation": ".applicant | .age > 18", + "type": "expression" + } +] +} +``` + +We will get back to this function definition in just a bit, but now let's take a look at using +an inline expression that sets an input parameter inside an action for example: + +```json +{ +"actions": [ { - "contextAttributeName": "department", - "contextAttributeValue" : "UrgentCare" + "functionRef": { + "refName": "confirmApplicant", + "parameters": { + "applicantName": "${ .applicant.name }" + } + } } - ] - } ] } ``` -In this example, we have two correlation rules defined: The first one is on the "patientId" CloudEvent context attribute, meaning again that -all consumed events from this source and type must have the same "patientId" to be considered. The second rule -says that these events must all have a context attribute named "department" with the value of "UrgentCare". +In this case our input parameter `applicantName` would be set to "John Doe". -This allows developers to write orchestration workflows that are specifically targeted to patients that are in the hospital urgent care unit, -for example. +Expressions can also be used to select and manipulate state data, this is in particularly useful for +state data filters. -#### Correlation Definition +For example let's use another inline expression: + +```json +{ + "stateDataFilter": { + "output": "${ .applicant | {applicant: .name, contactInfo: { email: .email, phone: .phoneNumbers }} }" + } +} +``` + +This would set the data output of the particular state to: + +```json +{ + "applicant": "John Doe", + "contactInfo": { + "email": "johndoe@something.com", + "phone": [ + { + "type": "iPhone", + "number": "0123-4567-8888" + }, + { + "type": "home", + "number": "0123-4567-8910" + } + ] + } +} +``` + +[Switch state](#Switch-State) [conditions](#switch-state-dataconditions) require for expressions to be resolved to a boolean value (true / false). + +We can now get back to our previously defined "IsAdultApplicant" expression function and reference it: + +```json +{ + "dataConditions": [ { + "condition": "${ fn:IsAdultApplicant }", + "transition": "StartApplication" + }] +} +``` + +As previously mentioned, expressions are evaluated against certain subsets of data. 
For example,
+the `parameters` param of the [functionRef definition](#FunctionRef-Definition) can evaluate expressions
+only against the data that is available to the [action](#Action-Definition) it belongs to.
+One thing to note here is the top-level [workflow definition](#Workflow-Definition) parameters. Expressions defined
+in them can only be evaluated against the initial [workflow data input](#Workflow-Data-Input).
+
+For example, let's say that we have a workflow data input of:
+
+```json
+{
+   "inputVersion" : "1.0.0"
+}
+```
+
+we can use this expression in the workflow "version" parameter:
+
+```json
+{
+   "id": "MySampleWorkflow",
+   "name": "Sample Workflow",
+   "version": "${ .inputVersion }"
+}
+```
+
+which would set the workflow version to "1.0.0".
+Note that the workflow "id" property value is not allowed to use an expression. The workflow
+definition "id" must be a constant value.
+
+### Workflow Definition

| Parameter | Description | Type | Required |
-| --- | --- | --- | --- |
-| contextAttributeName | CloudEvent Extension Context Attribute name | string | yes |
-| contextAttributeValue | CloudEvent Extension Context Attribute name | string | no |
+| --- | --- | --- | --- |
+| id | Workflow unique identifier | string | yes |
+| name | Workflow name | string | yes |
+| description | Workflow description | string | no |
+| version | Workflow version | string | no |
+| [start](#Start-Definition) | Workflow start definition | string | yes |
+| schemaVersion | Workflow schema version | string | no |
+| expressionLang | Identifies the expression language used for workflow expressions. Default value is "jq" | string | no |
+| [execTimeout](#ExecTimeout-Definition) | Defines the execution timeout for a workflow instance | object | no |
+| keepActive | If "true", the workflow instance is not terminated when there are no active execution paths. The instance can be terminated with a "terminate" end definition or by reaching the defined "execTimeout" | boolean | no |
+| [events](#Event-Definition) | Workflow event definitions. | array or string | no |
+| [functions](#Function-Definition) | Workflow function definitions. Can be either inline function definitions (if array) or URI pointing to a resource containing json/yaml function definitions (if string) | array or string| no |
+| [retries](#Retry-Definition) | Workflow retries definitions. Can be either inline retries definitions (if array) or URI pointing to a resource containing json/yaml retry definitions (if string) | array or string| no |
+| [states](#State-Definition) | Workflow states | array | yes |
+| [metadata](#Workflow-Metadata) | Metadata information| object | no |
Click to view example definition

@@ -1084,15 +1352,15 @@ for example. ```json { - "correlation": [ - { - "contextAttributeName": "patientId" - }, - { - "contextAttributeName": "department", - "contextAttributeValue" : "UrgentCare" - } - ] + "id": "sampleWorkflow", + "version": "1.0", + "name": "Sample Workflow", + "description": "Sample Workflow", + "start": "MyStartingState", + "states": [], + "functions": [], + "events": [], + "retries":[] } ``` @@ -1100,57 +1368,151 @@ for example. ```yaml -correlation: -- contextAttributeName: patientId -- contextAttributeName: department - contextAttributeValue: UrgentCare +id: sampleWorkflow +version: '1.0' +name: Sample Workflow +description: Sample Workflow +start: MyStartingState +states: [] +functions: [] +events: [] +retries: [] +``` + + + + + +

+ +Defines the top-level structure of a serverless workflow model. +Following figure describes the main workflow definition blocks. + +

+Serverless Workflow Definitions Blocks +

+ +The `id` property defines the unique workflow identifier. + +The `name` property is the workflow logical name. + +The `description` property can be used to give further information about the workflow. + +The `version` property can be used to provide a specific workflow version. + +The `start` property defines the workflow starting information. For more information see the [start definition](#Start-Definition) section. + +The `schemaVersion` property can be used to set the specific Serverless Workflow schema version to use +to validate this workflow markup. If not provided the latest released schema version is assumed. + +The `expressionLang` property can be used to identify the expression language used for all expressions in +the workflow definition. The default value of this property is ["jq"](https://stedolan.github.io/jq/). +You should set this property if you chose to define [workflow expressions](#Workflow-Expressions) +with an expression language / syntax other than the default. + +The `execTimeout` property is used to define execution timeout for a workflow instance. +For more information about this property and its use cases see the [execTimeout definition](#ExecTimeout-Definition) section. + +The `functions` property can be either an in-line [function](#Function-Definition) definition array, or an URI reference to +a resource containing an array of [functions](#Function-Definition) definition. +Referenced resource can be used by multiple workflow definitions. + +Here is an example of using external resource for function definitions: + +1. Workflow definition: +```json +{ + "id": "sampleWorkflow", + "version": "1.0", + "name": "Sample Workflow", + "description": "Sample Workflow", + "start": "MyStartingState", + "functions": "http://myhost:8080/functiondefs.json", + "states":[ + ... + ] +} +``` + +2. Function definitions resource +```json +{ + "functions": [ + { + "name":"HelloWorldFunction", + "operation":"file://myapi.json#helloWorld" + } + ] +} +``` + +Referenced resource must conform to the specifications [Workflow Functions JSON Schema](schema/functions.json). + +The `events` property can be either an in-line [event](#Event-Definition) definition array, or an [URI](https://en.wikipedia.org/wiki/Uniform_Resource_Identifier) reference to +a resource containing an array of [event](#Event-Definition) definition. Referenced resource can be used by multiple workflow definitions. + +Here is an example of using external resource for event definitions: + +1. Workflow definition: +```json +{ + "id": "sampleWorkflow", + "version": "1.0", + "name": "Sample Workflow", + "description": "Sample Workflow", + "start": "MyStartingState", + "events": "http://myhost:8080/eventsdefs.json", + "states":[ + ... + ] +} +``` + +2. Event definitions resource +```json +{ + "events": [ + { + "name": "ApplicantInfo", + "type": "org.application.info", + "source": "applicationssource", + "correlation": [ + { + "contextAttributeName": "applicantId" + } + ] + } + ] +} ``` - - - - - - -Used to define event correlation rules. Only usable for `consumed` event definitions. - -The `contextAttributeName` property defines the name of the CloudEvent [extension context attribute](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes). -The `contextAttributeValue` property defines the value of the defined the CloudEvent [extension context attribute](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes). 
+Referenced resource must conform to the specification's [Workflow Events JSON Schema](schema/events.json).

-#### State Definition

+The `retries` property can be either an in-line [retry](#Retry-Definition) definition array, or a URI reference to
+a resource containing an array of [retry](#Retry-Definition) definitions.
+Referenced resource can be used by multiple workflow definitions. For more information about
+using and referencing retry definitions see the [Workflow Error Handling](#Workflow-Error-Handling) section.

-States define building blocks of the Serverless Workflow. The specification defines the following states:

+The `keepActive` property allows you to change the default behavior of workflow instances.
+By default, as described in the [Core Concepts](#Core-Concepts) section, a workflow instance is terminated once there are no more
+active execution paths, one of its active paths ends in a "terminate" [end definition](#End-Definition), or when
+its [`execTimeout`](#ExecTimeout-Definition) time is reached.

-| Name | Description | Consumes events? | Produces events? | Executes actions? | Handles errors/retries? | Allows parallel execution? | Makes data-based transitions? | Can be workflow start state? | Can be workflow end state? |
-| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
-| **[Event](#Event-State)** | Define events that trigger action execution | yes | yes | yes | yes | yes | no | yes | yes |
-| **[Operation](#Operation-State)** | Execute one or more actions | no | yes | yes | yes | yes | no | yes | yes |
-| **[Switch](#Switch-State)** | Define data-based or event-based workflow transitions | no | yes | no | yes | no | yes | yes | no |
-| **[Delay](#Delay-State)** | Delay workflow execution | no | yes | no | yes | no | no | yes | yes |
-| **[Parallel](#Parallel-State)** | Causes parallel execution of branches (set of states) | no | yes | no | yes | yes | no | yes | yes |
-| **[SubFlow](#SubFlow-State)** | Represents the invocation of another workflow from within a workflow | no | yes | no | yes | no | no | yes | yes |
-| **[Inject](#Inject-State)** | Inject static data into state data | no | yes | no | yes | no | no | yes | yes |
-| **[ForEach](#ForEach-State)** | Parallel execution of states for each element of a data array | no | yes | no | yes | yes | no | yes | yes |
-| **[Callback](#Callback-State)** | Manual decision step. Executes a function and waits for callback event that indicates completion of the manual decision | yes | yes | yes | yes | no | no | yes | yes |
+Setting the `keepActive` property to "true" allows you to change this default behavior in that a workflow instance
+created from this workflow definition can only be terminated if one of its active paths ends in a "terminate" [end definition](#End-Definition), or when
+its [`execTimeout`](#ExecTimeout-Definition) time is reached.
+This allows you to explicitly model workflows where an instance should be kept alive, to collect (event) data for example.

-The following is a detailed description of each of the defined states.
+You can reference the [specification examples](#Examples) to see the `keepActive` property in action.

-#### Event State
+#### ExecTimeout Definition

| Parameter | Description | Type | Required |
| --- | --- | --- | --- |
-| id | Unique state id | string | no |
-| name | State name | string | yes |
-| type | State type | string | yes |
-| exclusive | If "true", consuming one of the defined events causes its associated actions to be performed.
If "false", all of the defined events must be consumed in order for actions to be performed. Default is "true" | boolean | no | -| [onEvents](#eventstate-onevents) | Define the events to be consumed and optional actions to be performed | array | yes | -| [timeout](#eventstate-timeout) | Time period to wait for incoming events (ISO 8601 format). For example: "PT15M" (wait 15 minutes), or "P2DT3H4M" (wait 2 days, 3 hours and 4 minutes)| string | no | -| [stateDataFilter](#state-data-filter) | State data filter definition| object | no | -| [transition](#Transitions) | Next transition of the workflow after all the actions have been performed | object | yes | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [end](#End-Definition) | Is this state an end state | object | no | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | +| duration | Timeout duration (ISO 8601 duration format) | string | yes | +| interrupt | If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is stopped immediately. Default is `false` | boolean | no | +| runBefore | Name of a workflow state to be executed before workflow instance is terminated | string | no | +
Click to view example definition

@@ -1164,47 +1526,9 @@ The following is a detailed description of each of the defined states. ```json -{ -"name": "MonitorVitals", -"type": "event", -"exclusive": true, -"onEvents": [{ - "eventRefs": ["HighBodyTemperature"], - "actions": [{ - "functionRef": { - "refName": "sendTylenolOrder", - "arguments": { - "patientid": "${ .patientId }" - } - } - }] - }, - { - "eventRefs": ["HighBloodPressure"], - "actions": [{ - "functionRef": { - "refName": "callNurse", - "arguments": { - "patientid": "${ .patientId }" - } - } - }] - }, - { - "eventRefs": ["HighRespirationRate"], - "actions": [{ - "functionRef": { - "refName": "callPulmonologist", - "arguments": { - "patientid": "${ .patientId }" - } - } - }] - } -], -"end": { - "terminate": true -} +{ + "time": "PT2M", + "runBefore": "createandsendreport" } ``` @@ -1212,33 +1536,8 @@ The following is a detailed description of each of the defined states. ```yaml -name: MonitorVitals -type: event -exclusive: true -onEvents: -- eventRefs: - - HighBodyTemperature - actions: - - functionRef: - refName: sendTylenolOrder - arguments: - patientid: "${ .patientId }" -- eventRefs: - - HighBloodPressure - actions: - - functionRef: - refName: callNurse - arguments: - patientid: "${ .patientId }" -- eventRefs: - - HighRespirationRate - actions: - - functionRef: - refName: callPulmonologist - arguments: - patientid: "${ .patientId }" -end: - terminate: true +time: PT2M +runBefore: createandsendreport ``` @@ -1247,44 +1546,32 @@ end:

-Event states await one or more events and perform actions when they are received. -If defined as the workflow starting state, the event state definition controls when the workflow -instances should be created. - -The `exclusive` property determines if the state should wait for any of the defined events in the `onEvents` array, or -if all defined events must be present for their associated actions to be performed. - -Following two figures illustrate the `exclusive` property: - -

-Event state with exclusive set to true -

- -If the Event state in this case is a workflow starting state, the occurrence of *any* of the defined events would start a new workflow instance. - -

-Event state with exclusive set to false -

+The `duration` property defines the time duration of the execution timeout. Once a workflow instance is created
+and the defined amount of time is reached, the workflow instance should be terminated.

-If the Event state in this case is a workflow starting state, the occurrence of *all* defined events would start a new
- workflow instance.
-
-In order to consider only events that are related to each other, we need to set the `correlation` property in the workflow
- [events definitions](#Event-Definition). This allows us to set up event correlation rules against the events
- extension context attributes.
+The `interrupt` property defines whether the currently running instance should be allowed to finish its current
+execution flow before it needs to be terminated. If set to `true`, the current instance execution should stop immediately.

-If the Event state is not a workflow starting state, the `timeout` property can be used to define the time duration from the
-invocation of the event state. If the defined event, or events have not been received during this time,
-the state should transition to the next state or can end the workflow execution (if it is an end state).
+The `runBefore` property defines the name of a workflow state to be executed before the workflow instance is terminated.
+States referenced by `runBefore` (as well as any other states that they transition to) must obey the following rules:
+* They should not have any incoming transitions (should not be part of the main workflow control-flow logic).
+* They cannot be states marked for compensation (have their `usedForCompensation` property set to `true`).
+* If it is a single state, it must define an [end definition](#End-Definition); if it transitions to other states,
+at least one of them must define it.
+* They can transition only to states that are also not part of the main control-flow logic (and are not marked
+for compensation).
+
+Runtime implementations should raise compile time / parsing exceptions if any of the rules mentioned above are
+not obeyed in the workflow definition.

-#### Event State: onEvents Definition
+#### Function Definition

| Parameter | Description | Type | Required |
| --- | --- | --- | --- |
-| eventRefs | References one or more unique event names in the defined workflow [events](#Event-Definition) | array | yes |
-| actionMode | Specifies how actions are to be performed (in sequence of parallel). Default is "sequential" | string | no |
-| [actions](#Action-Definition) | Actions to be performed | array | no |
-| [eventDataFilter](#event-data-filter) | Event data filter definition | object | no |
+| name | Unique function name | string | yes |
+| operation | If type is `rest`, `<path_to_openapi_definition>#<operation_id>`. If type is `rpc`, `<URI_to_proto_file>#<Service_Name>#<Service_Method_Name>`. If type is `expression`, defines the workflow expression. | string | no |
+| type | Defines the function type. Is either `rest`, `rpc` or `expression`. Default is `rest` | enum | no |
+| [metadata](#Workflow-Metadata) | Metadata information. Can be used to define custom function information | object | no |
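To put the three `operation` formats side by side, here is a sketch of a `functions` array that reuses the OpenAPI, gRPC, and expression examples appearing elsewhere in this document (the function names are illustrative):

```json
{
"functions": [
  {
     "name": "getPetById",
     "operation": "https://petstore.swagger.io/v2/swagger.json#getPetById"
  },
  {
     "name": "listUsers",
     "operation": "file://myuserservice.proto#UserService#ListUsers",
     "type": "rpc"
  },
  {
     "name": "isAdult",
     "operation": ".applicant | .age >= 18",
     "type": "expression"
  }
]
}
```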
Click to view example definition

@@ -1294,149 +1581,67 @@ the state should transition to the next state or can end the workflow execution JSON YAML - - - -```json -{ - "eventRefs": ["HighBodyTemperature"], - "actions": [{ - "functionRef": { - "refName": "sendTylenolOrder", - "arguments": { - "patientid": "${ .patientId }" - } - } - }] -} -``` - - - - -```yaml -eventRefs: -- HighBodyTemperature -actions: -- functionRef: - refName: sendTylenolOrder - arguments: - patientid: "${ .patientId }" -``` - - - - - -

- -OnEvent definition allow you to define which [actions](#Action-Definition) are to be performed -for the one or more [events definitions](#Event-Definition) defined in the `eventRefs` property. - -The `actionMode` property defines if the defined actions need to be performed sequentially or in parallel. - -The `actions` property defines a list of actions to be performed. - -When specifying the `onEvents` definition it is important to consider the Event states `exclusive` property, -because it determines how 'onEvents' is interpreted. -Let's look at the following JSON definition of 'onEvents' to show this: - -```json -{ - "onEvents": [{ - "eventRefs": ["HighBodyTemperature", "HighBloodPressure"], - "actions": [{ - "functionRef": { - "refName": "SendTylenolOrder", - "arguments": { - "patient": "${ .patientId }" - } - } - }, - { - "functionRef": { - "refName": "CallNurse", - "arguments": { - "patient": "${ .patientId }" - } - } - } - ] - }] + + + +```json +{ + "name": "HelloWorldFunction", + "operation": "https://hellworldservice.api.com/api.json#helloWorld" } ``` -Depending on the value of the Event states `exclusive` property, this definition can mean two different things: - -1. If `exclusive` is set to "true", the consumption of **either** the `HighBodyTemperature` or `HighBloodPressure` events will trigger action execution. - -2. If `exclusive` is set to "false", the consumption of **both** the `HighBodyTemperature` and `HighBloodPressure` events will trigger action execution. + + -This is visualized in the diagram below: +```yaml +name: HelloWorldFunction +operation: https://hellworldservice.api.com/api.json#helloWorld +``` -

-Event onEvents example -

+ + + -#### Event State: Timeout + -The event state timeout period is described in the ISO 8601 data and time format. -You can specify for example "PT15M" to represent 15 minutes or "P2DT3H4M" to represent 2 days, 3 hours and 4 minutes. -Timeout values should always be represented as durations and not as time/repeating intervals. +The `name` property defines an unique name of the function definition. -The timeout property needs to be described in detail as it depends on whether or not the Event state is a workflow starting -state or not. +The `type` property defines the function type. Its value can be either `rest` or `expression`. Default value is `rest`. -If the Event state is a workflow starting state, incoming events may trigger workflow instances. In this case, -if the `exclusive` property is set to true, the timeout property should be ignored. +Depending on the function `type`, the `operation` property can be: +* If `type` is `rest`, a combination of the function/service OpenAPI definition document URI and the particular service operation that needs to be invoked, separated by a '#'. + For example `https://petstore.swagger.io/v2/swagger.json#getPetById`. +* If `type` is `rpc`, a combination of the gRPC proto document URI and the particular service name and service method name that needs to be invoked, separated by a '#'. +For example `file://myuserservice.proto#UserService#ListUsers`. +* If `type` is `expression`, defines the expression syntax. Take a look at the [workflow expressions section](#Workflow-Expressions) for more information on this. -If the `exclusive` property is set to false, in this case, the defined timeout represents the time -between arrival of specified events. To give an example, consider the following: +The [`metadata`](#Workflow-Metadata) property allows users to define custom information to function definitions. +This allows you for example to define functions that describe of a command executions on a Docker image: -```json -{ -"states": [ -{ - "name": "ExampleEventState", - "type": "event", - "exclusive": false, - "timeout": "PT2M", - "onEvents": [ - { - "eventRefs": [ - "ExampleEvent1", - "ExampleEvent2" - ], - "actions": [ - ... - ] - } - ], - "end": { - "terminate": true - } -} -] -} +```yaml +functions: +- name: whalesayimage + metadata: + image: docker/whalesay + command: cowsay ``` -The first timeout would start once any of the referenced events are consumed. If the second event does not occur within -the defined timeout, no workflow instance should be created. +Note that using metadata for cases such as above heavily reduces the portability of your workflow markup. -If the event state is not a workflow starting state, the `timeout` property is relative to the time when the -state becomes active. If the defined event conditions (regardless of the value of the exclusive property) -are not satisfied within the defined timeout period, the event state should transition to the next state or end the workflow -instance in case it is an end state without performing any actions. +Function definitions themselves do not define data input parameters. Parameters can be +defined via the `parameters` property in [function definitions](#FunctionRef-Definition) inside [actions](#Action-Definition). 
-#### Action Definition +#### Event Definition | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| name | Unique action name | string | no | -| [functionRef](#FunctionRef-Definition) | References a reusable function definition | object | yes if `eventRef` is not used | -| [eventRef](#EventRef-Definition) | References a `trigger` and `result` reusable event definitions | object | yes if `functionRef` is not used | -| timeout | Time period to wait for function execution to complete or the resultEventRef to be consumed (ISO 8601 format). For example: "PT15M" (15 minutes), or "P2DT3H4M" (2 days, 3 hours and 4 minutes)| string | no | -| [actionDataFilter](#action-data-filter) | Action data filter definition | object | no | +| name | Unique event name | string | yes | +| source | CloudEvent source | string | yes if kind is set to "consumed", otherwise no | +| type | CloudEvent type | string | yes | +| kind | Defines the event is either `consumed` or `produced` by the workflow. Default is `consumed` | enum | no | +| [correlation](#Correlation-Definition) | Define event correlation rules for this event. Only used for consumed events | array | no | +| [metadata](#Workflow-Metadata) | Metadata information | object | no |
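For contrast with the consumed event shown in the example definition below, a `produced` event only needs a `name`, `type`, and `kind`, since the implementation supplies the CloudEvent `source`. A minimal sketch with hypothetical name and type values:

```json
{
   "name": "OrderConfirmedEvent",
   "type": "org.orders.confirmed",
   "kind": "produced"
}
```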
Click to view example definition

@@ -1450,15 +1655,16 @@ instance in case it is an end state without performing any actions. ```json -{ - "name": "Finalize Application Action", - "functionRef": { - "refName": "finalizeApplicationFunction", - "arguments": { - "applicantid": "${ .applicantId }" - } - }, - "timeout": "PT15M" +{ + "name": "ApplicantInfo", + "type": "org.application.info", + "source": "applicationssource", + "kind": "consumed", + "correlation": [ + { + "contextAttributeName": "applicantId" + } + ] } ``` @@ -1466,12 +1672,12 @@ instance in case it is an end state without performing any actions. ```yaml -name: Finalize Application Action -functionRef: - refName: finalizeApplicationFunction - arguments: - applicantid: "${ .applicantId }" -timeout: PT15M +name: ApplicantInfo +type: org.application.info +source: applicationssource +kind: consumed +correlation: +- contextAttributeName: applicantId ``` @@ -1480,99 +1686,171 @@ timeout: PT15M

-Actions specify invocations of services during workflow execution. -Service invocation can be done in two different ways: +Used to define events and their correlations. These events can be either consumed or produced during workflow execution as well +as can be used to [trigger function/service invocations](#EventRef-Definition). -* Reference [functions definitions](#Function-Definition) by its unique name using the `functionRef` property. -* Reference a `produced` and `consumed` [event definitions](#Event-Definition) via the `eventRef` property. -In this scenario a service or a set of services we want to invoke -are not exposed via a specific resource URI for example, but can only be invoked via events. -The [eventRef](#EventRef-Definition) defines the -referenced `produced` event via its `triggerEventRef` property and a `consumed` event via its `resultEventRef` property. +The Serverless Workflow specification mandates that all events conform to the [CloudEvents](https://github.com/cloudevents/spec) specification. +This is to assure consistency and portability of the events format used. -The `timeout` property defines the amount of time to wait for function execution to complete, or the consumed event referenced by the -`resultEventRef` to become available. -It is described in ISO 8601 format, so for example "PT2M" would mean the maximum time for the function to complete -its execution is two minutes. +The `name` property defines a single name of the event that is unique inside the workflow definition. This event name can be +then referenced within [function](#Function-Definition) and [state](#State-Definition) definitions. -Possible invocation timeouts should be handled via the states [onErrors](#Workflow-Error-Handling) definition. +The `source` property matches this event definition with the [source](https://github.com/cloudevents/spec/blob/master/spec.md#source-1) +property of the CloudEvent required attributes. -#### FunctionRef Definition +The `type` property matches this event definition with the [type](https://github.com/cloudevents/spec/blob/master/spec.md#type) property of the +CloudEvent required attributes. + +The `kind` property defines this event as either `consumed` or `produced`. In terms of the workflow, this means it is either an event +that triggers workflow instance creation, or continuation of workflow instance execution (consumed), or an event +that the workflow instance creates during its execution (produced). +The default value (if not specified) of the `kind` property is `consumed`. +Note that for `produced` event definitions, implementations must provide the value of the CloudEvent source attribute. +In this case (i.e., when the `kind` property is set to `produced`), the `source` property of the event definition is not required. +Otherwise, (i.e., when the `kind` property is set to `consumed`), the `source` property must be defined in the event definition. + +Event correlation plays a big role in large event-driven applications. Correlating one or more events with a particular workflow instance +can be done by defining the event correlation rules within the `correlation` property. +This property is an array of [correlation](#Correlation-Definition) definitions. +The CloudEvents specification allows users to add [Extension Context Attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) +and the correlation definitions can use these attributes to define clear matching event correlation rules. 
+Extension context attributes are not part of the event payload, so they are serialized the same way as other standard required attributes. +This means that the event payload does not have to be inspected by implementations in order to read and evaluate the defined correlation rules. -`FunctionRef` definition can have two types, either `string` or `object`. -If `string`, it defines the name of the referenced [function](#Function-Definition). -This can be used as a short-cut definition when you don't need to define any parameters, for example: + +Let's take a look at an example. Here we have two events that have an extension context attribute called "patientId" (as well as "department", which +will be used in further examples below): ```json -"functionRef": "myFunction" +{ + "specversion" : "1.0", + "type" : "com.hospital.patient.heartRateMonitor", + "source" : "hospitalMonitorSystem", + "subject" : "HeartRateReading", + "id" : "A234-1234-1234", + "time" : "2020-01-05T17:31:00Z", + "patientId" : "PID-12345", + "department": "UrgentCare", + "data" : { + "value": "80bpm" + } +} ``` -If you need to define parameters in your `functionRef` definition, you can define -it with its `object` type which has the following properties: +and -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| refName | Name of the referenced [function](#Function-Definition) | string | yes | -| arguments | Arguments to be passed to the referenced function | object | no | +```json +{ + "specversion" : "1.0", + "type" : "com.hospital.patient.bloodPressureMonitor", + "source" : "hospitalMonitorSystem", + "subject" : "BloodPressureReading", + "id" : "B234-1234-1234", + "time" : "2020-02-05T17:31:00Z", + "patientId" : "PID-12345", + "department": "UrgentCare", + "data" : { + "value": "110/70" + } +} +``` -
Click to view example definition -

+We can then define a correlation rule, through which all consumed events with the "hospitalMonitorSystem", and the "com.hospital.patient.heartRateMonitor" +type that have the **same** value of the `patientId` property to be correlated to the created workflow instance: + +```json +{ +"events": [ + { + "name": "HeartRateReadingEvent", + "source": "hospitalMonitorSystem", + "type": "com.hospital.patient.heartRateMonitor", + "kind": "consumed", + "correlation": [ + { + "contextAttributeName": "patientId" + } + ] + } +] +} +``` + +If a workflow instance is created (e.g., via Event state) by consuming a "HeartRateReadingEvent" event, all other consumed events +from the defined source and with the defined type that have the same "patientId" as the event that triggered the workflow instance +should then also be associated with the same instance. + +You can also correlate multiple events together. In the following example, we assume that the workflow consumes two different event types, +and we want to make sure that both are correlated, as in the above example, with the same "patientId": - - - - - - - - - -
JSONYAML
```json { - "refName": "finalizeApplicationFunction", - "arguments": { - "applicantid": "${ .applicantId }" +"events": [ + { + "name": "HeartRateReadingEvent", + "source": "hospitalMonitorSystem", + "type": "com.hospital.patient.heartRateMonitor", + "kind": "consumed", + "correlation": [ + { + "contextAttributeName": "patientId" } + ] + }, + { + "name": "BloodPressureReadingEvent", + "source": "hospitalMonitorSystem", + "type": "com.hospital.patient.bloodPressureMonitor", + "kind": "consumed", + "correlation": [ + { + "contextAttributeName": "patientId" + } + ] + } +] } ``` - - -```yaml -refName: finalizeApplicationFunction -arguments: - applicantid: "${ .applicantId }" -``` - -
- -

- -The `refName` property is the name of the referenced [function](#Function-Definition). -The `arguments` property defines the arguments that are to be passed to the referenced function. -Values of the `arguments` property can be either static values, or an expression, for example: +Event correlation can be based on equality (values of the defined "contextAttributeName" must be equal), but it can also be based +on comparing it to custom defined values (string, or expression). For example: ```json { - "refName": "checkFundsAvailabe", - "arguments": { - "account": "${ .accountId }", - "forAmount": "${.payment.amount }", - "insufficientMessage": "The requested amount is not available." - } +"events": [ + { + "name": "HeartRateReadingEvent", + "source": "hospitalMonitorSystem", + "type": "com.hospital.patient.heartRateMonitor", + "kind": "consumed", + "correlation": [ + { + "contextAttributeName": "patientId" + }, + { + "contextAttributeName": "department", + "contextAttributeValue" : "UrgentCare" + } + ] + } +] } ``` -#### EventRef Definition +In this example, we have two correlation rules defined: The first one is on the "patientId" CloudEvent context attribute, meaning again that +all consumed events from this source and type must have the same "patientId" to be considered. The second rule +says that these events must all have a context attribute named "department" with the value of "UrgentCare". + +This allows developers to write orchestration workflows that are specifically targeted to patients that are in the hospital urgent care unit, +for example. + +#### Correlation Definition | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| [triggerEventRef](#Event-Definition) | Reference to the unique name of a `produced` event definition | string | yes | -| [resultEventRef](#Event-Definitions) | Reference to the unique name of a `consumed` event definition | string | yes | -| data | If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by `triggerEventRef`. If object type, a custom object to become the data (payload) of the event referenced by `triggerEventRef`. | string or object | no | -| contextAttributes | Add additional event extension context attributes to the trigger/produced event | object | no | +| contextAttributeName | CloudEvent Extension Context Attribute name | string | yes | +| contextAttributeValue | CloudEvent Extension Context Attribute name | string | no |
Click to view example definition

@@ -1586,12 +1864,16 @@ Values of the `arguments` property can be either static values, or an expression ```json -{ - "eventRef": { - "triggerEventRef": "MakeVetAppointment", - "data": "${ .patientInfo }", - "resultEventRef": "VetAppointmentInfo" - } +{ + "correlation": [ + { + "contextAttributeName": "patientId" + }, + { + "contextAttributeName": "department", + "contextAttributeValue" : "UrgentCare" + } + ] } ``` @@ -1599,10 +1881,10 @@ Values of the `arguments` property can be either static values, or an expression ```yaml -eventRef: - triggerEventRef: MakeVetAppointment - data: "${ .patientInfo }" - resultEventRef: VetAppointmentInfo +correlation: +- contextAttributeName: patientId +- contextAttributeName: department + contextAttributeValue: UrgentCare ``` @@ -1611,22 +1893,45 @@ eventRef:

-References a `produced` and `consumed` [event definitions](#Event-Definition) via the "triggerEventRef" and `resultEventRef` properties, respectively. +Used to define event correlation rules. Only usable for `consumed` event definitions. -The `data` property can have two types: string or object. If it is of string type, it is an expression that can select parts of state data -to be used as payload of the event referenced by `triggerEventRef`. If it is of object type, you can define a custom object to be the event payload. +The `contextAttributeName` property defines the name of the CloudEvent [extension context attribute](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes). +The `contextAttributeValue` property defines the value of the defined the CloudEvent [extension context attribute](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes). -The `contextAttributes` property allows you to add one or more [extension context attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) -to the trigger/produced event. +#### State Definition -#### Error Definition +States define building blocks of the Serverless Workflow. The specification defines the following states: + +| Name | Description | Consumes events? | Produces events? | Executes actions? | Handles errors/retries? | Allows parallel execution? | Makes data-based transitions? | Can be workflow start state? | Can be workflow end state? | +| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | +| **[Event](#Event-State)** | Define events that trigger action execution | yes | yes | yes | yes | yes | no | yes | yes | +| **[Operation](#Operation-State)** | Execute one or more actions | no | yes | yes | yes | yes | no | yes | yes | +| **[Switch](#Switch-State)** | Define data-based or event-based workflow transitions | no | yes | no | yes | no | yes | yes | no | +| **[Delay](#Delay-State)** | Delay workflow execution | no | yes | no | yes | no | no | yes | yes | +| **[Parallel](#Parallel-State)** | Causes parallel execution of branches (set of states) | no | yes | no | yes | yes | no | yes | yes | +| **[SubFlow](#SubFlow-State)** | Represents the invocation of another workflow from within a workflow | no | yes | no | yes | no | no | yes | yes | +| **[Inject](#Inject-State)** | Inject static data into state data | no | yes | no | yes | no | no | yes | yes | +| **[ForEach](#ForEach-State)** | Parallel execution of states for each element of a data array | no | yes | no | yes | yes | no | yes | yes | +| **[Callback](#Callback-State)** | Manual decision step. Executes a function and waits for callback event that indicates completion of the manual decision | yes | yes | yes | yes | no | no | yes | yes | + +The following is a detailed description of each of the defined states. + +#### Event State | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| error | Domain-specific error name, or '*' to indicate all possible errors | string | yes | -| code | Error code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. 
Should not be defined if error is set to '*' | string | no | -| retryRef | Defines the unique retry strategy definition to be used | string | no | -| [transition](#Transitions) or [end](#End-Definition) | Transition to next state to handle the error, or end workflow execution if this error is encountered | object | yes | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| exclusive | If "true", consuming one of the defined events causes its associated actions to be performed. If "false", all of the defined events must be consumed in order for actions to be performed. Default is "true" | boolean | no | +| [onEvents](#eventstate-onevents) | Define the events to be consumed and optional actions to be performed | array | yes | +| [timeout](#eventstate-timeout) | Time period to wait for incoming events (ISO 8601 format). For example: "PT15M" (wait 15 minutes), or "P2DT3H4M" (wait 2 days, 3 hours and 4 minutes)| string | no | +| [stateDataFilter](#State-data-filters) | State data filter definition| object | no | +| [transition](#Transitions) | Next transition of the workflow after all the actions have been performed | object | yes | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [end](#End-Definition) | Is this state an end state | object | no | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | +| [metadata](#Workflow-Metadata) | Metadata information| object | no |
Click to view example definition

@@ -1641,8 +1946,46 @@ to the trigger/produced event. ```json { - "error": "Item not in inventory", - "transition": "IssueRefundToCustomer" +"name": "MonitorVitals", +"type": "event", +"exclusive": true, +"onEvents": [{ + "eventRefs": ["HighBodyTemperature"], + "actions": [{ + "functionRef": { + "refName": "sendTylenolOrder", + "arguments": { + "patientid": "${ .patientId }" + } + } + }] + }, + { + "eventRefs": ["HighBloodPressure"], + "actions": [{ + "functionRef": { + "refName": "callNurse", + "arguments": { + "patientid": "${ .patientId }" + } + } + }] + }, + { + "eventRefs": ["HighRespirationRate"], + "actions": [{ + "functionRef": { + "refName": "callPulmonologist", + "arguments": { + "patientid": "${ .patientId }" + } + } + }] + } +], +"end": { + "terminate": true +} } ``` @@ -1650,8 +1993,33 @@ to the trigger/produced event. ```yaml -error: Item not in inventory -transition: IssueRefundToCustomer +name: MonitorVitals +type: event +exclusive: true +onEvents: +- eventRefs: + - HighBodyTemperature + actions: + - functionRef: + refName: sendTylenolOrder + arguments: + patientid: "${ .patientId }" +- eventRefs: + - HighBloodPressure + actions: + - functionRef: + refName: callNurse + arguments: + patientid: "${ .patientId }" +- eventRefs: + - HighRespirationRate + actions: + - functionRef: + refName: callPulmonologist + arguments: + patientid: "${ .patientId }" +end: + terminate: true ``` @@ -1660,35 +2028,44 @@ transition: IssueRefundToCustomer

-Error definitions describe errors that can occur during workflow execution and how to handle them. - -The `error`property defines the domain-specific name of the error. Users can also set the name to -`*` which is a wildcard specifying "all" errors, in the case where no other error definitions are defined, -or "all other" errors if there are other errors defined within the same states `onErrors` definition. +Event states await one or more events and perform actions when they are received. +If defined as the workflow starting state, the event state definition controls when the workflow +instances should be created. -The `code` property can be used in addition to `name` to help runtimes resolve the defined -domain-specific error to the actual technical errors/exceptions that may happen during runtime execution. +The `exclusive` property determines if the state should wait for any of the defined events in the `onEvents` array, or +if all defined events must be present for their associated actions to be performed. -The `transition` property defines the transition to the next workflow state in cases when the defined -error happens during runtime execution. +Following two figures illustrate the `exclusive` property: -If `transition` is not defined you can also define the `end` property which will end workflow execution at that point. +

+Event state with exclusive set to true +

-The `retryRef` property is used to define the retry strategy to be used for this particular error. +If the Event state in this case is a workflow starting state, the occurrence of *any* of the defined events would start a new workflow instance. -For more information, see the [Workflow Error Handling](#Workflow-Error-Handling) sections. +

+Event state with exclusive set to false +

-#### Retry Definition
+If the Event state in this case is a workflow starting state, the occurrence of *all* defined events would start a new
+ workflow instance.
+
+In order to consider only events that are related to each other, we need to set the `correlation` property in the workflow
+ [events definitions](#Event-Definition). This allows us to set up event correlation rules against the events'
+ extension context attributes.
-| Parameter | Description | Type | Required |
-| --- | --- | --- | --- |
-| name | Unique retry strategy name | string | yes |
-| delay | Time delay between retry attempts (ISO 8601 duration format) | string | no |
-| maxAttempts | Maximum number of retry attempts. Value of 0 means no retries are performed | string or number | no |
-| maxDelay | Maximum amount of delay between retry attempts (ISO 8601 duration format) | string | no |
-| increment | Static duration which will be added to the delay between successive retries (ISO 8601 duration format) | string | no |
-| multiplier | Float value by which the delay is multiplied before each attempt. For example: "1.2" meaning that each successive delay is 20% longer than the previous delay. For example, if delay is 'PT10S', then the delay between the first and second attempts will be 10 seconds, and the delay before the third attempt will be 12 seconds. | float or string | no |
-| jitter | If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0.0 and 1.0). If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) | float or string | no |
+If the Event state is not a workflow starting state, the `timeout` property can be used to define the time duration from the
+invocation of the event state. If the defined event, or events, have not been received during this time,
+the state should transition to the next state or can end the workflow execution (if it is an end state).
+
+#### Event State: onEvents Definition
+
+| Parameter | Description | Type | Required |
+| --- | --- | --- | --- |
+| eventRefs | References one or more unique event names in the defined workflow [events](#Event-Definition) | array | yes |
+| actionMode | Specifies how actions are to be performed (in sequence or in parallel). Default is "sequential" | string | no |
+| [actions](#Action-Definition) | Actions to be performed | array | no |
+| [eventDataFilter](#Event-data-filters) | Event data filter definition | object | no |
Click to view example definition

@@ -1703,10 +2080,15 @@ For more information, see the [Workflow Error Handling](#Workflow-Error-Handling ```json { - "name": "TimeoutRetryStrat", - "delay": "PT2M", - "maxAttempts": 3, - "jitter": "PT0.001S" + "eventRefs": ["HighBodyTemperature"], + "actions": [{ + "functionRef": { + "refName": "sendTylenolOrder", + "arguments": { + "patientid": "${ .patientId }" + } + } + }] } ``` @@ -1714,10 +2096,13 @@ For more information, see the [Workflow Error Handling](#Workflow-Error-Handling ```yaml -name: TimeoutRetryStrat -delay: PT2M -maxAttempts: 3 -jitter: PT0.001S +eventRefs: +- HighBodyTemperature +actions: +- functionRef: + refName: sendTylenolOrder + arguments: + patientid: "${ .patientId }" ``` @@ -1726,103 +2111,190 @@ jitter: PT0.001S

-Defines the states retry policy (strategy). This is an explicit definition and can be reused across multiple -defined workflow state errors. +OnEvent definition allow you to define which [actions](#Action-Definition) are to be performed +for the one or more [events definitions](#Event-Definition) defined in the `eventRefs` property. -The `name` property specifies the unique name of the retry definition (strategy). This unique name -can be referred by workflow states [error definitions](#Error-Definition). +The `actionMode` property defines if the defined actions need to be performed sequentially or in parallel. -The `delay` property specifies the initial time delay between retry attempts (ISO 8601 duration format). +The `actions` property defines a list of actions to be performed. + +When specifying the `onEvents` definition it is important to consider the Event states `exclusive` property, +because it determines how 'onEvents' is interpreted. +Let's look at the following JSON definition of 'onEvents' to show this: -The `increment` property specifies a static duration which will be added to the delay between successive retries. -To explain this better, let's say we have the following retry definition: ```json { - "name": "Timeout Errors Strategy", - "delay": "PT10S", - "increment": "PT2S", - "maxAttempts": 4 + "onEvents": [{ + "eventRefs": ["HighBodyTemperature", "HighBloodPressure"], + "actions": [{ + "functionRef": { + "refName": "SendTylenolOrder", + "arguments": { + "patient": "${ .patientId }" + } + } + }, + { + "functionRef": { + "refName": "CallNurse", + "arguments": { + "patient": "${ .patientId }" + } + } + } + ] + }] } ``` -which means that we will retry up to 4 times after waiting with increasing delay between attempts; -in this example 10, 12, 14, and 16 seconds between retries. -The `multiplier` property specifies the value by which the interval time is increased for each of the retry attempts. -To explain this better, let's say we have the following retry definition: +Depending on the value of the Event states `exclusive` property, this definition can mean two different things: + +1. If `exclusive` is set to "true", the consumption of **either** the `HighBodyTemperature` or `HighBloodPressure` events will trigger action execution. + +2. If `exclusive` is set to "false", the consumption of **both** the `HighBodyTemperature` and `HighBloodPressure` events will trigger action execution. + +This is visualized in the diagram below: + +

+Event onEvents example +

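Since the correlation rules mentioned above are declared on the [event definitions](#Event-Definition) themselves, the following illustrative sketch shows how two related events might share a correlation rule. The event names, types, source, and the `patientId` attribute are assumptions made only for this example:

```json
{
  "events": [
    {
      "name": "HighBodyTemperature",
      "type": "org.monitor.highBodyTemp",
      "source": "monitoringSource",
      "correlation": [
        { "contextAttributeName": "patientId" }
      ]
    },
    {
      "name": "HighBloodPressure",
      "type": "org.monitor.highBloodPressure",
      "source": "monitoringSource",
      "correlation": [
        { "contextAttributeName": "patientId" }
      ]
    }
  ]
}
```

With such rules in place, only events carrying the same `patientId` extension context attribute value should be considered related when the Event state waits for all of them.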
+
+#### Event State: Timeout
+
+The event state timeout period is described in the ISO 8601 date and time format.
+You can specify for example "PT15M" to represent 15 minutes or "P2DT3H4M" to represent 2 days, 3 hours and 4 minutes.
+Timeout values should always be represented as durations and not as time/repeating intervals.
+
+The timeout property needs to be described in detail as it depends on whether or not the Event state is a workflow starting
+state.
+
+If the Event state is a workflow starting state, incoming events may trigger workflow instances. In this case,
+if the `exclusive` property is set to true, the timeout property should be ignored.
+
+If the `exclusive` property is set to false, the defined timeout represents the time
+between arrival of specified events. To give an example, consider the following:

```json
{
-  "name": "Timeout Errors Strategy",
-  "delay": "PT10S",
-  "multiplier": 2,
-  "maxAttempts": 4
+"states": [
+{
+    "name": "ExampleEventState",
+    "type": "event",
+    "exclusive": false,
+    "timeout": "PT2M",
+    "onEvents": [
+        {
+            "eventRefs": [
+                "ExampleEvent1",
+                "ExampleEvent2"
+            ],
+            "actions": [
+                ...
+            ]
+        }
+    ],
+    "end": {
+        "terminate": true
+    }
+}
+]
}
```

-which means that we will retry up to 4 times after waiting with increasing delay between attempts;
-in this example 10, 20, 40, and 80 seconds between retries.
-The `maxAttempts` property determines the maximum number of retry attempts allowed and is a positive integer value.
+The first timeout would start once any of the referenced events are consumed. If the second event does not occur within
+the defined timeout, no workflow instance should be created.

-The `jitter` property is important to prevent certain scenarios where clients
-are retrying in sync, possibly causing or contributing to a transient failure
-precisely because they're retrying at the same time. Adding a typically small,
-bounded random amount of time to the period between retries serves the purpose
-of attempting to prevent these retries from happening simultaneously, possibly
-reducing total time to complete requests and overall congestion. How this value
-is used in the exponential backoff algorithm is left up to implementations.
+If the event state is not a workflow starting state, the `timeout` property is relative to the time when the
+state becomes active. If the defined event conditions (regardless of the value of the exclusive property)
+are not satisfied within the defined timeout period, the event state should transition to the next state or end the workflow
+instance in case it is an end state without performing any actions.

+#### Action Definition

-Alternatively, `jitter` may be defined as an absolute value specified as an ISO
-8601 duration. This way, the maximum amount of random time added is fixed and
-will not increase as new attempts are made.
+| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| name | Unique action name | string | no | +| [functionRef](#FunctionRef-Definition) | References a reusable function definition | object | yes if `eventRef` is not used | +| [eventRef](#EventRef-Definition) | References a `trigger` and `result` reusable event definitions | object | yes if `functionRef` is not used | +| timeout | Time period to wait for function execution to complete or the resultEventRef to be consumed (ISO 8601 format). For example: "PT15M" (15 minutes), or "P2DT3H4M" (2 days, 3 hours and 4 minutes)| string | no | +| [actionDataFilter](#Action-data-filters) | Action data filter definition | object | no | -The `maxDelay` property determines the maximum amount of delay that is desired between retry attempts, and is applied -after `increment`, `multiplier`, and `jitter`. +
Click to view example definition +

-To explain this better, let's say we have the following retry definition: + + + + + + + + + +
JSON | YAML
```json { - "name": "Timeout Errors Strategy", - "delay": "PT10S", - "maxDelay": "PT100S", - "multiplier": 4, - "jitter": "PT1S", - "maxAttempts": 4 + "name": "Finalize Application Action", + "functionRef": { + "refName": "finalizeApplicationFunction", + "arguments": { + "applicantid": "${ .applicantId }" + } + }, + "timeout": "PT15M" } ``` -which means that we will retry up to 4 times after waiting with increasing delay between attempts; -in this example we might observe the following series of delays: -* 11s (min(`maxDelay`, (`delay` +/- rand(`jitter`)) => min(100, 10 + 1)) -* 43s (min(`maxDelay`, (11s * `multiplier`) +/- rand(`jitter`)) => min(100, (11 * 4) - 1)) -* 100s (min(`maxDelay`, (43s * `multiplier`) +/- rand(`jitter`)) => min(100, (43 * 4) + 0)) -* 100s (min(`maxDelay`, (100s * `multiplier`) +/- rand(`jitter`)) => min(100, (100 * 4) - 1)) -For more information, refer to the [Workflow Error Handling](#Workflow-Error-Handling) sections. + -#### Transition Definition +```yaml +name: Finalize Application Action +functionRef: + refName: finalizeApplicationFunction + arguments: + applicantid: "${ .applicantId }" +timeout: PT15M +``` -`Transition` definition can have two types, either `string` or `object`. -If `string`, it defines the name of the state to transition to. -This can be used as a short-cut definition when you don't need to define any other parameters, for example: +
+ +

+
+Actions specify invocations of services during workflow execution.
+Service invocation can be done in two different ways:
+
+* Reference a [function definition](#Function-Definition) by its unique name using the `functionRef` property.
+* Reference `produced` and `consumed` [event definitions](#Event-Definition) via the `eventRef` property.
+In this scenario, the service or set of services we want to invoke
+is not exposed via a specific resource URI, but can only be invoked via events.
+The [eventRef](#EventRef-Definition) defines the
+referenced `produced` event via its `triggerEventRef` property and a `consumed` event via its `resultEventRef` property.
+
+The `timeout` property defines the amount of time to wait for function execution to complete, or the consumed event referenced by the
+`resultEventRef` to become available.
+It is described in ISO 8601 format, so for example "PT2M" would mean the maximum time for the function to complete
+its execution is two minutes.
+
+Possible invocation timeouts should be handled via the state's [onErrors](#Workflow-Error-Handling) definition.
+
+#### FunctionRef Definition
+
+`FunctionRef` definition can have two types, either `string` or `object`.
+If `string`, it defines the name of the referenced [function](#Function-Definition).
+This can be used as a short-cut definition when you don't need to define any parameters, for example:

```json
-"transition": "myNextState"
+"functionRef": "myFunction"
```

-If you need to define additional parameters in your `transition` definition, you can define
+If you need to define parameters in your `functionRef` definition, you can define
it with its `object` type which has the following properties:

| Parameter | Description | Type | Required |
| --- | --- | --- | --- |
-| [nextState](#Transitions) | Name of the state to transition to next | string | yes |
-| [compensate](#Workflow-Compensation) | If set to `true`, triggers workflow compensation before this transition is taken. Default is `false` | boolean | no |
-| produceEvents | Array of [producedEvent](#ProducedEvent-Definition) definitions. Events to be produced before the transition takes place | array | no |
+| refName | Name of the referenced [function](#Function-Definition) | string | yes |
+| arguments | Arguments to be passed to the referenced function | object | no |
Click to view example definition

@@ -1837,11 +2309,10 @@ it with its `object` type which has the following properties: ```json { - "produceEvents": [{ - "eventRef": "produceResultEvent", - "data": "${ .result.data }" - }], - "nextState": "EvalResultState" + "refName": "finalizeApplicationFunction", + "arguments": { + "applicantid": "${ .applicantId }" + } } ``` @@ -1849,10 +2320,9 @@ it with its `object` type which has the following properties: ```yaml -produceEvents: -- eventRef: produceResultEvent - data: "${ .result.data }" -nextState: EvalResultState +refName: finalizeApplicationFunction +arguments: + applicantid: "${ .applicantId }" ``` @@ -1861,29 +2331,29 @@ nextState: EvalResultState

-The `nextState` property defines the name of the state to transition to next. -The `compensate` property allows you to trigger [compensation](#Workflow-Compensation) before the transition (if set to true). -The `produceEvents` property allows you to define a list of events to produce before the transition happens. +The `refName` property is the name of the referenced [function](#Function-Definition). +The `arguments` property defines the arguments that are to be passed to the referenced function. +Values of the `arguments` property can be either static values, or an expression, for example: -Transitions allow you to move from one state (control-logic block) to another. For more information see the -[Transitions section](#Transitions) section. +```json +{ + "refName": "checkFundsAvailabe", + "arguments": { + "account": "${ .accountId }", + "forAmount": "${.payment.amount }", + "insufficientMessage": "The requested amount is not available." + } +} +``` -#### Operation State +#### EventRef Definition | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | -| name | State name | string | yes | -| type | State type | string | yes | -| actionMode | Should actions be performed sequentially or in parallel | string | no | -| [actions](#Action-Definition) | Actions to be performed | array | yes | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after all the actions have been performed | object | yes (if end is not defined) | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | -| [end](#End-Definition) | Is this state an end state | object | no | +| [triggerEventRef](#Event-Definition) | Reference to the unique name of a `produced` event definition | string | yes | +| [resultEventRef](#Event-Definitions) | Reference to the unique name of a `consumed` event definition | string | yes | +| data | If string type, an expression which selects parts of the states data output to become the data (payload) of the event referenced by `triggerEventRef`. If object type, a custom object to become the data (payload) of the event referenced by `triggerEventRef`. | string or object | no | +| contextAttributes | Add additional event extension context attributes to the trigger/produced event | object | no |
Click to view example definition

@@ -1898,20 +2368,11 @@ Transitions allow you to move from one state (control-logic block) to another. F ```json { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "customer": "${ .customer }" - } - } - } - ], - "end": true + "eventRef": { + "triggerEventRef": "MakeVetAppointment", + "data": "${ .patientInfo }", + "resultEventRef": "VetAppointmentInfo" + } } ``` @@ -1919,15 +2380,10 @@ Transitions allow you to move from one state (control-logic block) to another. F ```yaml -name: RejectApplication -type: operation -actionMode: sequential -actions: -- functionRef: - refName: sendRejectionEmailFunction - arguments: - customer: "${ .customer }" -end: true +eventRef: + triggerEventRef: MakeVetAppointment + data: "${ .patientInfo }" + resultEventRef: VetAppointmentInfo ``` @@ -1936,24 +2392,22 @@ end: true
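The produced event can also carry additional extension context attributes via the `contextAttributes` property described below. A minimal sketch extending the example above; the `patientid` attribute and its expression are assumptions made for illustration only:

```json
{
  "eventRef": {
    "triggerEventRef": "MakeVetAppointment",
    "data": "${ .patientInfo }",
    "contextAttributes": {
      "patientid": "${ .patientInfo.id }"
    },
    "resultEventRef": "VetAppointmentInfo"
  }
}
```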

-Operation state defines a set of actions to be performed in sequence or in parallel. -Once all actions have been performed, a transition to another state can occur. +References a `produced` and `consumed` [event definitions](#Event-Definition) via the "triggerEventRef" and `resultEventRef` properties, respectively. -#### Switch State +The `data` property can have two types: string or object. If it is of string type, it is an expression that can select parts of state data +to be used as payload of the event referenced by `triggerEventRef`. If it is of object type, you can define a custom object to be the event payload. + +The `contextAttributes` property allows you to add one or more [extension context attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) +to the trigger/produced event. + +#### Error Definition | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | -| name | State name | string | yes | -| type | State type | string | yes | -| [dataConditions](#switch-state-dataconditions) or [eventConditions](#switch-state-eventconditions) | Defined if the Switch state evaluates conditions and transitions based on state data, or arrival of events. | array | yes (one) | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| eventTimeout | If eventConditions is used, defines the time period to wait for events (ISO 8601 format). For example: "PT15M" (15 minutes), or "P2DT3H4M" (2 days, 3 hours and 4 minutes)| string | yes only if eventConditions is defined | -| default | Default transition of the workflow if there is no matching data conditions or event timeout is reached. Can be a transition or end definition | object | no | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | +| error | Domain-specific error name, or '*' to indicate all possible errors | string | yes | +| code | Error code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. Should not be defined if error is set to '*' | string | no | +| retryRef | Defines the unique retry strategy definition to be used | string | no | +| [transition](#Transitions) or [end](#End-Definition) | Transition to next state to handle the error, or end workflow execution if this error is encountered | object | yes |
Click to view example definition

@@ -1967,23 +2421,9 @@ Once all actions have been performed, a transition to another state can occur. ```json -{ - "name":"CheckVisaStatus", - "type":"switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": "HandleApprovedVisa" - }, - { - "eventRef": "visaRejectedEvent", - "transition": "HandleRejectedVisa" - } - ], - "eventTimeout": "PT1H", - "default": { - "transition": "HandleNoVisaDecision" - } +{ + "error": "Item not in inventory", + "transition": "IssueRefundToCustomer" } ``` @@ -1991,16 +2431,8 @@ Once all actions have been performed, a transition to another state can occur. ```yaml -name: CheckVisaStatus -type: switch -eventConditions: -- eventRef: visaApprovedEvent - transition: HandleApprovedVisa -- eventRef: visaRejectedEvent - transition: HandleRejectedVisa -eventTimeout: PT1H -default: - transition: HandleNoVisaDecision +error: Item not in inventory +transition: IssueRefundToCustomer ``` @@ -2009,35 +2441,35 @@ default:
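Error definitions such as the one above are normally listed in a state's `onErrors` array, optionally combined with the `retryRef` and wildcard `'*'` options described below. An illustrative sketch; the retry strategy and target state names are assumptions:

```json
{
  "onErrors": [
    {
      "error": "Item not in inventory",
      "retryRef": "InventoryRetryStrategy",
      "transition": "IssueRefundToCustomer"
    },
    {
      "error": "*",
      "transition": "HandleAnyOtherError"
    }
  ]
}
```

Here the first entry retries using a named retry strategy before transitioning, while the wildcard entry handles all other errors.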

-Switch states can be viewed as workflow gateways: they can direct transitions of a workflow based on certain conditions. -There are two types of conditions for switch states: -* [Data-based conditions](#switch-state-dataconditions) -* [Event-based conditions](#switch-state-eventconditions) +Error definitions describe errors that can occur during workflow execution and how to handle them. -These are exclusive, meaning that a switch state can define one or the other condition type, but not both. +The `error`property defines the domain-specific name of the error. Users can also set the name to +`*` which is a wildcard specifying "all" errors, in the case where no other error definitions are defined, +or "all other" errors if there are other errors defined within the same states `onErrors` definition. -At times multiple defined conditions can be evaluated to `true` by runtime implementations. -Conditions defined first take precedence over conditions defined later. This is backed by the fact that arrays/sequences -are ordered in both JSON and YAML. For example, let's say there are two `true` conditions: A and B, defined in that order. -Because A was defined first, its transition will be executed, not B's. +The `code` property can be used in addition to `name` to help runtimes resolve the defined +domain-specific error to the actual technical errors/exceptions that may happen during runtime execution. -In case of data-based conditions definition, switch state controls workflow transitions based on the states data. -If no defined conditions can be matched, the state transitions is taken based on the `default` property. -This property can be either a `transition` to another workflow state, or an `end` definition meaning a workflow end. +The `transition` property defines the transition to the next workflow state in cases when the defined +error happens during runtime execution. -For event-based conditions, a switch state acts as a workflow wait state. It halts workflow execution -until one of the referenced events arrive, then making a transition depending on that event definition. -If events defined in event-based conditions do not arrive before the states `eventTimeout` property expires, - state transitions are based on the defined `default` property. +If `transition` is not defined you can also define the `end` property which will end workflow execution at that point. -#### Switch State: Data Conditions +The `retryRef` property is used to define the retry strategy to be used for this particular error. + +For more information, see the [Workflow Error Handling](#Workflow-Error-Handling) sections. + +#### Retry Definition | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| name | Data condition name | string | no | -| [condition](#Workflow-Expressions) | Workflow expression evaluated against state data. Must evaluate to true or false | string | yes | -| [transition](#Transitions) or [end](#End-Definition) | Defines what to do if condition is true. Transition to another state, or end workflow | object | yes | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | +| name | Unique retry strategy name | string | yes | +| delay | Time delay between retry attempts (ISO 8601 duration format) | string | no | +| maxAttempts | Maximum number of retry attempts. 
Value of 0 means no retries are performed | string or number | no | +| maxDelay | Maximum amount of delay between retry attempts (ISO 8601 duration format) | string | no | +| increment | Static duration which will be added to the delay between successive retries (ISO 8601 duration format) | string | no | +| multiplier | Float value by which the delay is multiplied before each attempt. For example: "1.2" meaning that each successive delay is 20% longer than the previous delay. For example, if delay is 'PT10S', then the delay between the first and second attempts will be 10 seconds, and the delay before the third attempt will be 12 seconds. | float or string | no | +| jitter | If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0.0 and 1.0). If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) | float or string | no |
Click to view example definition

@@ -2052,9 +2484,10 @@ If events defined in event-based conditions do not arrive before the states `eve ```json { - "name": "Eighteen or older", - "condition": "${ .applicant | .age >= 18 }", - "transition": "StartApplication" + "name": "TimeoutRetryStrat", + "delay": "PT2M", + "maxAttempts": 3, + "jitter": "PT0.001S" } ``` @@ -2062,9 +2495,10 @@ If events defined in event-based conditions do not arrive before the states `eve ```yaml -name: Eighteen or older -condition: "${ .applicant | .age >= 18 }" -transition: StartApplication +name: TimeoutRetryStrat +delay: PT2M +maxAttempts: 3 +jitter: PT0.001S ``` @@ -2073,81 +2507,103 @@ transition: StartApplication

-Switch state data conditions specify a data-based condition statement, which causes a transition to another -workflow state if evaluated to true. -The `condition` property of the condition defines an expression (e.g., `${ .applicant | .age > 18 }`), which selects -parts of the state data input. The condition must evaluate to `true` or `false`. +Defines the states retry policy (strategy). This is an explicit definition and can be reused across multiple +defined workflow state errors. -If the condition is evaluated to `true`, you can specify either the `transition` or `end` definitions -to decide what to do, transition to another workflow state, or end workflow execution. +The `name` property specifies the unique name of the retry definition (strategy). This unique name +can be referred by workflow states [error definitions](#Error-Definition). -#### Switch State: Event Conditions +The `delay` property specifies the initial time delay between retry attempts (ISO 8601 duration format). -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| name | Event condition name | string | no | -| eventRef | References an unique event name in the defined workflow events | string | yes | -| [transition](#Transitions) or [end](#End-Definition) | Defines what to do if condition is true. Transition to another state, or end workflow | object | yes | -| [eventDataFilter](#event-data-filter) | Event data filter definition | object | no | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | +The `increment` property specifies a static duration which will be added to the delay between successive retries. +To explain this better, let's say we have the following retry definition: +```json +{ + "name": "Timeout Errors Strategy", + "delay": "PT10S", + "increment": "PT2S", + "maxAttempts": 4 +} +``` +which means that we will retry up to 4 times after waiting with increasing delay between attempts; +in this example 10, 12, 14, and 16 seconds between retries. -
Click to view example definition -

+The `multiplier` property specifies the value by which the interval time is increased for each of the retry attempts. +To explain this better, let's say we have the following retry definition: - - - - - - - - - -
JSON | YAML
+```json +{ + "name": "Timeout Errors Strategy", + "delay": "PT10S", + "multiplier": 2, + "maxAttempts": 4 +} +``` +which means that we will retry up to 4 times after waiting with increasing delay between attempts; +in this example 10, 20, 40, and 80 seconds between retries. + +The `maxAttempts` property determines the maximum number of retry attempts allowed and is a positive integer value. + +The `jitter` property is important to prevent certain scenarios where clients +are retrying in sync, possibly causing or contributing to a transient failure +precisely because they're retrying at the same time. Adding a typically small, +bounded random amount of time to the period between retries serves the purpose +of attempting to prevent these retries from happening simultaneously, possibly +reducing total time to complete requests and overall congestion. How this value +is used in the exponential backoff algorithm is left up to implementations. + +`jitter` may be specified as a percentage relative to the total delay. +For example, if `interval` is 2 seconds, `multiplier` is 2 seconds, and we're at +the third attempt, there will be a delay of 6 seconds. If we set `jitter` to +0.3, then a random amount of time between 0 and 1.8 (`totalDelay * jitter == 6 * 0.3`) +will be added or subtracted from the delay. + +Alternatively, `jitter` may be defined as an absolute value specified as an ISO +8601 duration. This way, the maximum amount of random time added is fixed and +will not increase as new attempts are made. + +The `maxDelay` property determines the maximum amount of delay that is desired between retry attempts, and is applied +after `increment`, `multiplier`, and `jitter`. + +To explain this better, let's say we have the following retry definition: ```json { - "name": "Visa approved", - "eventRef": "visaApprovedEvent", - "transition": "HandleApprovedVisa" + "name": "Timeout Errors Strategy", + "delay": "PT10S", + "maxDelay": "PT100S", + "multiplier": 4, + "jitter": "PT1S", + "maxAttempts": 4 } ``` +which means that we will retry up to 4 times after waiting with increasing delay between attempts; +in this example we might observe the following series of delays: +* 11s (min(`maxDelay`, (`delay` +/- rand(`jitter`)) => min(100, 10 + 1)) +* 43s (min(`maxDelay`, (11s * `multiplier`) +/- rand(`jitter`)) => min(100, (11 * 4) - 1)) +* 100s (min(`maxDelay`, (43s * `multiplier`) +/- rand(`jitter`)) => min(100, (43 * 4) + 0)) +* 100s (min(`maxDelay`, (100s * `multiplier`) +/- rand(`jitter`)) => min(100, (100 * 4) - 1)) - - -```yaml -name: Visa approved -eventRef: visaApprovedEvent -transition: HandleApprovedVisa -``` - -
- -

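To complement the ISO 8601 duration form of `jitter` used in the example above, the following sketch shows the float (relative) form, where up to 30% of the computed delay may be randomly added to or subtracted from each retry interval:

```json
{
  "name": "Timeout Errors Strategy",
  "delay": "PT10S",
  "multiplier": 2,
  "jitter": 0.3,
  "maxAttempts": 4
}
```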
+For more information, refer to the [Workflow Error Handling](#Workflow-Error-Handling) sections. -Switch state event conditions specify events, which the switch state must wait for. Each condition -can reference one workflow-defined event. Upon arrival of this event, the associated transition is taken. -The `eventRef` property references a name of one of the defined workflow events. +#### Transition Definition -If the referenced event is received, you can specify either the `transition` or `end` definitions -to decide what to do, transition to another workflow state, or end workflow execution. +`Transition` definition can have two types, either `string` or `object`. +If `string`, it defines the name of the state to transition to. +This can be used as a short-cut definition when you don't need to define any other parameters, for example: -The `eventDataFilter` property can be used to filter event when it is received. +```json +"transition": "myNextState" +``` -#### Delay State +If you need to define additional parameters in your `transition` definition, you can define +it with its `object` type which has the following properties: | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | -| name |State name | string | yes | -| type |State type | string | yes | -| timeDelay |Amount of time (ISO 8601 format) to delay when in this state. For example: "PT15M" (delay 15 minutes), or "P2DT3H4M" (delay 2 days, 3 hours and 4 minutes) | integer | yes | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after the delay | object | yes (if end is not defined) | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | -| [end](#End-Definition) |If this state an end state | object | no | +| [nextState](#Transitions) | Name of the state to transition to next | string | yes | +| [compensate](#Workflow-Compensation) | If set to `true`, triggers workflow compensation before this transition is taken. Default is `false` | boolean | no | +| produceEvents | Array of [producedEvent](#ProducedEvent-Definition) definitions. Events to be produced before the transition takes place | array | no |
Click to view example definition

@@ -2162,10 +2618,11 @@ The `eventDataFilter` property can be used to filter event when it is received. ```json { - "name": "WaitForCompletion", - "type": "delay", - "timeDelay": "PT5S", - "transition": "GetJobStatus" + "produceEvents": [{ + "eventRef": "produceResultEvent", + "data": "${ .result.data }" + }], + "nextState": "EvalResultState" } ``` @@ -2173,10 +2630,10 @@ The `eventDataFilter` property can be used to filter event when it is received. ```yaml -name: WaitForCompletion -type: delay -timeDelay: PT5S -transition: GetJobStatus +produceEvents: +- eventRef: produceResultEvent + data: "${ .result.data }" +nextState: EvalResultState ``` @@ -2185,25 +2642,29 @@ transition: GetJobStatus
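A transition can also request [compensation](#Workflow-Compensation) before it is taken via the `compensate` flag described below. A minimal sketch; the target state name is an assumption:

```json
{
  "compensate": true,
  "nextState": "CancelOrderState"
}
```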

-Delay state waits for a certain amount of time before transitioning to a next state. The amount of delay is specified by the `timeDelay` property in ISO 8601 format. +The `nextState` property defines the name of the state to transition to next. +The `compensate` property allows you to trigger [compensation](#Workflow-Compensation) before the transition (if set to true). +The `produceEvents` property allows you to define a list of events to produce before the transition happens. -#### Parallel State +Transitions allow you to move from one state (control-logic block) to another. For more information see the +[Transitions section](#Transitions) section. + +#### Operation State | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | +| id | Unique state id | string | no | | name | State name | string | yes | | type | State type | string | yes | -| [branches](#parallel-state-branch) | List of branches for this parallel state| array | yes | -| completionType | Option types on how to complete branch execution. Default is "and" | enum | no | -| n | Used when branchCompletionType is set to `n_of_m` to specify the `n` value. | string or number | no | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | +| actionMode | Should actions be performed sequentially or in parallel | string | no | +| [actions](#Action-Definition) | Actions to be performed | array | yes | +| [stateDataFilter](#State-data-filters) | State data filter | object | no | | [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after all branches have completed execution | object | yes (if end is not defined) | +| [transition](#Transitions) | Next transition of the workflow after all the actions have been performed | object | yes (if end is not defined) | | [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | | [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | | [metadata](#Workflow-Metadata) | Metadata information| object | no | -| [end](#End-Definition) | If this state and end state | object | no | +| [end](#End-Definition) | Is this state an end state | object | no |
Click to view example definition

@@ -2217,124 +2678,21 @@ Delay state waits for a certain amount of time before transitioning to a next st ```json - { - "name":"ParallelExec", - "type":"parallel", - "completionType": "and", - "branches": [ +{ + "name": "RejectApplication", + "type": "operation", + "actionMode": "sequential", + "actions": [ { - "name": "Branch1", - "actions": [ - { - "functionRef": { - "refName": "functionNameOne", - "arguments": { - "order": "${ .someParam }" - } + "functionRef": { + "refName": "sendRejectionEmailFunction", + "arguments": { + "customer": "${ .customer }" } } - ] - }, - { - "name": "Branch2", - "actions": [ - { - "functionRef": { - "refName": "functionNameTwo", - "arguments": { - "order": "${ .someParam }" - } - } - } - ] } - ], - "end": true -} -``` - - - - -```yaml -name: ParallelExec -type: parallel -completionType: and -branches: -- name: Branch1 - actions: - - functionRef: - refName: functionNameOne - arguments: - order: "${ .someParam }" -- name: Branch2 - actions: - - functionRef: - refName: functionNameTwo - arguments: - order: "${ .someParam }" -end: true -``` - - - - - -

- -Parallel state defines a collection of `branches` that are executed in parallel. -A parallel state can be seen a state which splits up the current workflow instance execution path -into multiple ones, one for each of each branch. These execution paths are performed in parallel -and are joined back into the current execution path depending on the defined `completionType` parameter value. - -The "completionType" enum specifies the different ways of completing branch execution: -* and: All branches must complete execution before state can perform its transition. This is the default value in case this parameter is not defined in the parallel state definition. -* xor: State can transition when one of the branches completes execution -* n_of_m: State can transition once `n` number of branches have completed execution. In this case you should also -specify the `n` property to define this number. - -Exceptions may occur during execution of branches of the Parallel state, this is described in detail in [this section](#parallel-state-exceptions). - -#### Parallel State: Branch - -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| name | Branch name | string | yes | -| [actions](#Action-Definition) | Actions to be executed in this branch | array | yes if workflowId is not defined | -| workflowId | Unique Id of a workflow to be executed in this branch | string | yes if actions is not defined | - -
Click to view example definition -

- - - - - - - - - -
JSON | YAML
- -```json -{ - "name": "Branch1", - "actions": [ - { - "functionRef": { - "refName": "functionNameOne", - "arguments": { - "order": "${ .someParam }" - } - } - }, - { - "functionRef": { - "refName": "functionNameTwo", - "arguments": { - "order": "${ .someParamTwo }" - } - } - } - ] + ], + "end": true } ``` @@ -2342,68 +2700,41 @@ Exceptions may occur during execution of branches of the Parallel state, this is ```yaml -name: Branch1 -actions: -- functionRef: - refName: functionNameOne - arguments: - order: "${ .someParam }" -- functionRef: - refName: functionNameTwo - arguments: - order: "${ .someParamTwo }" -``` - -
- -

- -Each branch receives the same copy of the Parallel state's data input. - -A branch can define either actions or a workflow id of the workflow that needs to be executed. -The workflow id defined cannot be the same id of the workflow there the branch is defined. - -#### Parallel State: Handling Exceptions - -Exceptions can occur during execution of Parallel state branches. - -By default, exceptions that are not handled within branches stop branch execution and are propagated -to the Parallel state and should be handled with its `onErrors` definition. - -If the parallel states branch defines actions, all exceptions that arise from executing these actions - are propagated to the parallel state -and can be handled with the parallel states `onErrors` definition. +name: RejectApplication +type: operation +actionMode: sequential +actions: +- functionRef: + refName: sendRejectionEmailFunction + arguments: + customer: "${ .customer }" +end: true +``` -If the parallel states defines a `workflowId`, exceptions that occur during execution of the called workflow -can chose to handle exceptions on their own. All unhandled exceptions from the called workflow -execution however are propagated back to the parallel state and can be handled with the parallel states -`onErrors` definition. + + + -Note that once an error that is propagated to the parallel state from a branch and handled by the -states `onErrors` definition is handled (its associated transition is taken) no further errors from branches of this -parallel state should be considered as the workflow control flow logic has already moved to a different state. + -For more information, see the [Workflow Error Handling](#Workflow-Error-Handling) sections. +Operation state defines a set of actions to be performed in sequence or in parallel. +Once all actions have been performed, a transition to another state can occur. -#### SubFlow State +#### Switch State | Parameter | Description | Type | Required | | --- | --- | --- | --- | | id | Unique state id | string | no | -| name |State name | string | yes | -| type |State type | string | yes | -| waitForCompletion | If workflow execution must wait for sub-workflow to finish before continuing | boolean | yes | -| workflowId |Sub-workflow unique id | boolean | no | -| [repeat](#Repeat-Definition) | SubFlow state repeat exec definition | object | no | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | +| name | State name | string | yes | +| type | State type | string | yes | +| [dataConditions](#switch-state-dataconditions) or [eventConditions](#switch-state-eventconditions) | Defined if the Switch state evaluates conditions and transitions based on state data, or arrival of events. | array | yes (one) | +| [stateDataFilter](#State-data-filters) | State data filter | object | no | | [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after subflow has completed | object | yes (if end is not defined) | +| eventTimeout | If eventConditions is used, defines the time period to wait for events (ISO 8601 format). For example: "PT15M" (15 minutes), or "P2DT3H4M" (2 days, 3 hours and 4 minutes)| string | yes only if eventConditions is defined | +| default | Default transition of the workflow if there is no matching data conditions or event timeout is reached. 
Can be a transition or end definition | object | yes | | [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | | [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | | [metadata](#Workflow-Metadata) | Metadata information| object | no | -| [end](#End-Definition) | If this state and end state | object | no |
Click to view example definition

@@ -2417,11 +2748,23 @@ For more information, see the [Workflow Error Handling](#Workflow-Error-Handling ```json -{ - "name": "HandleApprovedVisa", - "type": "subflow", - "workflowId": "handleApprovedVisaWorkflowID", - "end": true +{ + "name":"CheckVisaStatus", + "type":"switch", + "eventConditions": [ + { + "eventRef": "visaApprovedEvent", + "transition": "HandleApprovedVisa" + }, + { + "eventRef": "visaRejectedEvent", + "transition": "HandleRejectedVisa" + } + ], + "eventTimeout": "PT1H", + "default": { + "transition": "HandleNoVisaDecision" + } } ``` @@ -2429,10 +2772,16 @@ For more information, see the [Workflow Error Handling](#Workflow-Error-Handling ```yaml -name: HandleApprovedVisa -type: subflow -workflowId: handleApprovedVisaWorkflowID -end: true +name: CheckVisaStatus +type: switch +eventConditions: +- eventRef: visaApprovedEvent + transition: HandleApprovedVisa +- eventRef: visaRejectedEvent + transition: HandleRejectedVisa +eventTimeout: PT1H +default: + transition: HandleNoVisaDecision ``` @@ -2441,52 +2790,35 @@ end: true
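The example above shows an event-based switch. A data-based switch instead uses `dataConditions` together with the `default` definition, as described below. An illustrative sketch; state, condition, and target state names are assumptions:

```json
{
  "name": "CheckApplication",
  "type": "switch",
  "dataConditions": [
    {
      "condition": "${ .applicant | .age >= 18 }",
      "transition": "StartApplication"
    },
    {
      "condition": "${ .applicant | .age < 18 }",
      "transition": "RejectApplication"
    }
  ],
  "default": {
    "transition": "HandleNoDecision"
  }
}
```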

-Often you want to group your workflows into small logical units that solve a particular business problem and can be reused in -multiple other workflow definitions. - -

-Referencing reusable workflow via SubFlow states -

- -Reusable workflow are referenced by their `id` property via the SubFlow states`workflowId` parameter. - -Each referenced workflow receives the SubFlow states data as workflow data input. +Switch states can be viewed as workflow gateways: they can direct transitions of a workflow based on certain conditions. +There are two types of conditions for switch states: +* [Data-based conditions](#switch-state-dataconditions) +* [Event-based conditions](#switch-state-eventconditions) -The `waitForCompletion` property defines if the SubFlow state should wait until the referenced reusable workflow -has completed its execution. If it's set to "true" (default value), SubFlow state execution must wait until the referenced workflow has completed its execution. -In this case the workflow data output of the referenced workflow can and should be merged with the SubFlow states state data. -If it's set to "false" the parent workflow can continue its execution while the referenced sub-workflow -is being executed. For this case, the referenced (child) workflow data output cannot be merged with the SubFlow states -state data (as by the time its completion the parent workflow execution has already continued). +These are exclusive, meaning that a switch state can define one or the other condition type, but not both. -The `repeat` property defines the SubFlow states repeated execution (looping) behavior. This allows you to specify that -the sub-workflow should be executed multiple times repeatedly. -If the `repeat` property is defined, the `waitForCompletion` should be assumed have the value of `true`. -If the workflow explicitly triggers [compensation](#Workflow-Compensation) and the SubFlow state -was executed and defines its compensation state, it should be compensated once, no matter how many times -its was executed as defined by the `repeat` property. -After each execution of the SubFlow state, if `repeat` is defined, the SubFlow state data at the end of the -one execution should become the state data of the next execution. +At times multiple defined conditions can be evaluated to `true` by runtime implementations. +Conditions defined first take precedence over conditions defined later. This is backed by the fact that arrays/sequences +are ordered in both JSON and YAML. For example, let's say there are two `true` conditions: A and B, defined in that order. +Because A was defined first, its transition will be executed, not B's. -For more information about the `repeat` property see the [Repeat Definition](#Repeat-Definition) section. +In case of data-based conditions definition, switch state controls workflow transitions based on the states data. +If no defined conditions can be matched, the state transitions is taken based on the `default` property. +This property can be either a `transition` to another workflow state, or an `end` definition meaning a workflow end. -Referenced sub-workflows must declare their own [function](#Function-Definition) and [event](#Event-Definition) definitions. +For event-based conditions, a switch state acts as a workflow wait state. It halts workflow execution +until one of the referenced events arrive, then making a transition depending on that event definition. +If events defined in event-based conditions do not arrive before the states `eventTimeout` property expires, + state transitions are based on the defined `default` property. 
-#### Inject State +#### Switch State: Data Conditions | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | -| name | State name | string | yes | -| type | State type | string | yes | -| data | JSON object which can be set as state's data input and can be manipulated via filter | object | yes | -| [stateDataFilter](#state-data-filter) | State data filter | object | no | -| [transition](#Transitions) | Next transition of the workflow after subflow has completed | object | yes (if end is set to false) | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| name | Data condition name | string | no | +| [condition](#Workflow-Expressions) | Workflow expression evaluated against state data. Must evaluate to true or false | string | yes | +| [transition](#Transitions) or [end](#End-Definition) | Defines what to do if condition is true. Transition to another state, or end workflow | object | yes | | [metadata](#Workflow-Metadata) | Metadata information| object | no | -| [end](#End-Definition) | If this state and end state | object | no |
Click to view example definition

@@ -2500,13 +2832,10 @@ Referenced sub-workflows must declare their own [function](#Function-Definition) ```json -{ - "name":"Hello", - "type":"inject", - "data": { - "result": "Hello" - }, - "transition": "World" +{ + "name": "Eighteen or older", + "condition": "${ .applicant | .age >= 18 }", + "transition": "StartApplication" } ``` @@ -2514,11 +2843,9 @@ Referenced sub-workflows must declare their own [function](#Function-Definition) ```yaml -name: Hello -type: inject -data: - result: Hello -transition: World +name: Eighteen or older +condition: "${ .applicant | .age >= 18 }" +transition: StartApplication ``` @@ -2527,15 +2854,26 @@ transition: World
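As described below, a data condition may also end workflow execution instead of transitioning. A minimal sketch; the condition name is an assumption:

```json
{
  "name": "Minor applicant",
  "condition": "${ .applicant | .age < 18 }",
  "end": true
}
```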

-Inject state can be used to inject static data into state data input. Inject state does not perform any actions. -It is very useful for debugging, for example, as you can test/simulate workflow execution with pre-set data that would typically -be dynamic in nature (e.g., function calls, events). +Switch state data conditions specify a data-based condition statement, which causes a transition to another +workflow state if evaluated to true. +The `condition` property of the condition defines an expression (e.g., `${ .applicant | .age > 18 }`), which selects +parts of the state data input. The condition must evaluate to `true` or `false`. -The inject state `data` property allows you to statically define a JSON object which gets added to the states data input. -You can use the filter property to control the states data output to the transition state. +If the condition is evaluated to `true`, you can specify either the `transition` or `end` definitions +to decide what to do, transition to another workflow state, or end workflow execution. -Here is a typical example of how to use the inject state to add static data into its states data input, which then is passed -as data output to the transition state: +#### Switch State: Event Conditions + +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| name | Event condition name | string | no | +| eventRef | References an unique event name in the defined workflow events | string | yes | +| [transition](#Transitions) or [end](#End-Definition) | Defines what to do if condition is true. Transition to another state, or end workflow | object | yes | +| [eventDataFilter](#Event-data-filters) | Event data filter definition | object | no | +| [metadata](#Workflow-Metadata) | Metadata information| object | no | + +
Click to view example definition +

@@ -2545,59 +2883,111 @@ as data output to the transition state:
- ```json - { - "name":"SimpleInjectState", - "type":"inject", - "data": { - "person": { - "fname": "John", - "lname": "Doe", - "address": "1234 SomeStreet", - "age": 40 - } - }, - "transition": "GreetPersonState" - } - ``` +```json +{ + "name": "Visa approved", + "eventRef": "visaApprovedEvent", + "transition": "HandleApprovedVisa" +} +``` ```yaml - name: SimpleInjectState - type: inject - data: - person: - fname: John - lname: Doe - address: 1234 SomeStreet - age: 40 - transition: GreetPersonState +name: Visa approved +eventRef: visaApprovedEvent +transition: HandleApprovedVisa ```
-The data output of the "SimpleInjectState" which then is passed as input to the transition state would be: +

+ +Switch state event conditions specify events that the switch state must wait for. Each condition +can reference one workflow-defined event. Upon arrival of this event, the associated transition is taken. +The `eventRef` property references the name of one of the defined workflow events. + +If the referenced event is received, you can specify either the `transition` or `end` definition +to decide what to do: transition to another workflow state, or end workflow execution. + +The `eventDataFilter` property can be used to filter event data when it is received. + +#### Delay State + +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| timeDelay | Amount of time (ISO 8601 format) to delay when in this state. For example: "PT15M" (delay 15 minutes), or "P2DT3H4M" (delay 2 days, 3 hours and 4 minutes) | string | yes | +| [stateDataFilter](#State-data-filters) | State data filter | object | no | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [transition](#Transitions) | Next transition of the workflow after the delay | object | yes (if end is not defined) | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | +| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| [end](#End-Definition) | Is this state an end state | object | no | + +
Click to view example definition +

+ + + + + + + + + + +
JSONYAML
```json { - "person": { - "fname": "John", - "lname": "Doe", - "address": "1234 SomeStreet", - "age": 40 - } + "name": "WaitForCompletion", + "type": "delay", + "timeDelay": "PT5S", + "transition": "GetJobStatus" } +``` + + +```yaml +name: WaitForCompletion +type: delay +timeDelay: PT5S +transition: GetJobStatus ``` -If the inject state already receives a data input from the previous transition state, the inject data should be merged -with its data input. +
-You can also use the filter property to filter the state data after data is injected. Let's say we have: +

+ +Delay state waits for a certain amount of time before transitioning to the next state. The amount of delay is specified by the `timeDelay` property in ISO 8601 duration format. + +#### Parallel State + +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| [branches](#parallel-state-branch) | List of branches for this parallel state | array | yes | +| completionType | Option type on how to complete branch execution. Default is "and" | enum | no | +| n | Used when completionType is set to `n_of_m` to specify the `n` value. | string or number | no | +| [stateDataFilter](#State-data-filters) | State data filter | object | no | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [transition](#Transitions) | Next transition of the workflow after all branches have completed execution | object | yes (if end is not defined) | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | +| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| [metadata](#Workflow-Metadata) | Metadata information | object | no | +| [end](#End-Definition) | Is this state an end state | object | no | + +
Click to view example definition +

@@ -2608,116 +2998,91 @@ You can also use the filter property to filter the state data after data is inje
```json - { - "name":"SimpleInjectState", - "type":"inject", - "data": { - "people": [ - { - "fname": "John", - "lname": "Doe", - "address": "1234 SomeStreet", - "age": 40 - }, - { - "fname": "Marry", - "lname": "Allice", - "address": "1234 SomeStreet", - "age": 25 - }, - { - "fname": "Kelly", - "lname": "Mill", - "address": "1234 SomeStreet", - "age": 30 - } + { + "name":"ParallelExec", + "type":"parallel", + "completionType": "and", + "branches": [ + { + "name": "Branch1", + "actions": [ + { + "functionRef": { + "refName": "functionNameOne", + "arguments": { + "order": "${ .someParam }" + } + } + } ] - }, - "stateDataFilter": { - "dataOutputPath": "${ {people: [.people[] | select(.age < 40)]} }" - }, - "transition": "GreetPersonState" - } + }, + { + "name": "Branch2", + "actions": [ + { + "functionRef": { + "refName": "functionNameTwo", + "arguments": { + "order": "${ .someParam }" + } + } + } + ] + } + ], + "end": true +} ``` ```yaml - name: SimpleInjectState - type: inject - data: - people: - - fname: John - lname: Doe - address: 1234 SomeStreet - age: 40 - - fname: Marry - lname: Allice - address: 1234 SomeStreet - age: 25 - - fname: Kelly - lname: Mill - address: 1234 SomeStreet - age: 30 - stateDataFilter: - dataOutputPath: "${ {people: [.people[] | select(.age < 40)]} }" - transition: GreetPersonState +name: ParallelExec +type: parallel +completionType: and +branches: +- name: Branch1 + actions: + - functionRef: + refName: functionNameOne + arguments: + order: "${ .someParam }" +- name: Branch2 + actions: + - functionRef: + refName: functionNameTwo + arguments: + order: "${ .someParam }" +end: true ```
-In which case the states data output would include only people whose age is less than 40: - -```json -{ - "people": [ - { - "fname": "Marry", - "lname": "Allice", - "address": "1234 SomeStreet", - "age": 25 - }, - { - "fname": "Kelly", - "lname": "Mill", - "address": "1234 SomeStreet", - "age": 30 - } - ] -} -``` +

-You can change your output path easily during testing, for example change the expression to: +Parallel state defines a collection of `branches` that are executed in parallel. +A parallel state can be seen as a state which splits up the current workflow instance execution path +into multiple ones, one for each branch. These execution paths are performed in parallel +and are joined back into the current execution path depending on the defined `completionType` parameter value. -```text -${ {people: [.people[] | select(.age >= 40)]} } -``` +The "completionType" enum specifies the different ways of completing branch execution: +* and: All branches must complete execution before the state can perform its transition. This is the default value in case this parameter is not defined in the parallel state definition. +* xor: State can transition when one of the branches completes execution. +* n_of_m: State can transition once `n` number of branches have completed execution. In this case you should also +specify the `n` property to define this number. -This allows you to test if your workflow behaves properly for cases when there are people whose age is greater or equal 40. +Exceptions may occur during execution of branches of the Parallel state. This is described in detail in [this section](#parallel-state-handling-exceptions). -#### ForEach State +#### Parallel State: Branch | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| id | Unique state id | string | no | -| name | State name | string | yes | -| type | State type | string | yes | -| inputCollection | Workflow expression selecting an array element of the states data | string | yes | -| outputCollection | Workflow expression specifying an array element of the states data to add the results of each iteration | string | no | -| iterationParam | Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array | string | yes | -| max | Specifies how upper bound on how many iterations may run in parallel | string or number | no | -| [actions](#Action-Definition) | Actions to be executed for each of the elements of inputCollection | array | yes if subflowId is not defined | -| workflowId | Unique Id of a workflow to be executed for each of the elements of inputCollection | string | yes if actions is not defined | -| [stateDataFilter](#state-data-filter) | State data filter definition | object | no | -| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after state has completed | object | yes (if end is not defined) | -| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | -| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | -| [metadata](#Workflow-Metadata) | Metadata information| object | no | -| [end](#End-Definition) | Is this state an end state | object | no | +| name | Branch name | string | yes | +| [actions](#Action-Definition) | Actions to be executed in this branch | array | yes if workflowId is not defined | +| workflowId | Unique Id of a workflow to be executed in this branch | string | yes if actions is not defined |
Click to view example definition

@@ -2732,21 +3097,25 @@ This allows you to test if your workflow behaves properly for cases when there a ```json { - "name": "ProvisionOrdersState", - "type": "foreach", - "inputCollection": "${ .orders }", - "iterationParam": "singleorder", - "outputCollection": "${ .provisionresults }", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .singleorder }" - } - } - } - ] + "name": "Branch1", + "actions": [ + { + "functionRef": { + "refName": "functionNameOne", + "arguments": { + "order": "${ .someParam }" + } + } + }, + { + "functionRef": { + "refName": "functionNameTwo", + "arguments": { + "order": "${ .someParamTwo }" + } + } + } + ] } ``` @@ -2754,16 +3123,16 @@ This allows you to test if your workflow behaves properly for cases when there a ```yaml -name: ProvisionOrdersState -type: foreach -inputCollection: "${ .orders }" -iterationParam: "singleorder" -outputCollection: "${ .provisionresults }" +name: Branch1 actions: - functionRef: - refName: provisionOrderFunction + refName: functionNameOne arguments: - order: "${ .singleorder }" + order: "${ .someParam }" +- functionRef: + refName: functionNameTwo + arguments: + order: "${ .someParamTwo }" ``` @@ -2772,58 +3141,53 @@ actions:

-ForEach states can be used to execute [actions](#Action-Definition), or a [sub-workflow](#SubFlow-State) for -each element of a data set. +Each branch receives the same copy of the Parallel state's data input. -Each iteration of the ForEach state should be executed in parallel. +A branch can define either actions or a workflow id of the workflow that needs to be executed. +The workflow id defined cannot be the same as the id of the workflow where the branch is defined. -You can use the `max` property to set the upper bound on how many iterations may run in parallel. The default -of the `max` property is zero, which places no limit on number of parallel executions. +#### Parallel State: Handling Exceptions -The `inputCollection` property is a workflow expression which selects an array in the states data. All iterations -are performed against data elements of this array. If this array does not exist, the runtime should throw -an error. This error can be handled inside the states [`onErrors`](#Error-Definition) definition. +Exceptions can occur during execution of Parallel state branches. -The `outputCollection` property is a workflow expression which selects an array in the state data where the results -of each iteration should be added to. If this array does not exist, it should be created. +By default, exceptions that are not handled within branches stop branch execution and are propagated +to the Parallel state, where they should be handled with its `onErrors` definition. -The `iterationParam` property defines the name of the iteration parameter passed to each parallel execution of the foreach state. -It should contain the unique element of the `inputCollection` array and passed as data input to the actions/workflow defined. -`iterationParam` should be created for each iteration, so it can be referenced/used in defined actions / workflow data input. +If the parallel state's branch defines actions, all exceptions that arise from executing these actions +are propagated to the parallel state +and can be handled with the parallel state's `onErrors` definition. -The `actions` property defines actions to be executed in each state iteration. +If the parallel state's branch defines a `workflowId`, the called workflow +can choose to handle exceptions that occur during its execution on its own. All unhandled exceptions from the called workflow +execution, however, are propagated back to the parallel state and can be handled with the parallel state's +`onErrors` definition. -If actions are not defined, you can specify the `workflowid` to reference a workflow id which needs to be executed -for each iteration. Note that `workflowid` should not be the same as the workflow id of the workflow where the foreach state -is defined. +Note that once an error propagated to the parallel state from a branch is handled by the +state's `onErrors` definition (its associated transition is taken), no further errors from branches of this +parallel state should be considered, as the workflow control flow logic has already moved to a different state. -Let's take a look at an example: +For more information, see the [Workflow Error Handling](#Workflow-Error-Handling) section.
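To make this propagation behavior concrete, here is a minimal, illustrative sketch of a Parallel state that routes unhandled branch errors through its `onErrors` definition. The function, workflow, and state names are made up, and the error entry assumes the `error`/`transition` properties of the [Error Definition](#Error-Definition), with `*` used here to mean "any error":

```json
{
  "name": "ParallelExec",
  "type": "parallel",
  "completionType": "and",
  "branches": [
    {
      "name": "Branch1",
      "actions": [
        {
          "functionRef": {
            "refName": "functionNameOne"
          }
        }
      ]
    },
    {
      "name": "Branch2",
      "workflowId": "branchTwoWorkflowId"
    }
  ],
  "onErrors": [
    {
      "error": "*",
      "transition": "HandleParallelError"
    }
  ],
  "transition": "AfterParallel"
}
```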
-In this example the data input to our workflow is an array of orders: +#### SubFlow State -```json -{ - "orders": [ - { - "orderNumber": "1234", - "completed": true, - "email": "firstBuyer@buyer.com" - }, - { - "orderNumber": "5678", - "completed": true, - "email": "secondBuyer@buyer.com" - }, - { - "orderNumber": "9910", - "completed": false, - "email": "thirdBuyer@buyer.com" - } - ] -} -``` +| Parameter | Description | Type | Required | +| --- | --- | --- | --- | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| waitForCompletion | If workflow execution must wait for sub-workflow to finish before continuing | boolean | yes | +| workflowId | Sub-workflow unique id | string | no | +| [repeat](#Repeat-Definition) | SubFlow state repeat exec definition | object | no | +| [stateDataFilter](#State-data-filters) | State data filter | object | no | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [transition](#Transitions) | Next transition of the workflow after subflow has completed | object | yes (if end is not defined) | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | +| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| [metadata](#Workflow-Metadata) | Metadata information | object | no | +| [end](#End-Definition) | Is this state an end state | object | no | -and our workflow is defined as: +
Click to view example definition +

@@ -2835,35 +3199,10 @@ and our workflow is defined as: ```json { - "id": "sendConfirmWorkflow", - "name": "SendConfirmationForCompletedOrders", - "version": "1.0", - "start": "SendConfirmState", - "functions": [ - { - "name": "sendConfirmationFunction", - "operation": "file://confirmationapi.json#sendOrderConfirmation" - } - ], - "states": [ - { - "name":"SendConfirmState", - "type":"foreach", - "inputCollection": "${ [.orders[] | select(.completed == true)] }", - "iterationParam": "completedorder", - "outputCollection": "${ .confirmationresults }", - "actions":[ - { - "functionRef": { - "refName": "sendConfirmationFunction", - "arguments": { - "orderNumber": "${ .completedorder.orderNumber }", - "email": "${ .completedorder.email }" - } - } - }], - "end": true - }] + "name": "HandleApprovedVisa", + "type": "subflow", + "workflowId": "handleApprovedVisaWorkflowID", + "end": true } ``` @@ -2871,82 +3210,64 @@ and our workflow is defined as:
```yaml -id: sendConfirmWorkflow -name: SendConfirmationForCompletedOrders -version: '1.0' -start: SendConfirmState -functions: -- name: sendConfirmationFunction - operation: file://confirmationapi.json#sendOrderConfirmation -states: -- name: SendConfirmState - type: foreach - inputCollection: "${ [.orders[] | select(.completed == true)] }" - iterationParam: completedorder - outputCollection: "${ .confirmationresults }" - actions: - - functionRef: - refName: sendConfirmationFunction - arguments: - orderNumber: "${ .completedorder.orderNumber }" - email: "${ .completedorder.email }" - end: true +name: HandleApprovedVisa +type: subflow +workflowId: handleApprovedVisaWorkflowID +end: true ```
-The workflow data input containing order information is passed to the `SendConfirmState` foreach state. -The foreach state defines an `inputCollection` property which selects all orders that have the `completed` property set to `true`. +

-For each element of the array selected by `inputCollection` a JSON object defined by `iterationParam` should be -created containing an unique element of `inputCollection` and passed as the data input to the parallel executed actions. +Often you want to group your workflows into small logical units that solve a particular business problem and can be reused in +multiple other workflow definitions. -So for this example, we would have two parallel executions of the `sendConfirmationFunction`, the first one having data: +

+Referencing reusable workflows via SubFlow states +

-```json -{ - "completedorder": { - "orderNumber": "1234", - "completed": true, - "email": "firstBuyer@buyer.com" - } -} -``` +Reusable workflows are referenced by their `id` property via the SubFlow state's `workflowId` parameter. -and the second: +Each referenced workflow receives the SubFlow state's data as workflow data input. -```json -{ - "completedorder": { - "orderNumber": "5678", - "completed": true, - "email": "secondBuyer@buyer.com" - } -} -``` +The `waitForCompletion` property defines if the SubFlow state should wait until the referenced reusable workflow +has completed its execution. If it's set to "true" (default value), SubFlow state execution must wait until the referenced workflow has completed its execution. +In this case the workflow data output of the referenced workflow can and should be merged with the SubFlow state's state data. +If it's set to "false", the parent workflow can continue its execution while the referenced sub-workflow +is being executed. For this case, the referenced (child) workflow data output cannot be merged with the SubFlow state's +state data (as by the time of its completion the parent workflow execution has already continued). -The results of each parallel action execution are stored as elements in the state data array defined by the `outputCollection` property. +The `repeat` property defines the SubFlow state's repeated execution (looping) behavior. This allows you to specify that +the sub-workflow should be executed multiple times. +If the `repeat` property is defined, the `waitForCompletion` property should be assumed to have the value of `true`. +If the workflow explicitly triggers [compensation](#Workflow-Compensation) and the SubFlow state +was executed and defines its compensation state, it should be compensated once, no matter how many times +it was executed as defined by the `repeat` property. +After each execution of the SubFlow state, if `repeat` is defined, the SubFlow state data at the end of +one execution should become the state data of the next execution. -#### Callback State +For more information about the `repeat` property, see the [Repeat Definition](#Repeat-Definition) section. + +Referenced sub-workflows must declare their own [function](#Function-Definition) and [event](#Event-Definition) definitions. + +#### Inject State | Parameter | Description | Type | Required | | --- | --- | --- | --- | | id | Unique state id | string | no | | name | State name | string | yes | | type | State type | string | yes | -| [action](#Action-Definition) | Defines the action to be executed | object | yes | -| eventRef | References an unique callback event name in the defined workflow [events](#Event-Definition) | string | yes | -| [timeout](#eventstate-timeout) | Time period to wait from when action is executed until the callback event is received (ISO 8601 format). 
For example: "PT15M" (wait 15 minutes), or "P2DT3H4M" (wait 2 days, 3 hours and 4 minutes)| string | yes | -| [eventDataFilter](#event-data-filter) | Callback event data filter definition | object | no | -| [stateDataFilter](#state-data-filter) | State data filter definition | object | no | +| data | JSON object which can be set as state's data input and can be manipulated via filter | object | yes | +| [stateDataFilter](#state-data-filters) | State data filter | object | no | +| [transition](#Transitions) | Next transition of the workflow after injection has completed | object | yes (if end is set to false) | | [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | -| [transition](#Transitions) | Next transition of the workflow after callback event has been received | object | yes | -| [end](#End-Definition) | Is this state an end state | object | no | -| [compensatedBy](#Workflow-Compensation) | Uniaue name of a workflow state which is responsible for compensation of this state | String | no | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | | [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | | [metadata](#Workflow-Metadata) | Metadata information| object | no | +| [end](#End-Definition) | Is this state an end state | object | no |
Click to view example definition

@@ -2960,20 +3281,13 @@ The results of each parallel action execution are stored as elements in the stat ```json -{ - "name": "CheckCredit", - "type": "callback", - "action": { - "functionRef": { - "refName": "callCreditCheckMicroservice", - "arguments": { - "customer": "${ .customer }" - } - } - }, - "eventRef": "CreditCheckCompletedEvent", - "timeout": "PT15M", - "transition": "EvaluateDecision" +{ + "name":"Hello", + "type":"inject", + "data": { + "result": "Hello" + }, + "transition": "World" } ``` @@ -2981,16 +3295,11 @@ The results of each parallel action execution are stored as elements in the stat ```yaml -name: CheckCredit -type: callback -action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" -eventRef: CreditCheckCompletedEvent -timeout: PT15M -transition: EvaluateDecision +name: Hello +type: inject +data: + result: Hello +transition: World ``` @@ -2999,40 +3308,77 @@ transition: EvaluateDecision

-Serverless orchestration can at times require manual steps/decisions to be made. While some work performed -in a serverless workflow can be executed automatically, some decisions must involve manual steps (e.g., human decisions). -The Callback state allows you to explicitly model manual decision steps during workflow execution. +Inject state can be used to inject static data into state data input. Inject state does not perform any actions. +It is very useful for debugging, for example, as you can test/simulate workflow execution with pre-set data that would typically +be dynamic in nature (e.g., function calls, events). -The action property defines a function call that triggers an external activity/service. Once the action executes, -the callback state will wait for a CloudEvent (defined via the `eventRef` property), which indicates the completion -of the manual decision by the called service. +The inject state `data` property allows you to statically define a JSON object which gets added to the states data input. +You can use the filter property to control the states data output to the transition state. -Note that the called decision service is responsible for emitting the callback CloudEvent indicating the completion of the -decision and including the decision results as part of the event payload. This event must be correlated to the -workflow instance using the callback events context attribute defined in the `correlation` property of the -referenced [Event Definition](#Event-Definition). +Here is a typical example of how to use the inject state to add static data into its states data input, which then is passed +as data output to the transition state: -Once the completion (callback) event is received, the Callback state completes its execution and transitions to the next -defined workflow state or completes workflow execution in case it is an end state. + + + + + + + + + +
JSONYAML
+ + ```json + { + "name":"SimpleInjectState", + "type":"inject", + "data": { + "person": { + "fname": "John", + "lname": "Doe", + "address": "1234 SomeStreet", + "age": 40 + } + }, + "transition": "GreetPersonState" + } + ``` + + + +```yaml + name: SimpleInjectState + type: inject + data: + person: + fname: John + lname: Doe + address: 1234 SomeStreet + age: 40 + transition: GreetPersonState +``` -The callback event payload is merged with the Callback state data and can be filtered via the "eventDataFilter" definition. +
-The Callback state `timeout` property defines a time period from the action execution until the callback event should be received. +The data output of the "SimpleInjectState" which then is passed as input to the transition state would be: -If the defined callback event has not been received during this time period, the state should transition to the next state or end workflow execution if it is an end state. +```json +{ + "person": { + "fname": "John", + "lname": "Doe", + "address": "1234 SomeStreet", + "age": 40 + } +} -#### Repeat Definition +``` -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| [expression](#Workflow-Expressions) | Workflow expression evaluated against state data. SubFlow will repeat execution as long as this expression is true or until the max property count is reached | string | no | -| checkBefore | If set to `true` (default value) the expression is evaluated before each repeat execution, if set to false the expression is evaluated after each repeat execution | boolean | no | -| max | Sets the maximum amount of repeat executions | integer | no | -| continueOnError | If set to `true` repeats executions in a case unhandled errors propagate from the sub-workflow to this state | boolean | no | -| stopOnEvents | List referencing defined consumed workflow events. SubFlow will repeat execution until one of the defined events is consumed, or until the max property count is reached | array | no | +If the inject state already receives a data input from the previous transition state, the inject data should be merged +with its data input. -
Click to view example definition -

+You can also use the filter property to filter the state data after data is injected. Let's say we have: @@ -3043,71 +3389,116 @@ If the defined callback event has not been received during this time period, the
```json -{ - "max": 10, - "continueOnError": true -} + { + "name":"SimpleInjectState", + "type":"inject", + "data": { + "people": [ + { + "fname": "John", + "lname": "Doe", + "address": "1234 SomeStreet", + "age": 40 + }, + { + "fname": "Marry", + "lname": "Allice", + "address": "1234 SomeStreet", + "age": 25 + }, + { + "fname": "Kelly", + "lname": "Mill", + "address": "1234 SomeStreet", + "age": 30 + } + ] + }, + "stateDataFilter": { + "output": "${ {people: [.people[] | select(.age < 40)]} }" + }, + "transition": "GreetPersonState" + } ``` ```yaml -max: 10 -continueOnError: true + name: SimpleInjectState + type: inject + data: + people: + - fname: John + lname: Doe + address: 1234 SomeStreet + age: 40 + - fname: Marry + lname: Allice + address: 1234 SomeStreet + age: 25 + - fname: Kelly + lname: Mill + address: 1234 SomeStreet + age: 30 + stateDataFilter: + output: "${ {people: [.people[] | select(.age < 40)]} }" + transition: GreetPersonState ```
-

- -Repeat definition can be used in [SubFlow](#SubFlow-State) states to define repeated execution (looping). - -The `expression` parameter is a [workflow expression](#Workflow-Expressions). It is -evaluated against SubFlow states data. -SubFlow state should repeat its execution as long as this expression evaluates to `true` (the expression returns a non-empty result), -or until the `max` property limit is reached. This parameter allows you to stop repeat execution based on data. - -The `checkBefore` property can be used to decide if the `expression` evaluation should be done before or after -each SubFlow state execution. Default value of this property is `true`. - -The `max` property sets the maximum count of repeat executions. It should be a positive integer value. -Runtime implementations must define an internal repeat/loop counter which is incremented for each of the -SubFlow state repeated executions. If this counter reaches the max value, repeated executions should end. - -The `continueOnError` property defines if repeated executions should continue or not in case unhandled errors are propagated -by the sub-workflow to the SubFlow state. Default value of this property is `false`. -Unhandled errors are errors which are not explicitly handled by the sub-workflow, and the SubFlow state -via its [`onErrors`](#Error-Definition) definition. - -If `continueOnError` is set to `false` (default value), and an unhandled error occurs, it should be handled -as any other unhandled workflow error, meaning repeat execution shall stop and workflow should stop its exception. - -If an error occurs which propagates to the SubFlow state, and is handled explicitly by the -SubFlow states [`onErrors`](#Error-Definition) definition, the control flow must take the path of the error handling definition -and repeat execution must halt. - -An alternative way to limit repeat executions is via the `stopOnEvents` property. It contains a list of one or more -defined consumed workflow events (referenced by the unique event name). When `stopOnEvents` is defined, -SubFlow will repeat execution until one of the defined events is consumed, or until the max property count is reached. +In which case the states data output would include only people whose age is less than 40: -#### Start Definition +```json +{ + "people": [ + { + "fname": "Marry", + "lname": "Allice", + "address": "1234 SomeStreet", + "age": 25 + }, + { + "fname": "Kelly", + "lname": "Mill", + "address": "1234 SomeStreet", + "age": 30 + } + ] +} +``` -Can be either `string` or `object` type. If type string, it defines the name of the workflow starting state. +You can change your output path easily during testing, for example change the expression to: -```json -"start": "MyStartingState" +```text +${ {people: [.people[] | select(.age >= 40)]} } ``` -In this case it's assumed that the `schedule` property is not defined. -If the start definition is of type `object`, it has the following structure: +This allows you to test if your workflow behaves properly for cases when there are people whose age is greater or equal 40. + +#### ForEach State | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| stateName | Name of the starting workflow state | object | yes | -| [schedule](#Schedule-Definition) | Define the time/repeating intervals or cron at which workflow instances should be automatically started. 
| object | yes | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| inputCollection | Workflow expression selecting an array element of the states data | string | yes | +| outputCollection | Workflow expression specifying an array element of the states data to add the results of each iteration | string | no | +| iterationParam | Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain a unique element of the inputCollection array | string | yes | +| max | Specifies the upper bound on how many iterations may run in parallel | string or number | no | +| [actions](#Action-Definition) | Actions to be executed for each of the elements of inputCollection | array | yes if workflowId is not defined | +| workflowId | Unique Id of a workflow to be executed for each of the elements of inputCollection | string | yes if actions is not defined | +| [stateDataFilter](#State-data-filters) | State data filter definition | object | no | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [transition](#Transitions) | Next transition of the workflow after state has completed | object | yes (if end is not defined) | +| [compensatedBy](#Workflow-Compensation) | Unique name of a workflow state which is responsible for compensation of this state | String | no | +| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| [metadata](#Workflow-Metadata) | Metadata information | object | no | +| [end](#End-Definition) | Is this state an end state | object | no |
Click to view example definition

@@ -3122,8 +3513,21 @@ If the start definition is of type `object`, it has the following structure: ```json { - "stateName": "MyStartingstate", - "schedule": "2020-03-20T09:00:00Z/2020-03-20T15:00:00Z" + "name": "ProvisionOrdersState", + "type": "foreach", + "inputCollection": "${ .orders }", + "iterationParam": "singleorder", + "outputCollection": "${ .provisionresults }", + "actions": [ + { + "functionRef": { + "refName": "provisionOrderFunction", + "arguments": { + "order": "${ .singleorder }" + } + } + } + ] } ``` @@ -3131,8 +3535,16 @@ If the start definition is of type `object`, it has the following structure: ```yaml -stateName: MyStartingstate -schedule: 2020-03-20T09:00:00Z/2020-03-20T15:00:00Z +name: ProvisionOrdersState +type: foreach +inputCollection: "${ .orders }" +iterationParam: "singleorder" +outputCollection: "${ .provisionresults }" +actions: +- functionRef: + refName: provisionOrderFunction + arguments: + order: "${ .singleorder }" ``` @@ -3141,66 +3553,58 @@ schedule: 2020-03-20T09:00:00Z/2020-03-20T15:00:00Z

-Start definition explicitly defines how/when workflow instances should be created and what the workflow starting state is. - -The start definition can be either `string` or `object` type. - -If `string` type, it defines the name of the workflow starting state. - -If `object` type, it provides the ability to set the workflow starting state name, as well as the `schedule` property. - -The `schedule` property allows to define scheduled workflow instance creation. -Scheduled starts have two different choices. You can define a repeating interval or cron-based schedule at which a workflow -instance **should** be created (automatically). +ForEach states can be used to execute [actions](#Action-Definition), or a [sub-workflow](#SubFlow-State) for +each element of a data set. -You can also define cron-based scheduled starts, which allows you to specify periodically started workflow instances based on a [cron](http://crontab.org/) definition. -Cron-based scheduled starts can handle absolute time intervals (i.e., not calculated in respect to some particular point in time). -One use case for cron-based scheduled starts is a workflow that performs periodical data batch processing. -In this case we could use a cron definition +Each iteration of the ForEach state should be executed in parallel. -``` text -0 0/5 * * * ? -``` +You can use the `max` property to set the upper bound on how many iterations may run in parallel. The default +of the `max` property is zero, which places no limit on number of parallel executions. -to define that a workflow instance from the workflow definition should be created every 5 minutes, starting at full hour. +The `inputCollection` property is a workflow expression which selects an array in the states data. All iterations +are performed against data elements of this array. If this array does not exist, the runtime should throw +an error. This error can be handled inside the states [`onErrors`](#Error-Definition) definition. -Here are some more examples of cron expressions and their meanings: +The `outputCollection` property is a workflow expression which selects an array in the state data where the results +of each iteration should be added to. If this array does not exist, it should be created. -``` text -* * * * * - Create workflow instance at the top of every minute -0 * * * * - Create workflow instance at the top of every hour -0 */2 * * * - Create workflow instance every 2 hours -0 9 8 * * - Create workflow instance at 9:00:00AM on the eighth day of every month -``` +The `iterationParam` property defines the name of the iteration parameter passed to each parallel execution of the foreach state. +It should contain the unique element of the `inputCollection` array and passed as data input to the actions/workflow defined. +`iterationParam` should be created for each iteration, so it can be referenced/used in defined actions / workflow data input. -[See here](http://crontab.org/) to get more information on defining cron expressions. +The `actions` property defines actions to be executed in each state iteration. -One thing to discuss when dealing with cron-based scheduled starts is when the workflow starting state is an [Event](#Event-State). -Event states define that workflow instances are triggered by the existence of the defined event(s). -Defining a cron-based scheduled starts for the runtime implementations would mean that there needs to be an event service that issues -the needed events at the defined times to trigger workflow instance creation. 
+If actions are not defined, you can specify the `workflowid` to reference a workflow id which needs to be executed +for each iteration. Note that `workflowid` should not be the same as the workflow id of the workflow where the foreach state +is defined. -#### Schedule Definition +Let's take a look at an example: -`Schedule` definition can have two types, either `string` or `object`. -If `string` type, it defines time interval describing when the workflow instance should be automatically created. -This can be used as a short-cut definition when you don't need to define any other parameters, for example: +In this example the data input to our workflow is an array of orders: ```json -"schedule": "R/PT2H" +{ + "orders": [ + { + "orderNumber": "1234", + "completed": true, + "email": "firstBuyer@buyer.com" + }, + { + "orderNumber": "5678", + "completed": true, + "email": "secondBuyer@buyer.com" + }, + { + "orderNumber": "9910", + "completed": false, + "email": "thirdBuyer@buyer.com" + } + ] +} ``` -If you need to define the `cron` or the `timezone` parameters in your `schedule` definition, you can define -it with its `object` type which has the following properties: - -| Parameter | Description | Type | Required | -| --- | --- | --- | --- | -| interval | Time interval (must be repeating interval) described with ISO 8601 format. Declares when workflow instances will be automatically created. | string | yes if `cron` not defined | -| [cron](#Cron-Definition) | Cron expression defining when workflow instances should be created (automatically) | object | yes if `interval` not defined | -| timezone | Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time w/ timezone then proper timezone conversion will be applied. (default: UTC). | string | no | - -
Click to view example definition -

+and our workflow is defined as: @@ -3212,7 +3616,35 @@ it with its `object` type which has the following properties: ```json { - "cron": "0 0/15 * * * ?" + "id": "sendConfirmWorkflow", + "name": "SendConfirmationForCompletedOrders", + "version": "1.0", + "start": "SendConfirmState", + "functions": [ + { + "name": "sendConfirmationFunction", + "operation": "file://confirmationapi.json#sendOrderConfirmation" + } + ], + "states": [ + { + "name":"SendConfirmState", + "type":"foreach", + "inputCollection": "${ [.orders[] | select(.completed == true)] }", + "iterationParam": "completedorder", + "outputCollection": "${ .confirmationresults }", + "actions":[ + { + "functionRef": { + "refName": "sendConfirmationFunction", + "arguments": { + "orderNumber": "${ .completedorder.orderNumber }", + "email": "${ .completedorder.email }" + } + } + }], + "end": true + }] } ``` @@ -3220,53 +3652,82 @@ it with its `object` type which has the following properties:
```yaml -cron: 0 0/15 * * * ? +id: sendConfirmWorkflow +name: SendConfirmationForCompletedOrders +version: '1.0' +start: SendConfirmState +functions: +- name: sendConfirmationFunction + operation: file://confirmationapi.json#sendOrderConfirmation +states: +- name: SendConfirmState + type: foreach + inputCollection: "${ [.orders[] | select(.completed == true)] }" + iterationParam: completedorder + outputCollection: "${ .confirmationresults }" + actions: + - functionRef: + refName: sendConfirmationFunction + arguments: + orderNumber: "${ .completedorder.orderNumber }" + email: "${ .completedorder.email }" + end: true ```
-

- -The `interval` property uses the ISO 8601 time repeating interval format to describe when workflow instances will be automatically created. -There are a number of supported ways to express the repeating interval: - -1. `R//`: Defines the start time and a duration, for example: "R/2020-03-20T13:00:00Z/PT2H", meaning workflow -instances will be automatically created every 2 hours starting from March 20th 2020 at 1pm UTC. -2. `R//`: Defines a duration and an end, for example: "R/PT2H/2020-05-11T15:30:00Z", meaning that workflow instances will be -automatically created every 2 hours until until May 11th 2020 at 3:30PM UTC. -3. `R/`: Defines a duration only, for example: "R/PT2H", meaning workflow instances will be automatically created every 2 hours. - -The `cron` property uses a [cron expression](http://crontab.org/) -to describe a repeating interval upon which a workflow instance should be created automatically. -For more information see the [cron definition](#Cron-Definition) section. +The workflow data input containing order information is passed to the `SendConfirmState` foreach state. +The foreach state defines an `inputCollection` property which selects all orders that have the `completed` property set to `true`. -The `timezone` property is used to define a time zone name to evaluate the cron or interval expression against. If not specified, it should default -to UTC time zone. See [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) for a list of timezone names. For ISO 8601 date time -values in `interval` or `cron.validUntil`, runtimes should treat `timezone` as the 'local time' (UTC if `interval` is not defined by the user). +For each element of the array selected by `inputCollection` a JSON object defined by `iterationParam` should be +created containing an unique element of `inputCollection` and passed as the data input to the parallel executed actions. -Note that when the workflow starting state is an [Event](#Event-State) -defining cron-based scheduled starts for the runtime implementations would mean that there needs to be an event service that issues -the needed events at the defined times to trigger workflow instance creation. +So for this example, we would have two parallel executions of the `sendConfirmationFunction`, the first one having data: -#### Cron Definition +```json +{ + "completedorder": { + "orderNumber": "1234", + "completed": true, + "email": "firstBuyer@buyer.com" + } +} +``` -`Cron` definition can have two types, either `string` or `object`. -If `string` type, it defines the cron expression describing when the workflow instance should be created (automatically). -This can be used as a short-cut definition when you don't need to define any other parameters, for example: +and the second: ```json -"cron": "0 15,30,45 * ? * *" +{ + "completedorder": { + "orderNumber": "5678", + "completed": true, + "email": "secondBuyer@buyer.com" + } +} ``` -If you need to define the `validUntil` parameters in your `cron` definition, you can define -it with its `object` type which has the following properties: +The results of each parallel action execution are stored as elements in the state data array defined by the `outputCollection` property. 
+ +#### Callback State | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| expression | Cron expression describing when the workflow instance should be created (automatically) | string | yes | -| validUntil | Specific date and time (ISO 8601 format) when the cron expression is no longer valid | string | no | +| id | Unique state id | string | no | +| name | State name | string | yes | +| type | State type | string | yes | +| [action](#Action-Definition) | Defines the action to be executed | object | yes | +| eventRef | References an unique callback event name in the defined workflow [events](#Event-Definition) | string | yes | +| [timeout](#eventstate-timeout) | Time period to wait from when action is executed until the callback event is received (ISO 8601 format). For example: "PT15M" (wait 15 minutes), or "P2DT3H4M" (wait 2 days, 3 hours and 4 minutes)| string | yes | +| [eventDataFilter](#Event-data-filters) | Callback event data filter definition | object | no | +| [stateDataFilter](#State-data-filters) | State data filter definition | object | no | +| [onErrors](#Error-Definition) | States error handling and retries definitions | array | no | +| [transition](#Transitions) | Next transition of the workflow after callback event has been received | object | yes | +| [end](#End-Definition) | Is this state an end state | object | no | +| [compensatedBy](#Workflow-Compensation) | Uniaue name of a workflow state which is responsible for compensation of this state | String | no | +| [usedForCompensation](#Workflow-Compensation) | If true, this state is used to compensate another state. Default is "false" | boolean | no | +| [metadata](#Workflow-Metadata) | Metadata information| object | no |
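The callback event referenced by `eventRef` must be correlated to the workflow instance (see the description of the `correlation` property further below). A hedged sketch of what such an [event definition](#Event-Definition) might look like; the event `type`, `source`, and context attribute name here are made up:

```json
{
  "name": "CreditCheckCompletedEvent",
  "type": "creditCheckCompleteType",
  "source": "creditCheckSource",
  "correlation": [
    {
      "contextAttributeName": "customerId"
    }
  ]
}
```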
Click to view example definition

@@ -3281,8 +3742,19 @@ it with its `object` type which has the following properties: ```json { - "expression": "0 15,30,45 * ? * *", - "validUntil": "2021-11-05T08:15:30-05:00" + "name": "CheckCredit", + "type": "callback", + "action": { + "functionRef": { + "refName": "callCreditCheckMicroservice", + "arguments": { + "customer": "${ .customer }" + } + } + }, + "eventRef": "CreditCheckCompletedEvent", + "timeout": "PT15M", + "transition": "EvaluateDecision" } ``` @@ -3290,8 +3762,16 @@ it with its `object` type which has the following properties: ```yaml -expression: 0 15,30,45 * ? * * -validUntil: '2021-11-05T08:15:30-05:00' +name: CheckCredit +type: callback +action: + functionRef: + refName: callCreditCheckMicroservice + arguments: + customer: "${ .customer }" +eventRef: CreditCheckCompletedEvent +timeout: PT15M +transition: EvaluateDecision ``` @@ -3300,43 +3780,37 @@ validUntil: '2021-11-05T08:15:30-05:00'

-The `expression` property is a a [cron expression](http://crontab.org/) which defines -when workflow instances should be created (automatically). - -The `validUntil` property defines a date and time (using ISO 8601 format). When the -`validUntil` time is reached, the cron expression for instances creations of this workflow -should no longer be valid. +Serverless orchestration can at times require manual steps/decisions to be made. While some work performed +in a serverless workflow can be executed automatically, some decisions must involve manual steps (e.g., human decisions). +The Callback state allows you to explicitly model manual decision steps during workflow execution. -For example let's say we have to following cron definitions: +The action property defines a function call that triggers an external activity/service. Once the action executes, +the callback state will wait for a CloudEvent (defined via the `eventRef` property), which indicates the completion +of the manual decision by the called service. -```json -{ - "expression": "0 15,30,45 * ? * *", - "validUntil": "2021-11-05T08:15:30-05:00" -} -``` +Note that the called decision service is responsible for emitting the callback CloudEvent indicating the completion of the +decision and including the decision results as part of the event payload. This event must be correlated to the +workflow instance using the callback events context attribute defined in the `correlation` property of the +referenced [Event Definition](#Event-Definition). -This tells the runtime engine to create an instance of this workflow every hour -at minutes 15, 30 and 45. This is to be done until November 5, 2021, 8:15:30 am, US Eastern Standard Time -as defined by the `validUntil` property value. +Once the completion (callback) event is received, the Callback state completes its execution and transitions to the next +defined workflow state or completes workflow execution in case it is an end state. -#### End Definition +The callback event payload is merged with the Callback state data and can be filtered via the "eventDataFilter" definition. -Can be either `boolean` or `object` type. If type boolean, must be set to `true`, for example: +The Callback state `timeout` property defines a time period from the action execution until the callback event should be received. -```json -"end": true -``` -In this case it's assumed that the `terminate` property has its default value of `false`, and the `produceEvents` and -`compensate` properties are not defined. +If the defined callback event has not been received during this time period, the state should transition to the next state or end workflow execution if it is an end state. -If the end definition is of type `object`, it has the following structure: +#### Repeat Definition -| Parameter | Description | Type | Required | +| Parameter | Description | Type | Required | | --- | --- | --- | --- | -| terminate | If true, terminates workflow instance execution | boolean | no | -| produceEvents | Array of [producedEvent](#ProducedEvent-Definition) definitions. Defines events that should be produced. | array | no | -| [compensate](#Workflow-Compensation) | If set to `true`, triggers workflow compensation before workflow execution completes. Default is `false` | boolean | no | +| [expression](#Workflow-Expressions) | Workflow expression evaluated against state data. 
SubFlow will repeat execution as long as this expression is true or until the max property count is reached | string | no | +| checkBefore | If set to `true` (default value), the expression is evaluated before each repeat execution; if set to `false`, the expression is evaluated after each repeat execution | boolean | no | +| max | Sets the maximum number of repeat executions | integer | no | +| continueOnError | If set to `true`, repeats executions in case unhandled errors propagate from the sub-workflow to this state | boolean | no | +| stopOnEvents | List referencing defined consumed workflow events. SubFlow will repeat execution until one of the defined events is consumed, or until the max property count is reached | array | no |
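Before the standalone example below, note that a repeat definition is always embedded in a [SubFlow state](#SubFlow-State). A minimal, illustrative sketch of it in context (the workflow id, event name, and state names are made up):

```json
{
  "name": "CheckJobStatus",
  "type": "subflow",
  "workflowId": "checkJobStatusWorkflowId",
  "repeat": {
    "expression": "${ .job.status == \"running\" }",
    "checkBefore": true,
    "max": 10,
    "stopOnEvents": ["JobCancelledEvent"]
  },
  "transition": "ReportJobResult"
}
```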
Click to view example definition

@@ -3351,11 +3825,8 @@ If the end definition is of type `object`, it has the following structure: ```json { - "terminate": true, - "produceEvents": [{ - "eventRef": "provisioningCompleteEvent", - "data": "${ .provisionedOrders }" - }] + "max": 10, + "continueOnError": true } ``` @@ -3363,11 +3834,8 @@ If the end definition is of type `object`, it has the following structure: ```yaml -terminate: true -produceEvents: -- eventRef: provisioningCompleteEvent - data: "${ .provisionedOrders }" - +max: 10 +continueOnError: true ``` @@ -3376,32 +3844,51 @@ produceEvents:

-End definitions are used to explicitly define execution completion of a workflow instance or workflow execution path. -A workflow definition must include at least one [workflow state](#State-Definition). -Note that [Switch states](#Switch-State) cannot declare to be workflow end states. Switch states must end -their execution followed by a transition another workflow state, given their conditional evaluation. +Repeat definition can be used in [SubFlow](#SubFlow-State) states to define repeated execution (looping). + +The `expression` parameter is a [workflow expression](#Workflow-Expressions). It is +evaluated against the SubFlow state's data. +SubFlow state should repeat its execution as long as this expression evaluates to `true` (the expression returns a non-empty result), +or until the `max` property limit is reached. This parameter allows you to stop repeat execution based on data. + +The `checkBefore` property can be used to decide if the `expression` evaluation should be done before or after +each SubFlow state execution. Default value of this property is `true`. + +The `max` property sets the maximum count of repeat executions. It should be a positive integer value. +Runtime implementations must define an internal repeat/loop counter which is incremented for each of the +SubFlow state's repeated executions. If this counter reaches the max value, repeated executions should end. + +The `continueOnError` property defines if repeated executions should continue or not in case unhandled errors are propagated +by the sub-workflow to the SubFlow state. Default value of this property is `false`. +Unhandled errors are errors which are not explicitly handled by the sub-workflow, or by the SubFlow state +via its [`onErrors`](#Error-Definition) definition. + +If `continueOnError` is set to `false` (default value), and an unhandled error occurs, it should be handled +as any other unhandled workflow error, meaning repeat execution shall stop and the workflow should stop its execution. + +If an error occurs which propagates to the SubFlow state, and is handled explicitly by the +SubFlow state's [`onErrors`](#Error-Definition) definition, the control flow must take the path of the error handling definition +and repeat execution must halt. +An alternative way to limit repeat executions is via the `stopOnEvents` property. It contains a list of one or more +defined consumed workflow events (referenced by the unique event name). When `stopOnEvents` is defined, +SubFlow will repeat execution until one of the defined events is consumed, or until the max property count is reached. -The `terminate` property, if set to `true`, completes the workflow instance execution, this any other active -execution paths. -If a terminate end is reached inside a ForEach, Parallel, or SubFlow state, the entire workflow instance is terminated. +#### Start Definition -The [`produceEvents`](#ProducedEvent-Definition) allows to define events which should be produced -by the workflow instance before workflow stops its execution. +Can be either `string` or `object` type. If type string, it defines the name of the workflow starting state. -It's important to mention that if the workflow `keepActive` property is set to`true`, -the only way to complete execution of the workflow instance -is if workflow execution reaches a state that defines an end definition with `terminate` property set to `true`, -or, if the [execution timeout](#ExecTimeout-Definition) property is defined, the time defined in its `interval` -is reached. 
+```json +"start": "MyStartingState" +``` +In this case it's assumed that the `schedule` property is not defined. -#### ProducedEvent Definition +If the start definition is of type `object`, it has the following structure: | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| eventRef | Reference to a defined unique event name in the [events](#Event-Definition) definition | string | yes | -| data | If string type, an expression which selects parts of the states data output to become the data (payload) of the produced event. If object type, a custom object to become the data (payload) of produced event. | string or object | no | -| contextAttributes | Add additional event extension context attributes | object | no | +| stateName | Name of the starting workflow state | string | yes | +| [schedule](#Schedule-Definition) | Define the time/repeating intervals or cron at which workflow instances should be automatically started. 
Click to view example definition

@@ -3416,22 +3903,17 @@ is reached. ```json { - "eventRef": "provisioningCompleteEvent", - "data": "${ .provisionedOrders }", - "contextAttributes": [{ - "buyerId": "${ .buyerId }" - }] - } + "stateName": "MyStartingstate", + "schedule": "2020-03-20T09:00:00Z/2020-03-20T15:00:00Z" +} ``` ```yaml -eventRef: provisioningCompleteEvent -data: "${ .provisionedOrders }" -contextAttributes: -- buyerId: "${ .buyerId }" +stateName: MyStartingstate +schedule: 2020-03-20T09:00:00Z/2020-03-20T15:00:00Z ``` @@ -3440,127 +3922,65 @@ contextAttributes:

-Defines the event (CloudEvent format) to be produced when workflow execution completes or during a workflow [transitions](#Transitions).
-The `eventRef` property must match the name of
-one of the defined `produced` events in the [events](#Event-Definition) definition.
+Start definition explicitly defines how/when workflow instances should be created and what the workflow starting state is.
-The `data` property can have two types, object or string. If of string type, it is an expression that can select parts of state data
-to be used as the event payload. If of object type, you can define a custom object to be the event payload.
+The start definition can be either `string` or `object` type.
-The `contextAttributes` property allows you to add one or more [extension context attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes)
-to the generated event.
+If `string` type, it defines the name of the workflow starting state.
-Being able to produce events when workflow execution completes or during state transition
-allows for event-based orchestration communication.
-For example, completion of an orchestration workflow can notify other orchestration workflows to decide if they need to act upon
-the produced event, or notify monitoring services of the current state of workflow execution, etc.
-It can be used to create very dynamic orchestration scenarios.
+If `object` type, it provides the ability to set the workflow starting state name, as well as the `schedule` property.
-#### Transitions
+The `schedule` property allows you to define scheduled workflow instance creation.
+Scheduled starts offer two options. You can define a repeating interval or a cron-based schedule at which a workflow
+instance **should** be created (automatically).
-Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states).
-Each state can define a `transition` definition that is used to determine which
-state to transition to next.
+You can also define cron-based scheduled starts, which allows you to specify periodically started workflow instances based on a [cron](http://crontab.org/) definition.
+Cron-based scheduled starts can handle absolute time intervals (i.e., not calculated with respect to some particular point in time).
+One use case for cron-based scheduled starts is a workflow that performs periodical data batch processing.
+In this case we could use a cron definition
-Implementers can choose to use the states `name` property
-for determining the transition; however, we realize that in most cases this is not an
-optimal solution that can lead to ambiguity. This is why each state also include an "id"
-property. Implementers can choose their own id generation strategy to populate the `id` property
-for each of the states and use it as the unique state identifier that is to be used as the "nextState" value.
+``` text
+0 0/5 * * * ?
+```
-So the options for next state transitions are:
+to define that a workflow instance from the workflow definition should be created every 5 minutes, starting at the full hour.
-- Use the state name property
-- Use the state id property
-- Use a combination of name and id properties
+Here are some more examples of cron expressions and their meanings:
-Events can be produced during state transitions. The `produceEvents` property of the `transition` definitions allows you
-to reference one or more defined `produced` events in the workflow [events definitions](#Event-Definition).
-For each of the produced events you can select what parts of state data to be the event payload.
+``` text
+* * * * * - Create workflow instance at the top of every minute
+0 * * * * - Create workflow instance at the top of every hour
+0 */2 * * * - Create workflow instance every 2 hours
+0 9 8 * * - Create workflow instance at 9:00:00AM on the eighth day of every month
+```
-Transitions can trigger compensation via their `compensate` property. See the [Workflow Compensation](#Workflow-Compensation)
-section for more information.
+[See here](http://crontab.org/) to get more information on defining cron expressions.
-#### Workflow Data Input
+One thing to note about cron-based scheduled starts is the case when the workflow starting state is an [Event](#Event-State) state.
+Event states define that workflow instances are triggered by the existence of the defined event(s).
+Defining cron-based scheduled starts for the runtime implementations would mean that there needs to be an event service that issues
+the needed events at the defined times to trigger workflow instance creation.
+
+#### Schedule Definition
-The initial data input into a workflow instance must be a valid [JSON object](https://tools.ietf.org/html/rfc7159#section-4).
-If no input is provided the default data input is the empty object:
+`Schedule` definition can have two types, either `string` or `object`.
+If `string` type, it defines a time interval describing when the workflow instance should be automatically created.
+This can be used as a short-cut definition when you don't need to define any other parameters, for example:
```json
{
-
+  "schedule": "R/PT2H"
}
```
-Workflow data input is passed to the workflow [starting state](#Start-Definition) as data input.
-
-
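As an illustrative sketch only (not an example taken from the specification text), the object form of the start definition can combine a starting state name with the scheduled starts described above. The state name `ProcessBatchDataState` and the cron value are assumed placeholders:

```json
{
  "start": {
    "stateName": "ProcessBatchDataState",
    "schedule": {
      "cron": "0 0/5 * * * ?"
    }
  }
}
```

Here the nested `cron` short-cut stands in for the full cron definition described in the sections that follow.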

-Workflow data input -

- -#### Event Data - -[Event states](#Event-State) wait for arrival of defined CloudEvents, and when consumed perform a number of defined actions. -CloudEvents can contain data which is needed to make further orchestration decisions. Data from consumed CloudEvents -can be merged with the data of the Event state, so it can be used inside defined actions -or be passed as data output to transition states. - -

-Event data merged with state data input -

- -Similarly for Callback states, the callback event data is merged with the data of the Callback state. - -Merging of this data case be controlled via [data filters](#State-Data-Filtering). - -#### Action Data - -[Event](#Event-State), [Callback](#Callback-State), and [Operation](#Operation-State) states can execute [actions](#Action-Definition). -Actions can invoke different services (functions). Functions can return results that may be needed to make -further orchestration decisions. Function results can be merged with the state data. - -

-Actions data merged with state data -

- -Merging of this data case be controlled via [data filters](#State-Data-Filtering). - -#### Information Passing Between States - -States in a workflow can receive data (data input) as well as produce a data result (data output). The states data input is -typically the previous states data output. -When a state completes its tasks, its data output is passed to the data input of the state it transitions to. - -There are two of rules to consider here: - -- If the state is the workflow starting state, its data input is the [workflow data input](#Workflow-data-input). -- If the state is an end state ([`end`](#End-Definition) property is defined), its data output is the [workflow data output](#Workflow-data-output). - -

-Basic state data passing -

- -#### State Data Filtering - -Data filters allow you to select and extract specific data that is useful and needed during workflow execution. -Filters use [workflow expressions](#Workflow-Expressions) for extracting portions of state's data -input and output, actions data and results, event data, as well as error data. - -There are several types of data filters: - -- [State Data Filter](#state-data-filter) -- [Action Data Filter](#action-data-filter) -- [Event Data Filter](#event-data-filter) - -All states can define state data filters. States which can consume events can define event data filters, and states -that can perform actions can define action data filters for each of the actions they perform. - -#### State data filtering - State Data Filter +If you need to define the `cron` or the `timezone` parameters in your `schedule` definition, you can define +it with its `object` type which has the following properties: | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| dataInputPath | Workflow expression to filter the states data input | string | no | -| dataOutputPath | Workflow expression that filters the states data output | string | no | +| interval | Time interval (must be repeating interval) described with ISO 8601 format. Declares when workflow instances will be automatically created. | string | yes if `cron` not defined | +| [cron](#Cron-Definition) | Cron expression defining when workflow instances should be created (automatically) | object | yes if `interval` not defined | +| timezone | Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time w/ timezone then proper timezone conversion will be applied. (default: UTC). | string | no |
Click to view example definition

@@ -3575,8 +3995,7 @@ that can perform actions can define action data filters for each of the actions ```json { - "dataInputPath": "${ .orders }", - "dataOutputPath": "${ .provisionedOrders }" + "cron": "0 0/15 * * * ?" } ``` @@ -3584,8 +4003,7 @@ that can perform actions can define action data filters for each of the actions ```yaml -dataInputPath: "${ .orders }" -dataOutputPath: "${ .provisionedOrders }" +cron: 0 0/15 * * * ? ``` @@ -3594,96 +4012,116 @@ dataOutputPath: "${ .provisionedOrders }"

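Complementing the `cron` short-cut shown in the example above, here is a hedged sketch of a `schedule` object that uses the `interval` and `timezone` properties instead; the interval and timezone values are illustrative assumptions (the `interval` format is explained right below):

```json
{
  "interval": "R/2020-03-20T13:00:00Z/PT2H",
  "timezone": "America/New_York"
}
```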
-State data filters can be used to filter the states data input and output.
+The `interval` property uses the ISO 8601 time repeating interval format to describe when workflow instances will be automatically created.
+There are a number of supported ways to express the repeating interval:
+
+1. `R/<start>/<duration>`: Defines the start time and a duration, for example: "R/2020-03-20T13:00:00Z/PT2H", meaning workflow
+instances will be automatically created every 2 hours starting from March 20th 2020 at 1pm UTC.
+2. `R/<duration>/<end>`: Defines a duration and an end, for example: "R/PT2H/2020-05-11T15:30:00Z", meaning that workflow instances will be
+automatically created every 2 hours until May 11th 2020 at 3:30PM UTC.
+3. `R/<duration>`: Defines a duration only, for example: "R/PT2H", meaning workflow instances will be automatically created every 2 hours.
-The state data filters `dataInputPath` expression is applied when the workflow transitions to the current state and it receives its data input.
-It filters this data input selecting parts of it (only the selected data is considered part of the states data during its execution).
-If `dataInputPath` is not defined, or it does not select any parts of the states data input, the states data input is not filtered.
+The `cron` property uses a [cron expression](http://crontab.org/)
+to describe a repeating interval upon which a workflow instance should be created automatically.
+For more information see the [cron definition](#Cron-Definition) section.
-The state data filter `dataOutputPath` is applied right before the state transitions to the next state defined. It filters the states data
-output to be passed as data input to the transitioning state. If the current state is the workflow end state, the filtered states data
-output becomes the workflow data output.
-If `dataOutputPath` is not defined, or it does not select any parts of the states data output, the states data output is not filtered.
+The `timezone` property is used to define a time zone name to evaluate the cron or interval expression against. If not specified, it should default
+to the UTC time zone. See [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) for a list of timezone names. For ISO 8601 date time
+values in `interval` or `cron.validUntil`, runtimes should treat `timezone` as the 'local time' (UTC if `interval` is not defined by the user).
-Let's take a look at some examples of state filters. For our examples let's say the data input to our state is as follows:
+Note that when the workflow starting state is an [Event](#Event-State) state,
+defining cron-based scheduled starts for the runtime implementations would mean that there needs to be an event service that issues
+the needed events at the defined times to trigger workflow instance creation.
+
+#### Cron Definition
+
+`Cron` definition can have two types, either `string` or `object`.
+If `string` type, it defines the cron expression describing when the workflow instance should be created (automatically).
+This can be used as a short-cut definition when you don't need to define any other parameters, for example:
```json
{
-  "fruits": [ "apple", "orange", "pear" ],
-  "vegetables": [
-    {
-      "veggieName": "potato",
-      "veggieLike": true
-    },
-    {
-      "veggieName": "broccoli",
-      "veggieLike": false
-    }
-  ]
+  "cron": "0 15,30,45 * ? * *"
}
```
-For the first example our state only cares about fruits data, and we want to disregard the vegetables. To do this
-we can define a state filter:
+If you need to define the `validUntil` parameter in your `cron` definition, you can define
+it with its `object` type which has the following properties:
+
+| Parameter | Description | Type | Required |
+| --- | --- | --- | --- |
+| expression | Cron expression describing when the workflow instance should be created (automatically) | string | yes |
+| validUntil | Specific date and time (ISO 8601 format) when the cron expression is no longer valid | string | no |
+
+
Click to view example definition +

```json { - "stateDataFilter": { - "dataInputPath": "${ {fruits: .fruits} }" - } + "expression": "0 15,30,45 * ? * *", + "validUntil": "2021-11-05T08:15:30-05:00" } ``` -The state data output then would include only the fruits data: + -```json -{ - "fruits": [ "apple", "orange", "pear"] -} +```yaml +expression: 0 15,30,45 * ? * * +validUntil: '2021-11-05T08:15:30-05:00' ``` -

-State Data Filter Example -

+
-For our second example let's say that we are interested in only vegetable that are "veggie like". -Here we have two ways of filtering our data, depending on if actions within our state need access to all vegetables, or -only the ones that are "veggie like". +

+
+
+The `expression` property is a [cron expression](http://crontab.org/) which defines
+when workflow instances should be created (automatically).
-The first way would be to use both "dataInputPath", and "dataOutputPath":
+The `validUntil` property defines a date and time (using ISO 8601 format). When the
+`validUntil` time is reached, the cron expression for instance creations of this workflow
+should no longer be valid.
+
+For example let's say we have the following cron definition:
```json
{
-  "stateDataFilter": {
-    "dataInputPath": "${ {vegetables: .vegetables} }",
-    "dataOutputPath": "${ {vegetables: .vegetables[] | select(.veggieLike == true)} }"
-  }
+  "expression": "0 15,30,45 * ? * *",
+  "validUntil": "2021-11-05T08:15:30-05:00"
}
```
-The states data input filter selects all the vegetables from the main data input. Once all actions have performed, before the state transition
-or workflow execution completion (if this is an end state), the "dataOutputPath" of the state filter selects only the vegetables which are "veggie like".
+This tells the runtime engine to create an instance of this workflow every hour
+at minutes 15, 30 and 45. This is to be done until November 5, 2021, 8:15:30 am, US Eastern Standard Time
+as defined by the `validUntil` property value.
-
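Putting the schedule and cron definitions together, the following is a hedged sketch (not taken from the specification text) of a `schedule` object that uses the `object` form of `cron` from the example above and adds a `timezone`; the timezone value is an illustrative assumption:

```json
{
  "cron": {
    "expression": "0 15,30,45 * ? * *",
    "validUntil": "2021-11-05T08:15:30-05:00"
  },
  "timezone": "America/New_York"
}
```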

-State Data Filter Example -

+#### End Definition -The second way would be to directly filter only the "veggie like" vegetables with just the data input path: +Can be either `boolean` or `object` type. If type boolean, must be set to `true`, for example: -```json -{ - "stateDataFilter": { - "dataInputPath": "${ {vegetables: .vegetables[] | select(.veggieLike == true)} }" - } -} +```json +"end": true ``` +In this case it's assumed that the `terminate` property has its default value of `false`, and the `produceEvents` and +`compensate` properties are not defined. -#### State data filtering - Action Data Filter +If the end definition is of type `object`, it has the following structure: | Parameter | Description | Type | Required | | --- | --- | --- | --- | -| dataInputPath | Workflow expression that filters states data that can be used by the state action | string | no | -| dataResultsPath | Workflow expression that filters the actions data result, to be added to or merged with the states data | string | no | +| terminate | If true, terminates workflow instance execution | boolean | no | +| produceEvents | Array of [producedEvent](#ProducedEvent-Definition) definitions. Defines events that should be produced. | array | no | +| [compensate](#Workflow-Compensation) | If set to `true`, triggers workflow compensation before workflow execution completes. Default is `false` | boolean | no |
Click to view example definition

@@ -3698,8 +4136,11 @@ The second way would be to directly filter only the "veggie like" vegetables wit ```json { - "dataInputPath": "${ .language }", - "dataResultsPath": "${ .payload.greeting }" + "terminate": true, + "produceEvents": [{ + "eventRef": "provisioningCompleteEvent", + "data": "${ .provisionedOrders }" + }] } ``` @@ -3707,8 +4148,11 @@ The second way would be to directly filter only the "veggie like" vegetables wit ```yaml -dataInputPath: "${ .language } " -dataResultsPath: "${ .payload.greeting }" +terminate: true +produceEvents: +- eventRef: provisioningCompleteEvent + data: "${ .provisionedOrders }" + ``` @@ -3717,50 +4161,32 @@ dataResultsPath: "${ .payload.greeting }"

-[Actions](#Action-Definition) have access to the state data.
-They can filter this data using an action data filter 'dataInputPath' expression before invoking functions.
-This is useful if you want to restrict the data that can be passed as parameters to functions during action invocation.
-
-Actions can reference [functions](#Function-Definition) that need to be invoked.
-The results of these functions are considered the output of the action which needs to be added to or merged with the states data.
-You can filter the results of actions with the `dataResultsPath` property, to only select
-parts of the action results that need to go into the states data.
-
-To give an example, let's say we have an action which returns a list of breads and pasta types.
-For our workflow, we are only interested into breads and not the pasta.
-
-Action results:
+End definitions are used to explicitly define execution completion of a workflow instance or workflow execution path.
+A workflow definition must include at least one [workflow state](#State-Definition).
+Note that [Switch states](#Switch-State) cannot be declared as workflow end states. Switch states must end
+their execution followed by a transition to another workflow state, given their conditional evaluation.
-```json
-{
-  "breads": ["baguette", "brioche", "rye"],
-  "pasta": [ "penne", "spaghetti", "ravioli"]
-}
-```
-We can use an action data filter to filter only the breads data:
+The `terminate` property, if set to `true`, completes the workflow instance execution, as well as any other active
+execution paths.
+If a terminate end is reached inside a ForEach, Parallel, or SubFlow state, the entire workflow instance is terminated.
-```json
-{
-"actions":[
-  {
-   "functionRef": "breadAndPastaTypesFunction",
-   "actionDataFilter": {
-      "dataResultsPath": "${ {breads: .breads} }"
-   }
-  }
- ]
-}
-```
+The [`produceEvents`](#ProducedEvent-Definition) property allows you to define events which should be produced
+by the workflow instance before workflow stops its execution.
-With this action data filter in place only the bread types returned by the function invocation will be added to or merged
-with the state data.
+It's important to mention that if the workflow `keepActive` property is set to `true`,
+the only way to complete execution of the workflow instance
+is if workflow execution reaches a state that defines an end definition with the `terminate` property set to `true`,
+or, if the [execution timeout](#ExecTimeout-Definition) property is defined, the time defined in its `interval`
+is reached.
-#### State data filtering - Event Data Filter
+#### ProducedEvent Definition
| Parameter | Description | Type | Required |
| --- | --- | --- | --- |
-| dataOutputPath | Workflow expression that filters of the event data, to be added to or merged with states data | string | no |
+| eventRef | Reference to a defined unique event name in the [events](#Event-Definition) definition | string | yes |
+| data | If string type, an expression which selects parts of the states data output to become the data (payload) of the produced event. If object type, a custom object to become the data (payload) of produced event. | string or object | no |
+| contextAttributes | Add additional event extension context attributes | object | no |
Click to view example definition

@@ -3775,15 +4201,22 @@ with the state data. ```json { - "dataOutputPath": "${ .data.results }" -} + "eventRef": "provisioningCompleteEvent", + "data": "${ .provisionedOrders }", + "contextAttributes": [{ + "buyerId": "${ .buyerId }" + }] + } ``` ```yaml -dataOutputPath: "${ .data.results }" +eventRef: provisioningCompleteEvent +data: "${ .provisionedOrders }" +contextAttributes: +- buyerId: "${ .buyerId }" ``` @@ -3792,213 +4225,46 @@ dataOutputPath: "${ .data.results }"

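As noted in the ProducedEvent description further below, the `data` property can also be a custom object rather than an expression. Here is a minimal sketch of that form, reusing the event name from the example above; the `orderStatus` and `source` payload fields are made up purely for illustration:

```json
{
  "eventRef": "provisioningCompleteEvent",
  "data": {
    "orderStatus": "provisioned",
    "source": "provisioning-workflow"
  }
}
```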
-Allows event data to be filtered and added to or merged with the state data. All events have to be in the CloudEvents format -and event data filters can filter both context attributes and the event payload (data) using the `dataOutputPath` property. - -Here is an example using an event filter: - -

-Event Data Filter Example -

- -#### State data filtering - Using multiple filters - -As [Event states](#Event-State) can take advantage of all defined data filters, it is probably the best way to -show how we can combine them all to filter state data. - -Let's say we have a workflow which consumes events defining a customer arrival (to your store for example), -and then lets us know how to greet this customer in different languages. We could model this workflow as follows: - -```json -{ - "id": "GreetCustomersWorkflow", - "name": "Greet Customers when they arrive", - "version": "1.0", - "start": "WaitForCustomerToArrive", - "events": [{ - "name": "CustomerArrivesEvent", - "type": "customer-arrival-type", - "source": "customer-arrival-event-source" - }], - "functions": [{ - "name": "greetingFunction", - "operation": "http://my.api.org/myapi.json#greeting" - }], - "states":[ - { - "name": "WaitForCustomerToArrive", - "type": "event", - "onEvents": [{ - "eventRefs": ["CustomerArrivesEvent"], - "eventDataFilter": { - "dataInputPath": "${ .customer }" - }, - "actions":[ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "greeting": "${ .languageGreetings.spanish } ", - "customerName": "${ .customer.name } " - } - }, - "actionDataFilter": { - "dataResultsPath": "${ .finalCustomerGreeting }" - } - } - ] - }], - "stateDataFilter": { - "dataInputPath": "${ .hello } ", - "dataOutputPath": "${ .finalCustomerGreeting }" - }, - "end": true - } - ] -} -``` - -The example workflow contains an event state which consumes CloudEvents of type "customer-arrival-type", and then -calls the "greetingFunction" function passing in the greeting in Spanish and the name of the customer to greet. - -The workflow data input when starting workflow execution is assumed to include greetings in different languages: - -```json -{ - "hello": { - "english": "Hello", - "spanish": "Hola", - "german": "Hallo", - "russian": "Здравствуйте" - }, - "goodbye": { - "english": "Goodbye", - "spanish": "Adiós", - "german": "Auf Wiedersehen", - "russian": "Прощай" - } -} -``` - -We also assume for this example that the CloudEvent that our event state is set to consume (has the "customer-arrival-type" type) include the data: - -```json -{ - "data": { - "customer": { - "name": "John Michaels", - "address": "111 Some Street, SomeCity, SomeCountry", - "age": 40 - } - } -} -``` - -Here is a sample diagram showing our workflow, each numbered step on this diagram shows a certain defined point during -workflow execution at which data filters are invoked and correspond to the numbered items below. - -

-Using Multple Filters Example -

- -**(1) Workflow execution starts**: Workflow data is passed to our "WaitForCustomerToArrive" event state as data input. -Workflow transitions to its starting state, namely the "WaitForCustomerToArrive" event state. - -The event state **stateDataFilter** is invoked to filter this data input. Its "dataInputPath" is evaluated and filters - only the "hello" greetings in different languages. At this point our event state data contains: - -```json -{ - "hello": { - "english": "Hello", - "spanish": "Hola", - "german": "Hallo", - "russian": "Здравствуйте" - } -} -``` - -**(2) CloudEvent of type "customer-arrival-type" is consumed**: First the "eventDataFilter" is triggered. Its "dataInputPath" -expression selects the "customer" object from the events data and places it into the state data. - -At this point our event state data contains: - -```json -{ - "hello": { - "english": "Hello", - "spanish": "Hola", - "german": "Hallo", - "russian": "Здравствуйте" - }, - "customer": { - "name": "John Michaels", - "address": "111 Some Street, SomeCity, SomeCountry", - "age": 40 - } -} -``` - -**(3) Event state performs its actions**: -Before the first action is executed, its actionDataFilter is invoked. Its "dataInputPath" expression selects -the entire state data as the data available to functions that should be executed. Its "dataResultsPath" expression -specifies that results of all functions executed in this action should be placed back to the state data as part -of a new "finalCustomerGreeting" object. - -The action then calls the "greetingFunction" function passing in as parameters the Spanish greeting and the name of the customer that arrived. - -We assume that for this example "greetingFunction" returns: +Defines the event (CloudEvent format) to be produced when workflow execution completes or during a workflow [transitions](#Transitions). +The `eventRef` property must match the name of +one of the defined `produced` events in the [events](#Event-Definition) definition. -```json -{ - "finalCustomerGreeting": "Hola John Michaels!" -} -``` +The `data` property can have two types, object or string. If of string type, it is an expression that can select parts of state data +to be used as the event payload. If of object type, you can define a custom object to be the event payload. -which then becomes the result of the action. +The `contextAttributes` property allows you to add one or more [extension context attributes](https://github.com/cloudevents/spec/blob/master/spec.md#extension-context-attributes) +to the generated event. -**(4) Event State Completes Workflow Execution**: The results of action executions as defined in the actionDataFilter are placed into the -states data under the "finalCustomerGreeting" object. So at this point our event state data contains: +Being able to produce events when workflow execution completes or during state transition +allows for event-based orchestration communication. +For example, completion of an orchestration workflow can notify other orchestration workflows to decide if they need to act upon +the produced event, or notify monitoring services of the current state of workflow execution, etc. +It can be used to create very dynamic orchestration scenarios. -```json -{ - "hello": { - "english": "Hello", - "spanish": "Hola", - "german": "Hallo", - "russian": "Здравствуйте" - }, - "customer": { - "name": "John Michaels", - "address": "111 Some Street, SomeCity, SomeCountry", - "age": 40 - }, - "finalCustomerGreeting": "Hola John Michaels!" 
-}
-```
+#### Transitions
-Since our event state has performed all actions it is ready to either transition to the next state or end workflow execution if it is an end state.
-Before this happens though, the "stateDataFilter" is again invoked to filter this states data, specifically the "dataOutputPath" expression
-selects only the "finalCustomerGreeting" object to make it the data output of the state.
+Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states).
+Each state can define a `transition` definition that is used to determine which
+state to transition to next.
-Because our event state is also an end state, its data output becomes the final [workflow data output](#Workflow-data-output). Namely:
+Implementers can choose to use the states `name` property
+for determining the transition; however, we realize that in most cases this is not an
+optimal solution, as it can lead to ambiguity. This is why each state also includes an "id"
+property. Implementers can choose their own id generation strategy to populate the `id` property
+for each of the states and use it as the unique state identifier that is to be used as the "nextState" value.
-```json
-{
-  "finalCustomerGreeting": "Hola John Michaels!"
-}
-```
+So the options for next state transitions are:
-Note that in case of multiple actions with each containing an "actionDataFilter", you must be careful for their results
-not to overwrite each other after actions complete and their results are added to the state data.
-Also note that in case of parallel execution of actions, the results of only those that complete before the state
-transitions to the next one or ends workflow execution (end state) can be considered to be added to the state data.
+- Use the state name property
+- Use the state id property
+- Use a combination of name and id properties
-#### Workflow data output
+Events can be produced during state transitions. The `produceEvents` property of the `transition` definitions allows you
+to reference one or more defined `produced` events in the workflow [events definitions](#Event-Definition).
+For each of the produced events you can select what parts of state data should become the event payload.
-Once a workflow instance reaches an end state (where the `end` property is defined) and the workflow finishes its execution
-the data output of that result state becomes the workflow data output. This output can be logged or indexed depending on the
-implementation details.
+Transitions can trigger compensation via their `compensate` property. See the [Workflow Compensation](#Workflow-Compensation)
+section for more information.

### Workflow Error Handling

@@ -4596,4 +4862,4 @@ You can find a list of other languages, technologies and specifications related

## License

Serverless Workflow specification operates under the
-[Apache License version 2.0](LICENSE).
+[Apache License version 2.0](LICENSE).
\ No newline at end of file