in taverna-workflowmodel-impl/src/main/java/org/apache/taverna/workflowmodel/impl/DataflowImpl.java [495:640]
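	/*
	 * Performs the actual type check for this dataflow and builds a
	 * validation report. Presumably invoked via the public checkValidity()
	 * wrapper (an assumption based on the *Impl suffix; the wrapper is not
	 * part of this excerpt). The algorithm is a fixed-point iteration:
	 * datalink depths are seeded from the declared depths of the dataflow
	 * input ports and then propagated through the token-processing entities
	 * until no further progress is made.
	 */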
public synchronized DataflowValidationReport checkValidityImpl() {
		// First things first - reset the resolved depth of every datalink
		// to -1, meaning "not yet known"
for (Datalink dl : getLinks())
if (dl instanceof DatalinkImpl)
((DatalinkImpl) dl).setResolvedDepth(-1);
// Now copy type information from workflow inputs
for (DataflowInputPort dip : getInputPorts())
for (Datalink dl : dip.getInternalOutputPort().getOutgoingLinks())
if (dl instanceof DatalinkImpl)
((DatalinkImpl) dl).setResolvedDepth(dip.getDepth());
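		/*
		 * At this point only the links fed directly by workflow input ports
		 * carry a resolved depth; every other link is still at -1 and will
		 * be resolved (or not) by the iteration below.
		 */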
/*
* ==================================================================
* Now iteratively attempt to resolve everything else.
* ==================================================================
*/
		/*
		 * First take a copy of the processor list; we'll remove processors
		 * from this list as they become either failed or resolved
		 */
List<TokenProcessingEntity> unresolved = new ArrayList<>(
getEntities(TokenProcessingEntity.class));
// Keep a list of processors that have failed, initially empty
List<TokenProcessingEntity> failed = new ArrayList<>();
		/*
		 * Is the dataflow valid? The flow is valid only if both the
		 * unresolved and failed lists are empty at the end. Emptiness of
		 * these lists alone doesn't guarantee that the workflow will run;
		 * in particular it doesn't cover issues such as unresolved output
		 * edges, which are checked separately below.
		 */
// Flag to indicate whether we've finished yet, set to true if no
// changes are made in an iteration
boolean finished = false;
Map<TokenProcessingEntity, DataflowValidationReport> invalidDataflows = new HashMap<>();
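		/*
		 * invalidDataflows records entities whose type check failed because
		 * a dataflow they contain is itself invalid (signalled by
		 * InvalidDataflowException), keyed to that nested validation report
		 * so the cause can be included in the overall report.
		 */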
while (!finished) {
// We're finished unless something happens later
finished = true;
// Keep a list of processors to remove from the unresolved list
// because they've been resolved properly
List<TokenProcessingEntity> removeValidated = new ArrayList<>();
// Keep another list of those that have failed
List<TokenProcessingEntity> removeFailed = new ArrayList<>();
for (TokenProcessingEntity p : unresolved)
try {
					/*
					 * true = checked and valid; false = can't check yet; an
					 * exception means the processor was checked but was
					 * invalid for some reason
					 */
if (p.doTypeCheck()) {
removeValidated.add(p);
/*
* At least one thing validated; we will need to run the
* check loop at least once more.
*/
finished = false;
}
} catch (IterationTypeMismatchException e) {
removeFailed.add(p);
} catch (InvalidDataflowException e) {
invalidDataflows.put(p, e.getDataflowValidationReport());
removeFailed.add(p);
}
			/*
			 * Remove validated and failed items from the unresolved list and
			 * record the failures.
			 */
unresolved.removeAll(removeValidated);
unresolved.removeAll(removeFailed);
failed.addAll(removeFailed);
}
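		/*
		 * The loop above terminates: each pass either validates at least one
		 * entity (shrinking the unresolved list and clearing 'finished') or
		 * validates none, in which case 'finished' stays true and the loop
		 * exits. So there are at most as many passes as entities, plus one.
		 */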
/*
* At this point we know whether the processors within the workflow
* validated. If all the processors validated then we're probably okay,
* but there are a few other problems to check for. Firstly we need to
* check whether all the dataflow outputs are connected; any unconnected
* output is by definition a validation failure.
*/
List<DataflowOutputPort> unresolvedOutputs = new ArrayList<>();
for (DataflowOutputPortImpl dopi : outputs) {
Datalink dl = dopi.getInternalInputPort().getIncomingLink();
			/*
			 * Unset any type information on the output port; we'll set it
			 * again later if there's a suitably populated link going into it
			 */
dopi.setDepths(-1, -1);
if (dl == null)
				// not linked; this output is by definition unresolved
unresolvedOutputs.add(dopi);
else if (dl.getResolvedDepth() == -1)
/*
* linked but the edge hasn't had its depth resolved, i.e. it
* links from an unresolved entity
*/
unresolvedOutputs.add(dopi);
else {
				/*
				 * Linked and the edge depth is defined, so we can populate
				 * the granular and real depth of the dataflow output port.
				 * Note that this is the only way these values can be
				 * populated; you don't define them when creating the ports,
				 * as they are derived entirely from the type-check stage.
				 */
int granularDepth = dl.getSource().getGranularDepth();
int resolvedDepth = dl.getResolvedDepth();
dopi.setDepths(resolvedDepth, granularDepth);
}
}
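		/*
		 * Worked example of the branch above: if an output's incoming link
		 * resolved to depth 2 and its source port reports granular depth 0,
		 * the dataflow output port ends up with setDepths(2, 0) - i.e. a
		 * list of lists delivered in single-item chunks, assuming granular
		 * depth carries its usual Taverna meaning of the finest token depth
		 * emitted.
		 */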
		/*
		 * Check if the workflow is 'incomplete' - i.e. if it contains no
		 * processors and no output ports. This is to prevent empty workflows
		 * or ones that contain only input ports from being run.
		 */
boolean dataflowIsIncomplete = getProcessors().isEmpty()
&& getOutputPorts().isEmpty();
		/*
		 * For a workflow to be valid it must not be 'incomplete' and the
		 * lists of problems must all be empty
		 */
boolean dataflowValid = (!dataflowIsIncomplete)
&& unresolvedOutputs.isEmpty() && failed.isEmpty()
&& unresolved.isEmpty();
/*
* Build and return a new validation report containing the overall state
* along with lists of failed and unsatisfied processors and unsatisfied
* output ports
*/
return new DataflowValidationReportImpl(dataflowValid,
dataflowIsIncomplete, failed, unresolved, unresolvedOutputs,
invalidDataflows);
}
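	/*
	 * Usage sketch (hedged - checkValidity() and the accessor names below
	 * are assumptions based on the constructor arguments of
	 * DataflowValidationReportImpl, not confirmed by this excerpt):
	 *
	 *   DataflowValidationReport report = dataflow.checkValidity();
	 *   if (!report.isValid()) {
	 *       // inspect report.isWorkflowIncomplete(), the failed and
	 *       // unresolved entity lists, and the unresolved output ports
	 *   }
	 */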