in src/main/java/org/opensearch/ad/model/AnomalyResult.java [592:720]
/**
 * Deserializes an {@code AnomalyResult} from XContent.
 *
 * <p>The parser must be positioned on the {@code START_OBJECT} token of a
 * serialized anomaly result document. Fields not recognized by this version
 * are skipped, so documents written by newer schema versions remain readable.
 *
 * @param parser XContent parser positioned at the start of the result object
 * @return the parsed {@code AnomalyResult}; fields absent from the document
 *         are left {@code null} (or empty lists for array-valued fields)
 * @throws IOException if the content is malformed or cannot be read
 */
public static AnomalyResult parse(XContentParser parser) throws IOException {
    String detectorId = null;
    Double anomalyScore = null;
    Double anomalyGrade = null;
    Double confidence = null;
    List<FeatureData> featureData = new ArrayList<>();
    Instant dataStartTime = null;
    Instant dataEndTime = null;
    Instant executionStartTime = null;
    Instant executionEndTime = null;
    String error = null;
    Entity entity = null;
    User user = null;
    // Default applies to documents written before schema versioning existed.
    Integer schemaVersion = CommonValue.NO_SCHEMA_VERSION;
    String taskId = null;
    String modelId = null;
    Instant approAnomalyStartTime = null;
    List<DataByFeatureId> relevantAttribution = new ArrayList<>();
    List<DataByFeatureId> pastValues = new ArrayList<>();
    List<ExpectedValueList> expectedValues = new ArrayList<>();
    Double threshold = null;
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
        String fieldName = parser.currentName();
        // Advance from the field-name token to its value token before dispatching.
        parser.nextToken();
        switch (fieldName) {
            case DETECTOR_ID_FIELD:
                detectorId = parser.text();
                break;
            case ANOMALY_SCORE_FIELD:
                anomalyScore = parser.doubleValue();
                break;
            case ANOMALY_GRADE_FIELD:
                anomalyGrade = parser.doubleValue();
                break;
            case CONFIDENCE_FIELD:
                confidence = parser.doubleValue();
                break;
            case FEATURE_DATA_FIELD:
                ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    featureData.add(FeatureData.parse(parser));
                }
                break;
            case DATA_START_TIME_FIELD:
                dataStartTime = ParseUtils.toInstant(parser);
                break;
            case DATA_END_TIME_FIELD:
                dataEndTime = ParseUtils.toInstant(parser);
                break;
            case EXECUTION_START_TIME_FIELD:
                executionStartTime = ParseUtils.toInstant(parser);
                break;
            case EXECUTION_END_TIME_FIELD:
                executionEndTime = ParseUtils.toInstant(parser);
                break;
            case ERROR_FIELD:
                error = parser.text();
                break;
            case ENTITY_FIELD:
                entity = Entity.parse(parser);
                break;
            case USER_FIELD:
                user = User.parse(parser);
                break;
            case SCHEMA_VERSION_FIELD:
                schemaVersion = parser.intValue();
                break;
            case TASK_ID_FIELD:
                taskId = parser.text();
                break;
            case MODEL_ID_FIELD:
                modelId = parser.text();
                break;
            case APPROX_ANOMALY_START_FIELD:
                approAnomalyStartTime = ParseUtils.toInstant(parser);
                break;
            case RELEVANT_ATTRIBUTION_FIELD:
                ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    relevantAttribution.add(DataByFeatureId.parse(parser));
                }
                break;
            case PAST_VALUES_FIELD:
                ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    pastValues.add(DataByFeatureId.parse(parser));
                }
                break;
            case EXPECTED_VALUES_FIELD:
                ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    expectedValues.add(ExpectedValueList.parse(parser));
                }
                break;
            case THRESHOLD_FIELD:
                threshold = parser.doubleValue();
                break;
            default:
                // Unknown field: skip its value (and any nested children) so
                // newer-schema documents parse without error.
                parser.skipChildren();
                break;
        }
    }
    return new AnomalyResult(
        detectorId,
        taskId,
        anomalyScore,
        anomalyGrade,
        confidence,
        featureData,
        dataStartTime,
        dataEndTime,
        executionStartTime,
        executionEndTime,
        error,
        entity,
        user,
        schemaVersion,
        modelId,
        approAnomalyStartTime,
        relevantAttribution,
        pastValues,
        expectedValues,
        threshold
    );
}