in lib/maths/time_series/CTimeSeriesModel.cc [2458:2597]
bool CMultivariateTimeSeriesModel::probability(const common::CModelProbabilityParams& params,
const TTime2Vec1Vec& time_,
const TDouble2Vec1Vec& value,
common::SModelProbabilityResult& result) const {
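    // If the caller hasn't restricted the calculation to specific coordinates
    // then test every coordinate of the value.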
TSize2Vec coordinates(params.coordinates());
if (coordinates.empty()) {
coordinates.resize(this->dimension());
std::iota(coordinates.begin(), coordinates.end(), 0);
}
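    // Initialise the result to "not anomalous": a probability of one and
    // undetermined tails.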
TTail2Vec tail(coordinates.size(), maths_t::E_UndeterminedTail);
result = common::SModelProbabilityResult{
1.0, false, {{common::SModelProbabilityResult::E_SingleBucketProbability, 1.0}}, tail, {}, {}};
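    // Detrend each coordinate of the value so it can be compared with the
    // residual model.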
std::size_t dimension{this->dimension()};
core_t::TTime time{time_[0][0]};
TDouble10Vec1Vec sample{TDouble10Vec(dimension)};
for (std::size_t d = 0; d < dimension; ++d) {
sample[0][d] = m_TrendModel[d]->detrend(
time, value[0][d], params.seasonalConfidenceInterval(), m_IsNonNegative);
}
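    // Convert the weights to the representation used by the multivariate
    // residual model.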
maths_t::TDouble10VecWeightsAry1Vec weights{unpack(params.weights()[0])};
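    // Accumulates the joint probability of seeing less likely samples, keeping
    // the marginal and conditional calculations and their lower and upper
    // bounds separate.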
struct SJointProbability {
common::CJointProbabilityOfLessLikelySamples s_MarginalLower;
common::CJointProbabilityOfLessLikelySamples s_MarginalUpper;
common::CJointProbabilityOfLessLikelySamples s_ConditionalLower;
common::CJointProbabilityOfLessLikelySamples s_ConditionalUpper;
};
using TJointProbability2Vec = core::CSmallVector<SJointProbability, 2>;
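    // pl and pu hold the lower and upper probability bounds respectively; for
    // each, index 0 is the marginal and index 1 the conditional calculation
    // for the tested coordinate.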
auto updateJointProbabilities = [&](const TDouble10Vec2Vec& pl,
const TDouble10Vec2Vec& pu,
SJointProbability& joint) {
joint.s_MarginalLower.add(pl[0][0]);
joint.s_MarginalUpper.add(pu[0][0]);
joint.s_ConditionalLower.add(pl[1][0]);
joint.s_ConditionalUpper.add(pu[1][0]);
};
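    // The first accumulator aggregates the single bucket probability; a second
    // is added for the multi-bucket feature when it is in use.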
TJointProbability2Vec jointProbabilities(
m_MultibucketFeatureModel != nullptr && params.useMultibucketFeatures() ? 2 : 1);
    double correlation{0.0}; // correlation of the multi-bucket feature with the bucket value
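    // Compute the probability of seeing a less likely value for each tested
    // coordinate and record the tail in which the value falls.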
for (std::size_t i = 0; i < coordinates.size(); ++i) {
maths_t::EProbabilityCalculation calculation{params.calculation(i)};
TSize10Vec coordinate{coordinates[i]};
        TDouble10Vec2Vec pSingleBucket[2]; // [lower bound, upper bound]
TTail10Vec tail_;
if (m_ResidualModel->probabilityOfLessLikelySamples(
calculation, sample, weights, coordinate, pSingleBucket[0],
pSingleBucket[1], tail_) == false) {
LOG_ERROR(<< "Failed to compute P(" << sample << " | weight = " << weights << ")");
return false;
}
updateJointProbabilities(pSingleBucket[0], pSingleBucket[1],
jointProbabilities[0]);
tail[i] = tail_[0];
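        // If a multi-bucket feature is available, also compute the probability
        // of its current value, keeping the smallest, i.e. most anomalous,
        // bounds over the expanded calculations.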
if (m_MultibucketFeatureModel != nullptr && params.useMultibucketFeatures()) {
TDouble10Vec1Vec feature;
std::tie(feature, std::ignore) = m_MultibucketFeature->value();
if (feature.empty() == false) {
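                // Bounds indexed [lower, upper] x [marginal, conditional] and
                // initialised to one, i.e. least anomalous.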
TDouble10Vec2Vec pMultiBucket[2]{{{1.0}, {1.0}}, {{1.0}, {1.0}}};
for (auto calculation_ : expand(calculation)) {
TDouble10Vec2Vec pl;
TDouble10Vec2Vec pu;
TTail10Vec dummy;
if (m_MultibucketFeatureModel->probabilityOfLessLikelySamples(
calculation_, feature,
maths_t::CUnitWeights::singleUnit<TDouble10Vec>(dimension),
coordinate, pl, pu, dummy) == false) {
LOG_ERROR(<< "Failed to compute P(" << feature << ")");
return false;
}
pMultiBucket[0][0][0] = std::min(pMultiBucket[0][0][0], pl[0][0]);
pMultiBucket[1][0][0] = std::min(pMultiBucket[1][0][0], pu[0][0]);
pMultiBucket[0][1][0] = std::min(pMultiBucket[0][1][0], pl[1][0]);
pMultiBucket[1][1][0] = std::min(pMultiBucket[1][1][0], pu[1][0]);
}
updateJointProbabilities(pMultiBucket[0], pMultiBucket[1],
jointProbabilities[1]);
correlation = m_MultibucketFeature->correlationWithBucketValue();
}
}
}
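    // Compute an overall probability for each feature as the average of the
    // geometric means of its marginal and conditional bounds, i.e.
    //   (sqrt(marginalLower * conditionalLower) + sqrt(marginalUpper * conditionalUpper)) / 2.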
TDouble4Vec probabilities;
for (const auto& probability : jointProbabilities) {
double marginalLower;
double marginalUpper;
double conditionalLower;
double conditionalUpper;
if (probability.s_MarginalLower.calculate(marginalLower) == false ||
probability.s_MarginalUpper.calculate(marginalUpper) == false ||
probability.s_ConditionalLower.calculate(conditionalLower) == false ||
probability.s_ConditionalUpper.calculate(conditionalUpper) == false) {
return false;
}
probabilities.push_back((std::sqrt(marginalLower * conditionalLower) +
std::sqrt(marginalUpper * conditionalUpper)) /
2.0);
}
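    // Label the feature probabilities: the single bucket probability first,
    // followed by the multi-bucket probability if it was computed.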
common::SModelProbabilityResult::EFeatureProbabilityLabel labels[]{
common::SModelProbabilityResult::E_SingleBucketProbability,
common::SModelProbabilityResult::E_MultiBucketProbability};
common::SModelProbabilityResult::TFeatureProbability4Vec featureProbabilities;
for (std::size_t i = 0; i < probabilities.size(); ++i) {
featureProbabilities.emplace_back(labels[i], probabilities[i]);
}
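    // Aggregate the feature probabilities into an overall probability, taking
    // into account the correlation between the multi-bucket feature and the
    // bucket value.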
double pOverall{aggregateFeatureProbabilities(probabilities, correlation)};
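    // If the anomaly model is in use, update it with the residual of the value
    // from the nearest residual model mean, normalised by the seasonal scale,
    // and let it adjust the overall probability.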
if (m_AnomalyModel != nullptr && params.useAnomalyModel()) {
double residual{0.0};
TDouble10Vec nearest(m_ResidualModel->nearestMarginalLikelihoodMean(sample[0]));
TDouble2Vec seasonalWeight;
this->seasonalWeight(0.0, time, seasonalWeight);
for (std::size_t i = 0; i < dimension; ++i) {
residual += (sample[0][i] - nearest[i]) /
std::max(std::sqrt(seasonalWeight[i]), 1.0);
}
double pSingleBucket{probabilities[0]};
m_AnomalyModel->sample(params, time, residual, pSingleBucket, pOverall);
double pAnomaly;
std::tie(pOverall, pAnomaly) = m_AnomalyModel->probability(pSingleBucket, pOverall);
probabilities.push_back(pAnomaly);
featureProbabilities.emplace_back(
common::SModelProbabilityResult::E_AnomalyModelProbability, pAnomaly);
}
result.s_Probability = pOverall;
result.s_FeatureProbabilities = std::move(featureProbabilities);
result.s_Tail = tail;
return true;
}