in hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/KCoreAlgorithm.java [195:263]
private static Set<Id> extractKcore(SimilarsMap similarsMap, int k) {
    assert similarsMap.size() == 1;
    Map.Entry<Id, Set<Similar>> entry = similarsMap.entrySet()
                                                   .iterator().next();
    Id source = entry.getKey();
    Set<KcoreSimilar> similars = new HashSet<>();
    for (Similar similar : entry.getValue()) {
        similars.add(new KcoreSimilar(similar));
    }
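
    /*
     * Iteratively prune vertices that appear in fewer than k-1 similars
     * and similars left with fewer than k intermediaries, until one full
     * pass removes nothing.
     */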
    boolean stop;
    do {
        stop = true;
        // Do statistics: count how many similars each vertex appears in
        Map<Id, MutableInt> counts = new HashMap<>();
        for (KcoreSimilar similar : similars) {
            for (Id id : similar.ids()) {
                MutableInt count = counts.get(id);
                if (count == null) {
                    count = new MutableInt(0);
                    counts.put(id, count);
                }
                count.increment();
            }
        }
        /*
         * Iterate similars to:
         * 1. delete failed similars
         * 2. delete failed intermediaries in survived similars
         * 3. update statistics
         */
        Set<KcoreSimilar> failedSimilars = new HashSet<>();
        for (KcoreSimilar similar : similars) {
            Set<Id> failedIds = new HashSet<>();
            for (Id id : similar.ids()) {
                MutableInt count = counts.get(id);
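                // An id occurring in fewer than k - 1 similars cannot stay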
                if (count.getValue() < k - 1) {
                    count.decrement();
                    failedIds.add(id);
                    stop = false;
                }
            }
            Set<Id> survivedIds = new HashSet<>(
                    CollectionUtils.subtract(similar.ids(), failedIds));
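            // A similar left with fewer than k intermediaries fails as a whole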
            if (survivedIds.size() < k) {
                for (Id id : survivedIds) {
                    counts.get(id).decrement();
                }
                failedSimilars.add(similar);
            } else {
                similar.ids(survivedIds);
            }
        }
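        // Drop failed similars before the next pruning pass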
        similars = new HashSet<>(
                CollectionUtils.subtract(similars, failedSimilars));
    } while (!stop);

    if (similars.isEmpty()) {
        return ImmutableSet.of();
    }
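
    // The k-core members are the source vertex, the surviving similar
    // vertices, and their surviving intermediaries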
    Set<Id> kcores = new HashSet<>();
    kcores.add(source);
    for (KcoreSimilar similar : similars) {
        kcores.add(similar.id());
        kcores.addAll(similar.ids());
    }
    return kcores;
}
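
The counting step above can also be written with Map.computeIfAbsent. The snippet below is a minimal, self-contained sketch of that pattern only; plain String ids stand in for HugeGraph's Id type and KcoreCountSketch/countOccurrences are hypothetical names, not the project's code.

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.mutable.MutableInt;

class KcoreCountSketch {
    // Count how many id collections (similars) each id occurs in
    static Map<String, MutableInt> countOccurrences(
            Collection<? extends Collection<String>> similars) {
        Map<String, MutableInt> counts = new HashMap<>();
        for (Collection<String> ids : similars) {
            for (String id : ids) {
                // computeIfAbsent replaces the explicit null check used above
                counts.computeIfAbsent(id, x -> new MutableInt(0)).increment();
            }
        }
        return counts;
    }
}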