in packages/pressreader/src/processEdition.ts [45:108]
async function run() {
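  // Resolve each section's sources into a list of candidate article details, in parallel.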
  const sectionData = await Promise.all(
    edition.sections.map(async (section) => {
      const frontData = await Promise.all(
        section.frontSources.map(fetchFrontData),
      );
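      // Flatten the fetched fronts into article ids, skipping any that came back undefined.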
      const frontArticleIds = frontData
        .filter(isNotUndefined)
        .flatMap((front) => processFrontData(front, collectionMismatchAlarm));
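      // Fetch further article ids from the section's CAPI sources.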
      const capiArticleIds = await getArticleIdsFromCapi(
        section.capiSources,
        capiConfig,
      );
      /**
       * We can't guarantee that these ids are unique across the whole edition,
       * but we might as well remove duplicates within each section as we go,
       * to reduce calls to `fetchArticleData` below.
       */
      const uniqueArticleIds = Array.from(
        new Set([...frontArticleIds, ...capiArticleIds]),
      );
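      // Fetch the full article data for each unique id.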
      const maybeArticles = await Promise.all(
        uniqueArticleIds.map((id) => fetchArticleData(id, capiConfig)),
      );
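      // Check the fetched articles against this section's tone filters.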
      const articleDetails = checkArticlesForSection(
        section.toneFilters,
        maybeArticles,
      );
      return { ...section, articleDetails };
    }),
  );
  /**
   * These two lists will be mutated in the loop below.
   */
  const usedArticleIdsStore: string[] = [];
  const outputAccumulator: PressReaderEditionOutput = [];
  /**
   * Build up the list of articles for each section, checking that they
   * meet the criteria for inclusion, and also making sure that we
   * don't include the same article more than once in the edition.
   */
  for (const section of sectionData) {
    const articleIdsForSection = section.articleDetails
      .filter((article) => {
        return (
          !usedArticleIdsStore.includes(article.id) &&
          meetsInclusionCriteria(
            article,
            edition.bannedTags ?? [],
            MIN_WORDCOUNT,
          )
        );
      })
      .slice(0, section.maximumArticleCount) // cap at the section's maximum article count
      .map((article) => article.id);
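    // Remember these ids so that later sections don't repeat them.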
    usedArticleIdsStore.push(...articleIdsForSection);
    outputAccumulator.push({
      section: section.displayName,
      articles: articleIdsForSection,
    });
  }
  return outputAccumulator;
}