in src/backend/transliterator/transliterator.ecstask.ts [36:222]
/**
 * ECS task entry point: renders documentation (JSON + Markdown, per language
 * and per submodule) for the package identified by `event`, uploads the
 * results to the source bucket, and removes any stale "uninstallable" /
 * "not supported" / "corrupt assembly" marker objects that no longer apply.
 *
 * @param event the S3 coordinates of the package tarball and its jsii
 *              assembly, plus the set of languages to render for.
 *
 * @returns the S3 keys created and deleted by this invocation. Only keys are
 *          returned (not full object descriptors) because the output must fit
 *          within the 262,144 byte limit of the SendTaskSuccess API call.
 *
 * @throws an `Error` named `constants.UNPROCESSABLE_PACKAGE_ERROR_NAME` when
 *         the package is uninstallable or has a corrupt assembly, so the
 *         message is diverted away from the DLQ instead of being retried.
 */
export function handler(event: TransliteratorInput): Promise<{ created: string[]; deleted: string[] }> {
  console.log(`Event: ${JSON.stringify(event, null, 2)}`);
  // We'll need a writable $HOME directory, or this won't work well, because
  // npm will try to write stuff like the `.npmrc` or package caches in there
  // and that'll bail out on EROFS if that fails.
  return ensureWritableHome(async () => {
    const endpoint = process.env.CODE_ARTIFACT_REPOSITORY_ENDPOINT;
    if (!endpoint) {
      console.log('No CodeArtifact endpoint configured - using npm\'s default registry');
    } else {
      console.log(`Using CodeArtifact registry: ${endpoint}`);
      const domain = requireEnv('CODE_ARTIFACT_DOMAIN_NAME');
      const domainOwner = process.env.CODE_ARTIFACT_DOMAIN_OWNER;
      const apiEndpoint = process.env.CODE_ARTIFACT_API_ENDPOINT;
      await logInWithCodeArtifact({ endpoint, domain, domainOwner, apiEndpoint });
    }

    // Set up NPM shared cache directory (https://docs.npmjs.com/cli/v7/using-npm/config#cache)
    const npmCacheDir = process.env.NPM_CACHE;
    if (npmCacheDir) {
      // Create it if it does not exist yet...
      await fs.mkdirp(npmCacheDir);
      console.log(`Using shared NPM cache at: ${npmCacheDir}`);
      await shellOut('npm', 'config', 'set', `cache=${npmCacheDir}`);
    }

    const created = new Array<S3Object>();
    const deleted = new Array<S3Object>();

    // Extract the package name & version from the assembly's object key.
    const [, packageName, packageVersion] = event.assembly.key.match(ASSEMBLY_KEY_REGEX) ?? [];
    if (packageName == null) {
      throw new Error(`Invalid object key: "${event.assembly.key}". It was expected to match ${ASSEMBLY_KEY_REGEX}!`);
    }

    const packageFqn = `${packageName}@${packageVersion}`;

    console.log(`Source Bucket: ${event.bucket}`);
    console.log(`Source Key: ${event.assembly.key}`);
    console.log(`Source Version: ${event.assembly.versionId}`);

    console.log(`Fetching assembly: ${event.assembly.key}`);
    const assemblyResponse = await aws.s3().getObject({ Bucket: event.bucket, Key: event.assembly.key }).promise();
    if (!assemblyResponse.Body) {
      throw new Error(`Response body for assembly at key ${event.assembly.key} is empty`);
    }

    const assembly = JSON.parse(assemblyResponse.Body.toString('utf-8'));
    // Submodule keys look like `<assembly>.<submodule>`; keep only the
    // submodule part for rendering.
    const submodules = Object.keys(assembly.submodules ?? {}).map(s => s.split('.')[1]);

    console.log(`Fetching package: ${event.package.key}`);
    const tarballExists = await aws.s3ObjectExists(event.bucket, event.package.key);
    if (!tarballExists) {
      throw new Error(`Tarball does not exist at key ${event.package.key} in bucket ${event.bucket}.`);
    }
    // Stream the tarball to a temporary file on disk, where docgen can read it.
    const readStream = aws.s3().getObject({ Bucket: event.bucket, Key: event.package.key }).createReadStream();
    const tmpdir = fs.mkdtempSync(path.join(os.tmpdir(), 'packages-'));
    const tarball = path.join(tmpdir, 'package.tgz');
    await writeFile(tarball, readStream);

    // Uploads/deletions are kicked off eagerly and collected here so they can
    // run concurrently; they are awaited (and logged) at the end.
    const uploads = new Map<string, Promise<PromiseResult<AWS.S3.PutObjectOutput, AWS.AWSError>>>();
    const deletions = new Map<string, Promise<PromiseResult<AWS.S3.DeleteObjectOutput, AWS.AWSError>>>();

    // Set when the package cannot be processed at all (uninstallable or
    // corrupt assembly); the task then fails with a distinguished error name.
    let unprocessable = false;

    // Writes a marker object (e.g. "not supported") next to the assembly,
    // containing the error message for later inspection.
    function markPackage(e: Error, marker: string) {
      const key = event.assembly.key.replace(/\/[^/]+$/, marker);
      const upload = uploadFile(event.bucket, key, event.assembly.versionId, Buffer.from(e.message));
      uploads.set(key, upload);
    }

    // Deletes a previously-written marker object, if one exists.
    async function unmarkPackage(marker: string) {
      const key = event.assembly.key.replace(/\/[^/]+$/, marker);
      const marked = await aws.s3ObjectExists(event.bucket, key);
      if (!marked) {
        return;
      }
      const deletion = deleteFile(event.bucket, key);
      deletions.set(key, deletion);
    }

    console.log(`Generating documentation for ${packageFqn}...`);
    try {
      const docs = await docgen.Documentation.forPackage(tarball, { name: assembly.name });
      // if the package used to not be installable, remove the marker for it.
      await unmarkPackage(constants.UNINSTALLABLE_PACKAGE_SUFFIX);

      for (const language of DocumentationLanguage.ALL) {
        if (event.languages && !event.languages[language.toString()]) {
          console.log(`Skipping language ${language} as it was not requested!`);
          continue;
        }

        const generateDocs = metricScope((metrics) => async (lang: DocumentationLanguage) => {
          metrics.setDimensions();
          metrics.setNamespace(METRICS_NAMESPACE);

          // Renders both the JSON and Markdown documents for the root module
          // (submodule === undefined) or a given submodule, and schedules
          // their upload. Language/corrupt-assembly failures are recorded as
          // marker objects instead of failing the whole task.
          async function renderAndDispatch(submodule?: string) {
            try {
              console.log(`Rendering documentation in ${lang} for ${packageFqn} (submodule: ${submodule})`);

              const json = await docs.toJson({
                submodule,
                language: docgen.Language.fromString(lang.name),
              });

              const jsonPage = Buffer.from(json.render(null, 2));
              metrics.putMetric(MetricName.DOCUMENT_SIZE, jsonPage.length, Unit.Bytes);
              const { buffer: jsonBody, contentEncoding: jsonContentEncoding } = compressContent(jsonPage);
              metrics.putMetric(MetricName.COMPRESSED_DOCUMENT_SIZE, jsonBody.length, Unit.Bytes);

              const jsonKey = event.assembly.key.replace(/\/[^/]+$/, constants.docsKeySuffix(lang, submodule, 'json'));
              console.log(`Uploading ${jsonKey}`);
              const jsonUpload = uploadFile(event.bucket, jsonKey, event.assembly.versionId, jsonBody, jsonContentEncoding);
              uploads.set(jsonKey, jsonUpload);

              const markdown = MarkdownRenderer.fromSchema(json.content, {
                anchorFormatter,
                linkFormatter: linkFormatter(lang),
              });

              const page = Buffer.from(markdown.render());
              metrics.putMetric(MetricName.DOCUMENT_SIZE, page.length, Unit.Bytes);
              const { buffer: body, contentEncoding } = compressContent(page);
              metrics.putMetric(MetricName.COMPRESSED_DOCUMENT_SIZE, body.length, Unit.Bytes);

              const key = event.assembly.key.replace(/\/[^/]+$/, constants.docsKeySuffix(lang, submodule, 'md'));
              console.log(`Uploading ${key}`);
              const upload = uploadFile(event.bucket, key, event.assembly.versionId, body, contentEncoding);
              uploads.set(key, upload);

              // if the package used to have a corrupt assembly, remove the marker for it.
              await unmarkPackage(constants.corruptAssemblyKeySuffix(language, submodule, 'md'));
            } catch (e) {
              if (e instanceof docgen.LanguageNotSupportedError) {
                markPackage(e, constants.notSupportedKeySuffix(language, submodule, 'json'));
                markPackage(e, constants.notSupportedKeySuffix(language, submodule, 'md'));
              } else if (e instanceof docgen.CorruptedAssemblyError) {
                markPackage(e, constants.corruptAssemblyKeySuffix(language, submodule, 'json'));
                markPackage(e, constants.corruptAssemblyKeySuffix(language, submodule, 'md'));
                unprocessable = true;
              } else {
                throw e;
              }
            }
          }

          await renderAndDispatch();
          for (const submodule of submodules) {
            await renderAndDispatch(submodule);
          }
        });
        await generateDocs(language);
      }
    } catch (error) {
      if (error instanceof docgen.UnInstallablePackageError) {
        markPackage(error, constants.UNINSTALLABLE_PACKAGE_SUFFIX);
        unprocessable = true;
      } else {
        throw error;
      }
    }

    // Await all pending uploads/deletions (they were started concurrently).
    for (const [key, upload] of uploads.entries()) {
      const response = await upload;
      created.push({ bucket: event.bucket, key, versionId: response.VersionId });
      console.log(`Finished uploading ${key} (Version ID: ${response.VersionId})`);
    }
    for (const [key, deletion] of deletions.entries()) {
      const response = await deletion;
      deleted.push({ bucket: event.bucket, key, versionId: response.VersionId });
      console.log(`Finished deleting ${key} (Version ID: ${response.VersionId})`);
    }

    if (unprocessable) {
      // the message here doesn't matter, we only use the error name
      // to divert this message away from the DLQ.
      const error = new Error();
      error.name = constants.UNPROCESSABLE_PACKAGE_ERROR_NAME;
      // BUG FIX: the error was previously constructed but never thrown, so
      // unprocessable packages were silently reported as successes. It must
      // propagate for the distinguished error name to take effect.
      throw error;
    }

    // output must be compressed to satisfy 262,144 byte limit of SendTaskSuccess command
    const s3OKey = (s3Obj: S3Object) => s3Obj.key;
    return { created: created.map(s3OKey), deleted: deleted.map(s3OKey) };
  });
}