in cli/cli.ts [4972:5954]
/**
 * Builds a <basename>.jres and matching <basename>.ts file from a directory
 * of sprite-sheet PNGs, slicing each sheet into fixed-size frames and
 * encoding them via pxtc.f4EncodeImg.
 *
 * A default meta.json is created in `dir` when one does not exist.
 *
 * @param dir directory containing the PNG sheets and optional per-sheet
 *            side-car .json files
 * @returns resolved promise (work is synchronous)
 */
function buildJResSpritesDirectoryAsync(dir: string) {
    const PNG: any = require("pngjs").PNG;

    // create meta.json file if needed
    const metaInfoPath = path.join(dir, "meta.json");
    if (!fs.existsSync(metaInfoPath)) {
        pxt.log(`${metaInfoPath} not found, creating new one`);
        fs.writeFileSync(metaInfoPath, JSON.stringify({
            "width": 16,
            "height": 16,
            "blockIdentity": "image.__imagePicker",
            "creator": "image.ofBuffer",
            "star": {
                "namespace": `sprites.${dir.toLowerCase()}`,
                "mimeType": "image/x-mkcd-f4"
            }
        }, null, 4));
    }

    const metaInfo: SpriteGlobalMeta = nodeutil.readJson(metaInfoPath)
    // Validate the "*" entry up front. Previously star.mimeType was
    // dereferenced before this check, so a meta.json without "star"
    // crashed with a TypeError instead of a clean user error.
    if (!metaInfo.star)
        U.userError(`invalid meta.json`)

    const jresources: pxt.Map<pxt.JRes> = {}
    const star = metaInfo.star
    jresources["*"] = metaInfo.star

    // "-f1" mime types are 1 bit per pixel; everything else is 4bpp.
    let bpp = 4
    if (/-f1/.test(star.mimeType))
        bpp = 1

    if (!metaInfo.basename) metaInfo.basename = star.namespace
    if (!metaInfo.basename)
        U.userError(`invalid meta.json`)

    star.dataEncoding = star.dataEncoding || "base64"

    if (!pxt.appTarget.runtime || !pxt.appTarget.runtime.palette)
        U.userError(`palette not defined in pxt.json`)

    // Parse "#rrggbb" palette entries into [r, g, b] triples.
    const palette = pxt.appTarget.runtime.palette.map(s => {
        let v = parseInt(s.replace(/#/, ""), 16)
        return [(v >> 16) & 0xff, (v >> 8) & 0xff, (v >> 0) & 0xff]
    })

    let ts = `namespace ${metaInfo.star.namespace} {\n`

    // Process every .png directly inside dir; "-1bpp" markers are stripped
    // from the exported identifier and non-word chars become underscores.
    for (let fn of nodeutil.allFiles(dir, 1)) {
        fn = fn.replace(/\\/g, "/")
        let m = /(.*\/)(.*)\.png$/i.exec(fn)
        if (!m) continue
        let bn = m[2]
        let jn = m[1] + m[2] + ".json"
        bn = bn.replace(/-1bpp/, "").replace(/[^\w]/g, "_")
        const standalone = metaInfo.standaloneSprites && metaInfo.standaloneSprites.indexOf(bn) !== -1;
        processImage(bn, fn, jn, standalone)
    }

    ts += "}\n"

    pxt.log(`save ${metaInfo.basename}.jres and .ts`)
    nodeutil.writeFileSync(metaInfo.basename + ".jres", nodeutil.stringify(jresources));
    nodeutil.writeFileSync(metaInfo.basename + ".ts", ts);

    return Promise.resolve()

    // use geometric distance on colors
    function scale(v: number) {
        return v * v
    }

    // Returns the palette index closest to the RGBA pixel at buf[pix..pix+3].
    // Index 0 is reserved for transparency when alpha is honored.
    function closestColor(buf: Buffer, pix: number, alpha = true) {
        if (alpha && buf[pix + 3] < 100)
            return 0 // transparent
        let mindelta = 0
        let idx = -1
        for (let i = alpha ? 1 : 0; i < palette.length; ++i) {
            let delta = scale(palette[i][0] - buf[pix + 0]) + scale(palette[i][1] - buf[pix + 1]) + scale(palette[i][2] - buf[pix + 2])
            if (idx < 0 || delta < mindelta) {
                idx = i
                mindelta = delta
            }
        }
        return idx
    }

    // Slices one PNG sheet into frames, emits a jres entry and a TS constant
    // per non-transparent frame, then processes animations from the side-car
    // JSON, if any.
    function processImage(basename: string, pngName: string, jsonName: string, standalone: boolean) {
        let info: SpriteInfo = {}
        if (nodeutil.fileExistsSync(jsonName))
            info = nodeutil.readJson(jsonName)
        if (!info.width) info.width = metaInfo.width
        if (!info.height) info.height = metaInfo.height

        let sheet = PNG.sync.read(fs.readFileSync(pngName)) as PNGImage
        let imgIdx = 0

        // add alpha channel to greyscale images; pixels closest to palette
        // entry 0 become fully transparent
        if (sheet.colorType == 0) {
            sheet.colorType = 6
            sheet.depth = 8
            for (let i = 0; i < sheet.data.length; i += 4) {
                if (closestColor(sheet.data, i, false) == 0)
                    sheet.data[i + 3] = 0x00
            }
        }

        if (sheet.colorType != 6)
            U.userError(`only RGBA png images supported`)
        if (sheet.depth != 8)
            U.userError(`only 8 bit per channel png images supported`)
        if (sheet.width > 255 || sheet.height > 255)
            U.userError(`PNG image too big`)

        if (standalone) {
            // Image contains a single sprite
            info.width = sheet.width;
            info.height = sheet.height;
        }
        else {
            // clamp frame size to the sheet dimensions
            if (!info.width || info.width > sheet.width) info.width = sheet.width
            if (!info.height || info.height > sheet.height) info.height = sheet.height
        }

        if (!info.xSpacing) info.xSpacing = 0;
        if (!info.ySpacing) info.ySpacing = 0;

        let nx = (sheet.width / info.width) | 0
        let ny = (sheet.height / info.height) | 0
        let numSprites = nx * ny

        const frameQNames: string[] = [];

        for (let y = 0; y + info.height - 1 < sheet.height; y += info.height + info.ySpacing) {
            for (let x = 0; x + info.width - 1 < sheet.width; x += info.width + info.xSpacing) {
                // NOTE(review): this returns from processImage once the
                // explicit frame list is exhausted, which also skips the
                // animation handling below — presumably intentional, but
                // worth confirming.
                if (info.frames && imgIdx >= info.frames.length) return;

                // copy the frame's pixels out of the sheet
                let img = U.flatClone(sheet)
                img.data = Buffer.alloc(info.width * info.height * 4)
                img.width = info.width
                img.height = info.height
                for (let i = 0; i < info.height; ++i) {
                    let src = x * 4 + (y + i) * sheet.width * 4
                    sheet.data.copy(img.data, i * info.width * 4, src, src + info.width * 4)
                }

                // exported name: explicit frame name, bare basename for
                // single-sprite sheets, or basename + index
                let key = basename + imgIdx
                if (info.frames && info.frames[imgIdx]) {
                    let suff = info.frames[imgIdx]
                    if (/^[a-z]/.test(suff))
                        suff = "_" + suff
                    key = basename + suff
                } else if (numSprites == 1) {
                    key = basename
                }

                let hasNonTransparent = false;
                let hex = pxtc.f4EncodeImg(img.width, img.height, bpp, (x, y) => {
                    const col = closestColor(img.data, 4 * (x + y * img.width));
                    if (col)
                        hasNonTransparent = true;
                    return col;
                });

                // fully transparent frames are dropped
                if (!hasNonTransparent)
                    continue;

                let data = Buffer.from(hex, "hex").toString(star.dataEncoding)
                let storeIcon = false

                // These are space-separated lists of tags
                let tags = `${info.tags || ""} ${metaInfo.tags || ""}`.split(" ").filter(t => !!t);
                const isTile = tags.some(t => t === "tile" || t === "?tile");

                if (storeIcon || isTile) {
                    // long form: object entry carrying extra metadata
                    let jres = jresources[key]
                    if (!jres) {
                        jres = jresources[key] = {} as any
                    }
                    jres.data = data
                    if (storeIcon) {
                        jres.icon = 'data:image/png;base64,' + PNG.sync.write(img).toString('base64');
                    }
                    if (isTile) {
                        jres.tilemapTile = true;
                    }
                } else {
                    // use the short form
                    jresources[key] = data as any
                }

                ts += ` //% fixedInstance jres blockIdentity=${info.blockIdentity || metaInfo.blockIdentity}\n`
                if (tags.length) {
                    ts += ` //% tags="${tags.join(" ")}"\n`;
                }
                ts += ` export const ${key} = ${metaInfo.creator}(hex\`\`);\n`

                pxt.log(`add ${key}; ${JSON.stringify(jresources[key]).length} bytes`)
                frameQNames.push((metaInfo.star.namespace) + "." + key);

                imgIdx++
            }
        }

        if (info.animation) {
            processAnimation(basename, frameQNames.map((name, i) => i));
        }

        if (info.animations) {
            for (const animation of info.animations) {
                processAnimation(animation.name, animation.frames, animation.flippedHorizontal);
            }
        }

        // Emits a jres animation entry whose data references the exported
        // frame qualified names by index.
        function processAnimation(name: string, frames: number[], flippedHorizontal = false) {
            jresources[name] = {
                mimeType: "application/mkcd-animation",
                dataEncoding: "json",
                data: JSON.stringify({
                    frames: frames.map(frameIndex => frameQNames[frameIndex]),
                    flippedHorizontal
                })
            } as any
        }
    }
}
/**
 * Expands every .jres file in the current package: slurps icon and data
 * files from the matching jres/<name>/ directory back into the .jres,
 * rewriting the file only when its contents actually changed.
 */
export function buildJResAsync(parsed: commandParser.ParsedCommand) {
    ensurePkgDir();
    const jresFiles = nodeutil.allFiles(".").filter(f => /\.jres$/i.test(f));
    for (const f of jresFiles) {
        pxt.log(`expanding jres resources in ${f}`);
        const jresources = nodeutil.readJson(f) as pxt.Map<pxt.JRes>;
        const before = nodeutil.stringify(jresources);
        const dir = path.join('jres', path.basename(f, '.jres'));
        // update existing fields
        const star = jresources["*"];
        if (!star.dataEncoding) star.dataEncoding = 'base64';
        for (const k of Object.keys(jresources)) {
            if (k == "*") continue;
            const jres = jresources[k];
            const mime = jres.mimeType || star.mimeType;
            pxt.log(`expanding ${k}`);
            // try to slurp icon
            const iconn = path.join(dir, k + '-icon.png');
            pxt.debug(`looking for ${iconn}`)
            if (nodeutil.fileExistsSync(iconn)) {
                pxt.log(`importing ${iconn}`);
                jres.icon = 'data:image/png;base64,' + fs.readFileSync(iconn, 'base64');
            }
            // try to find file: "<k>-data.<ext>" wins over "<k>.<ext>"
            if (mime) {
                const ext = mime.replace(/^.*\//, '');
                const primary = path.join(dir, k + '-data.' + ext);
                pxt.debug(`looking for ${primary}`)
                if (nodeutil.fileExistsSync(primary)) {
                    pxt.log(`importing ${primary}`);
                    jres.data = fs.readFileSync(primary, 'base64');
                } else {
                    const fallback = path.join(dir, k + '.' + ext);
                    pxt.debug(`looking for ${fallback}`)
                    if (nodeutil.fileExistsSync(fallback)) {
                        pxt.log(`importing ${fallback}`);
                        jres.data = fs.readFileSync(fallback, 'base64');
                    }
                }
            }
        }
        const after = nodeutil.stringify(jresources);
        if (before != after) {
            pxt.log(`updating ${f}`)
            nodeutil.writeFileSync(f, after);
        }
    }
    return Promise.resolve();
}
/**
 * `pxt build` entry point. Optionally cleans and installs first, then
 * performs a core build; --debug takes precedence over --deploy.
 */
export function buildAsync(parsed: commandParser.ParsedCommand) {
    const wantDebug = parsed && !!parsed.flags["debug"];
    const wantDeploy = parsed && !!parsed.flags["deploy"];
    const mode = wantDebug ? BuildOption.DebugSim
        : wantDeploy ? BuildOption.Deploy
            : BuildOption.JustBuild;
    const clean = parsed && parsed.flags["clean"];
    const ignoreTests = parsed && !!parsed.flags["ignoreTests"];
    const install = parsed && !!parsed.flags["install"];

    let p: Promise<any> = clean ? cleanAsync() : Promise.resolve();
    if (install)
        p = p.then(() => installAsync(parsed));
    return p
        .then(() => {
            parseBuildInfo(parsed);
            return buildCoreAsync({ mode, ignoreTests })
        })
        .then((compileOpts) => { });
}
/**
 * Downloads a shared project by id, compiles it, and saves the prebuilt
 * simulator JS info under docs/static/builtjs (or --output).
 *
 * @throws Error when the shared project fails to compile
 */
export async function buildShareSimJsAsync(parsed: commandParser.ParsedCommand) {
    const id = parsed.args[0];
    console.log(`Building sim js for ${id}`);
    const cwd = process.cwd();
    const builtFolder = path.join("temp", id);
    nodeutil.mkdirP(builtFolder);
    process.chdir(builtFolder);
    try {
        const mainPkg = new pxt.MainPackage(new Host());
        mainPkg._verspec = `pub:${id}`;
        await mainPkg.host().downloadPackageAsync(mainPkg);
        await mainPkg.installAllAsync();
        const opts = await mainPkg.getCompileOptionsAsync();
        const compileResult = pxtc.compile(opts);
        if (compileResult.diagnostics && compileResult.diagnostics.length > 0) {
            compileResult.diagnostics.forEach(diag => {
                console.error(diag.messageText)
            })
            throw new Error(`Failed to compile share id: ${id}`);
        }
        const builtJsInfo = pxtc.buildSimJsInfo(compileResult);
        const outdir = parsed.flags["output"] as string || path.join(cwd, "docs", "static", "builtjs");
        nodeutil.mkdirP(outdir);
        const outputLocation = path.join(outdir, `${id}v${pxt.appTarget.versions.target}.json`);
        fs.writeFileSync(
            outputLocation,
            JSON.stringify(builtJsInfo)
        );
        console.log(`saved prebuilt ${id} to ${outputLocation}`);
    } finally {
        // Fix: restore the working directory even on compile failure;
        // previously an error left the process inside temp/<id>.
        process.chdir(cwd);
    }
}
/**
 * `pxt gendocs` entry point: forwards the relevant command flags to the
 * internal docs generator.
 */
export function gendocsAsync(parsed: commandParser.ParsedCommand) {
    return internalGenDocsAsync(
        !!parsed.flags["docs"],
        !!parsed.flags["locs"],
        parsed.flags["files"] as string,
        !!parsed.flags["create"]
    );
}
function internalGenDocsAsync(docs: boolean, locs: boolean, fileFilter?: string, createOnly?: boolean) {
const buildAsync = () => buildCoreAsync({
mode: BuildOption.GenDocs,
docs,
locs,
fileFilter,
createOnly,
ignoreTests: true
}).then((compileOpts) => { });
// from target location?
if (fs.existsSync("pxtarget.json") && !!readJson("pxtarget.json").appTheme)
return forEachBundledPkgAsync((pkg, dirname) => {
pxt.debug(`building docs in ${dirname}`);
return buildAsync();
});
else // from a project build
return buildAsync();
}
/**
 * `pxt console` entry point: streams console.log output over HID serial,
 * if the native HID support is installed.
 */
export function consoleAsync(parsed?: commandParser.ParsedCommand): Promise<void> {
    pxt.log(`monitoring console.log`)
    if (hid.isInstalled())
        return hid.serialAsync();
    pxt.log(`console support not installed, did you run "pxt npminstallnative"?`)
    return Promise.resolve();
}
/**
 * `pxt deploy` entry point: builds and deploys, then optionally attaches
 * the serial console when --console is set.
 */
export function deployAsync(parsed?: commandParser.ParsedCommand) {
    parseBuildInfo(parsed);
    const attachConsole = parsed && !!parsed.flags["console"];
    return buildCoreAsync({ mode: BuildOption.Deploy })
        .then((compileOpts) => attachConsole ? consoleAsync(parsed) : Promise.resolve())
}
/**
 * `pxt run` entry point: builds in run mode, discarding the compile result.
 */
export function runAsync(parsed?: commandParser.ParsedCommand) {
    parseBuildInfo(parsed);
    return buildCoreAsync({ mode: BuildOption.Run }).then((compileOpts) => { });
}
/**
 * `pxt test` entry point: builds in test mode, discarding the compile result.
 */
export function testAsync() {
    return buildCoreAsync({ mode: BuildOption.Test }).then((compileOpts) => { });
}
// A project extracted from a .hex/.uf2/.json payload, ready to be written
// to disk by writeProjects.
export interface SavedProject {
    // project name; sanitized before being used as a directory name
    name: string;
    // filename -> file content
    files: Map<string>;
}
/**
 * `pxt extract` entry point: extracts project sources from a .hex/.uf2/
 * .json payload (file, URL, script id, or stdin). When --code is given,
 * the extracted projects are also opened in VS Code.
 */
export function extractAsync(parsed: commandParser.ParsedCommand): Promise<void> {
    const codeFlag = parsed.flags["code"];
    const vscode = !!codeFlag;
    const out = codeFlag || '.';
    const filename = parsed.args[0];
    return extractAsyncInternal(filename, out as string, vscode).then(() => { });
}
// Recognizes shared-script ids: either "_" followed by exactly 12
// alphanumerics, or a 20+ character run of digits/dashes (legacy form).
function isScriptId(id: string) {
    const scriptIdPattern = /^((_[a-zA-Z0-9]{12})|([\d\-]{20,}))$/;
    return scriptIdPattern.test(id);
}
/**
 * Fetches the text of a script from stdin ("-" or empty), a script id
 * (via the cloud API), a URL, or a local file.
 *
 * For https URLs that look like a script page, a fallback "/api/<id>/text"
 * URL is tried when the primary response is missing or HTML.
 */
function fetchTextAsync(filename: string): Promise<Buffer> {
    if (filename == "-" || !filename)
        return nodeutil.readResAsync(process.stdin)
    if (isScriptId(filename))
        filename = Cloud.apiRoot + filename + "/text"
    let m = /^(https:\/\/[^\/]+\/)([^\/]+)$/.exec(filename)
    let fn2 = ""
    if (m) {
        let id = m[2]
        if (/^api\//.test(id)) id = id.slice(4)
        if (isScriptId(id)) {
            // fallback URL in case the primary serves HTML instead of text
            fn2 = m[1] + "api/" + id + "/text"
        }
    }
    if (/^https?:/.test(filename)) {
        // Fix: the log previously printed the literal text "$(unknown)"
        // instead of the URL being fetched.
        pxt.log(`fetching ${filename}...`)
        if (/\.json$/i.test(filename)) pxt.log(`compile log: ${filename.replace(/\.json$/i, ".log")}`)
        return U.requestAsync({ url: filename, allowHttpErrors: !!fn2 })
            .then(resp => {
                if (fn2 && (resp.statusCode != 200 || /html/.test(resp.headers["content-type"] as string))) {
                    pxt.log(`Trying also ${fn2}...`)
                    return U.requestAsync({ url: fn2 })
                }
                return resp
            })
            .then(resp => resp.buffer)
    } else
        return readFileAsync(filename)
}
/**
 * Extracts one file/URL/script id, or — when `filename` is a directory —
 * every .hex/.uf2 file inside it. Optionally opens the resulting project
 * directories in VS Code.
 *
 * @returns the directories written (or the folder name for the directory case)
 */
function extractAsyncInternal(filename: string, out: string, vscode: boolean): Promise<string[]> {
    if (filename && nodeutil.existsDirSync(filename)) {
        // Fix: the log previously printed the literal text "$(unknown)"
        // instead of the folder name.
        pxt.log(`extracting folder ${filename}`);
        return Promise.all(fs.readdirSync(filename)
            // Fix: anchored the extension match; the unanchored form also
            // matched names like "a.uf2.bak".
            .filter(f => /\.(hex|uf2)$/.test(f))
            .map(f => extractAsyncInternal(path.join(filename, f), out, vscode)))
            .then(() => [filename]);
    }
    return fetchTextAsync(filename)
        .then(buf => extractBufferAsync(buf, out))
        .then(dirs => {
            if (dirs && vscode) {
                pxt.debug('launching code...')
                dirs.forEach(dir => openVsCode(dir));
            }
            return dirs;
        })
}
// Decodes an extracted payload (.hex, .uf2, .json, or LZMA-compressed
// .jsz/.pxt) into one or more projects and writes them under outDir.
// Returns the directories written, or undefined if nothing could be
// extracted.
function extractBufferAsync(buf: Buffer, outDir: string): Promise<string[]> {
    // Wraps a single source string as a one-file project map, defaulting
    // the editor extension to "td".
    const oneFile = (src: string, editor: string) => {
        let files: any = {}
        files["main." + (editor || "td")] = src || ""
        return files
    }
    // Pulls embedded source out of a .hex/.uf2 binary; the source may be a
    // JSON file map or (legacy) a single source file.
    const unpackHexAsync = (buf: Buffer) =>
        pxt.cpp.unpackSourceFromHexAsync(buf as any)
            .then(data => {
                if (!data) return null
                if (!data.meta) data.meta = {} as any
                let id = data.meta.cloudId || "?"
                console.log(`.hex/uf2 cloudId: ${id}`)
                if (data.meta.targetVersions)
                    console.log(`target version: ${data.meta.targetVersions.target}, pxt ${data.meta.targetVersions.pxt}`);
                let files: Map<string> = null
                try {
                    files = JSON.parse(data.source)
                } catch (e) {
                    // not JSON: treat as a single legacy source file
                    files = oneFile(data.source, data.meta.editor)
                }
                return {
                    projects: [
                        {
                            name: data.meta.name,
                            files: files
                        }
                    ]
                }
            })
    return Promise.resolve()
        .then(() => {
            // Sniff the payload type from its first byte.
            let str = buf.toString("utf8")
            if (str[0] == ":") {
                pxt.debug("Detected .hex file.")
                return unpackHexAsync(buf)
            } else if (str[0] == "U") {
                pxt.debug("Detected .uf2 file.")
                return unpackHexAsync(buf)
            } else if (str[0] == "{") { // JSON
                pxt.debug("Detected .json file.")
                return JSON.parse(str)
            } else if (buf[0] == 0x5d) { // JSZ
                pxt.debug("Detected .jsz/.pxt file.")
                return pxt.lzmaDecompressAsync(buf as any)
                    .then(str => JSON.parse(str))
            } else
                return Promise.resolve(null)
        })
        .then(json => {
            if (!json) {
                pxt.log("Couldn't extract.")
                return undefined;
            }
            // Unwrap a {meta, source} envelope if present.
            if (json.meta && json.source) {
                json = typeof json.source == "string" ? JSON.parse(json.source) : json.source
            }
            if (Array.isArray(json.scripts)) {
                pxt.debug("Legacy TD workspace.")
                // convert each legacy script into a one-file project
                json.projects = json.scripts.map((scr: any) => ({
                    name: scr.header.name,
                    files: oneFile(scr.source, scr.header.editor)
                }))
                delete json.scripts
            }
            if (json[pxt.CONFIG_NAME]) {
                pxt.debug("Raw JSON files.")
                // a bare file map containing pxt.json: wrap as one project
                let cfg: pxt.PackageConfig = pxt.Package.parseAndValidConfig(json[pxt.CONFIG_NAME])
                let files = json
                json = {
                    projects: [{
                        name: cfg.name,
                        files: files
                    }]
                }
            }
            let prjs: SavedProject[] = json.projects
            if (!prjs) {
                pxt.log("No projects found.")
                return undefined;
            }
            const dirs = writeProjects(prjs, outDir)
            return dirs;
        })
}
/**
 * `pxt hexdump` entry point: pretty-prints a UF2 file block by block, or
 * falls back to a plain hex dump for any other binary.
 */
export function hexdumpAsync(c: commandParser.ParsedCommand) {
    const filename = c.args[0]
    const buf = fs.readFileSync(filename)
    const isUf2 = /^UF2\n/.test(buf.slice(0, 4).toString("utf8"))
    if (isUf2) {
        for (const b of pxtc.UF2.parseFile(buf)) {
            console.log(`UF2 Block: ${b.blockNo}/${b.numBlocks} family:${b.familyId.toString(16)} flags:${b.flags.toString(16)}\n` +
                pxtc.hexDump(b.data, b.targetAddr))
        }
        return Promise.resolve()
    }
    console.log("Binary file assumed.")
    console.log(pxtc.hexDump(buf))
    return Promise.resolve()
}
/**
 * `pxt hex2uf2` entry point: converts an Intel HEX file into a UF2 file
 * written alongside it (same name, .uf2 extension).
 */
export function hex2uf2Async(c: commandParser.ParsedCommand) {
    const filename = c.args[0]
    const lines = fs.readFileSync(filename, "utf8").split(/\r?\n/)
    if (lines[0][0] != ':') {
        console.log("Not a hex file: " + filename)
        return Promise.resolve()
    }
    const f = pxtc.UF2.newBlockFile()
    pxtc.UF2.writeHex(f, lines)
    const uf2buf = Buffer.from(pxtc.UF2.serializeFile(f), "binary")
    const uf2fn = filename.replace(/(\.hex)?$/i, ".uf2")
    nodeutil.writeFileSync(uf2fn, uf2buf)
    console.log("Wrote: " + uf2fn)
    return Promise.resolve()
}
// Opens a project directory in VS Code, fire-and-forget (no callback:
// we intentionally don't wait for the editor to exit).
function openVsCode(dirname: string) {
    const cmd = `code -g main.ts ${dirname}`;
    child_process.exec(cmd);
}
/**
 * Writes each saved project into its own sanitized subdirectory of outDir,
 * fills in any missing template files, and kicks off `pxt install` in the
 * background for each project.
 *
 * @returns the directories written (outDir-relative joined paths)
 */
function writeProjects(prjs: SavedProject[], outDir: string): string[] {
    const dirs: string[] = [];
    for (const prj of prjs) {
        const dirname = prj.name.replace(/[^A-Za-z0-9_]/g, "-")
        const fdir = path.join(outDir, dirname);
        nodeutil.mkdirP(fdir);
        for (let fn of Object.keys(prj.files)) {
            // flatten any path separators inside the stored filename
            fn = fn.replace(/[\/]/g, "-")
            const fullname = path.join(fdir, fn)
            nodeutil.mkdirP(path.dirname(fullname));
            nodeutil.writeFileSync(fullname, prj.files[fn])
        }
        // add default files if not present
        const files = pxt.template.packageFiles(prj.name);
        pxt.template.packageFilesFixup(files);
        for (const fn in files) {
            if (prj.files[fn]) continue;
            const fullname = path.join(fdir, fn)
            nodeutil.mkdirP(path.dirname(fullname));
            const src = files[fn];
            nodeutil.writeFileSync(fullname, src)
        }
        // start installing in the background
        // Fix: run install in (and report) the directory the project was
        // actually written to; previously the bare project name was used,
        // which only resolves when outDir is the current directory.
        child_process.exec(`pxt install`, { cwd: fdir });
        dirs.push(fdir);
    }
    return dirs;
}
/**
 * Cherry-picks a commit onto every older major release branch (v0..vN-1),
 * pushing a "cp/<branch><name>" branch for each; checks out master again
 * if anything fails.
 */
function cherryPickAsync(parsed: commandParser.ParsedCommand) {
    const commit = parsed.args[0];
    const name = parsed.flags["name"] || commit.slice(0, 7);
    const majorVersion = parseInt(pxtVersion().split('.')[0]);

    const gitAsync = (args: string[]) => nodeutil.spawnAsync({
        cmd: "git",
        args
    })

    const branches: string[] = [];
    for (let i = majorVersion - 1; i >= 0; --i)
        branches.push("v" + i);
    pxt.log(`cherry picking ${commit} into ${branches.join(', ')}`)

    let chain = gitAsync(["pull"]);
    for (const branch of branches) {
        const pr = `cp/${branch}${name}`;
        chain = chain
            .then(() => gitAsync(["checkout", branch]))
            .then(() => gitAsync(["pull"]))
            .then(() => gitAsync(["checkout", "-b", pr]))
            .then(() => gitAsync(["cherry-pick", commit]))
            .then(() => gitAsync(["push", "--set-upstream", "origin", pr]));
    }
    return chain.catch(() => gitAsync(["checkout", "master"]));
}
/**
 * `pxt checkdocs` entry point: always compiles snippets, forwarding the
 * --re, --fix and --pycheck flags.
 */
function checkDocsAsync(parsed?: commandParser.ParsedCommand): Promise<void> {
    const pattern = parsed.flags["re"] as string;
    const fix = !!parsed.flags["fix"];
    const pycheck = !!parsed.flags["pycheck"];
    return internalCheckDocsAsync(true, pattern, fix, pycheck)
}
/**
 * Logs every file exceeding the target's warn threshold and returns the
 * size of the largest file. Returns 0 when the target has no cloud config.
 */
function checkFileSize(files: string[]): number {
    if (!pxt.appTarget.cloud)
        return 0;
    pxt.log('checking for file sizes');
    const mb = 1e6;
    const warnSize = pxt.appTarget.cloud.warnFileSize || (1 * mb);
    let maxSize = 0;
    for (const f of files) {
        const stats = fs.statSync(f);
        if (stats.size > warnSize)
            pxt.log(` ${f} - ${stats.size / mb}Mb`);
        maxSize = Math.max(maxSize, stats.size);
    }
    return maxSize;
}
/**
 * Walks the docs/ tree checking image links, SUMMARY TOCs, in-page links
 * and markdown macros, plus galleries/tutorials/examples referenced from
 * targetconfig.json, collecting code snippets along the way.
 *
 * @param compileSnippets when true, compile all collected snippets
 * @param re optional filter forwarded to the snippet tester
 * @param fix when true, rewrite fixable issues (e.g. png/jpg swaps) in place
 * @param pycheck forwarded to the snippet tester for python round-trips
 * @returns promise that fails (via userError) when broken links remain and
 *          the target does not set ignoreDocsErrors
 */
function internalCheckDocsAsync(compileSnippets?: boolean, re?: string, fix?: boolean, pycheck?: boolean): Promise<void> {
    if (!nodeutil.existsDirSync("docs"))
        return Promise.resolve();
    const docsRoot = nodeutil.targetDir;
    const docsTemplate = server.expandDocFileTemplate("docs.html")
    pxt.log(`checking docs`);

    const noTOCs: string[] = [];    // pages reachable but missing from SUMMARY
    const todo: string[] = [];      // pages still to crawl
    let urls: any = {};             // url -> resolved md / existence (cache)
    let checked = 0;
    let broken = 0;
    // only check each snippet once.
    const existingSnippets: pxt.Map<boolean> = {};
    let snippets: CodeSnippet[] = [];

    const maxFileSize = checkFileSize(nodeutil.allFiles("docs", 10, true, true, ".ignorelargefiles"));
    // Fix: guard the cloud access — checkFileSize already tolerates a
    // missing appTarget.cloud, so don't crash on the limit lookup either.
    if (!pxt.appTarget.ignoreDocsErrors
        && maxFileSize > ((pxt.appTarget.cloud && pxt.appTarget.cloud.maxFileSize) || (5000000)))
        U.userError(`files too big in docs folder`);

    // scan and fix image links
    nodeutil.allFiles("docs")
        .filter(f => /\.md/.test(f))
        .forEach(f => {
            let md = fs.readFileSync(f, { encoding: "utf8" });
            let newmd = md.replace(/]\((\/static\/[^)]+?)\.(png|jpg)(\s+"[^"]+")?\)/g, (m: string, p: string, ext: string, comment: string) => {
                let fn = path.join(docsRoot, "docs", `${p}.${ext}`);
                if (fs.existsSync(fn))
                    return m;
                // try finding other file
                let next = ext == "png" ? "jpg" : "png";
                const exists = fs.existsSync(path.join(docsRoot, "docs", `${p}.${next}`));
                if (exists && fix)
                    return `](${p}.${next}${comment ? " " : ""}${comment || ""})`;
                // broken image or resources
                broken++;
                pxt.log(`missing file ${p}.${ext}`)
                return m;
            });
            if (fix && md != newmd) {
                pxt.log(`patching ${f}`)
                nodeutil.writeFileSync(f, newmd, { encoding: "utf8" })
            }
        });

    // Registers a snippet once per (src, index) pair and, for local builds,
    // writes it under temp/snippets for inspection.
    function addSnippet(snippet: CodeSnippet, entryPath: string, snipIndex: number, src: string) {
        const key = `${src}${snipIndex}`;
        if (existingSnippets[key])
            return;
        snippets.push(snippet);
        const dir = path.join("temp/snippets", snippet.type);
        const fn = `${dir}/${entryPath.replace(/^\//, '').replace(/[\/\s]/g, '-').replace(/\.\w+$/, '')}-${snipIndex}.${snippet.ext}`;
        if (isLocalBuild()) {
            nodeutil.mkdirP(dir);
            nodeutil.writeFileSync(fn, snippet.code);
        }
        snippet.file = fn;
        snippet.src = src;
        existingSnippets[key] = true;
    }

    // Resolves a doc url once, queueing markdown pages for crawling and
    // recording pages that are missing from the SUMMARY.
    function pushUrl(url: string, toc: boolean) {
        // cache value
        if (!urls.hasOwnProperty(url)) {
            const specialPath = /^\/pkg\//.test(url) || /^\/--[a-z]+/.test(url);
            if (specialPath) {
                urls[url] = url;
                return;
            }
            const isResource = /\.[a-z]+$/i.test(url)
            if (!isResource && !toc) {
                pxt.debug(`link not in SUMMARY: ${url}`);
                noTOCs.push(url);
            }
            // TODO: correct resolution of static resources
            urls[url] = isResource
                ? nodeutil.fileExistsSync(path.join(docsRoot, "docs", url))
                : nodeutil.resolveMd(docsRoot, url);
            if (!isResource && urls[url])
                todo.push(url);
        }
    }

    // Validates one TOC entry (and, recursively, its subitems).
    function checkTOCEntry(entry: pxt.TOCMenuEntry) {
        if (entry.path && !/^https:\/\//.test(entry.path)) {
            pushUrl(entry.path, true);
            if (!urls[entry.path]) {
                pxt.log(`SUMMARY: broken link ${entry.path}`);
                broken++;
            }
        }
        // look for sub items
        if (entry.subitems)
            entry.subitems.forEach(checkTOCEntry);
    }

    // check over TOCs
    nodeutil.allFiles("docs", 5).filter(f => /SUMMARY\.md$/.test(f))
        .forEach(summaryFile => {
            const summaryPath = path.join(path.dirname(summaryFile), 'SUMMARY').replace(/^docs[\/\\]/, '');
            pxt.log(`looking for ${summaryPath}`);
            const summaryMD = nodeutil.resolveMd(docsRoot, summaryPath);
            const toc = pxt.docs.buildTOC(summaryMD);
            if (!toc) {
                pxt.log(`invalid SUMMARY`);
                broken++;
            } else {
                toc.forEach(checkTOCEntry);
            }
        });

    // push entries from pxtarget
    const theme = pxt.appTarget.appTheme;
    if (theme) {
        if (theme.sideDoc)
            todo.push(theme.sideDoc);
        if (theme.usbDocs)
            todo.push(theme.usbDocs);
    }

    // push galleries for targetconfig
    if (fs.existsSync("targetconfig.json")) {
        const targeConfig = nodeutil.readJson("targetconfig.json") as pxt.TargetConfig;
        if (targeConfig.galleries)
            Object.keys(targeConfig.galleries)
                .forEach(gallery => {
                    const url = getGalleryUrl(targeConfig.galleries[gallery])
                    todo.push(url)
                });
    }

    // push files from targetconfig checkdocsdirs
    const mdRegex = /\.md$/;
    const targetDirs = pxt.appTarget.checkdocsdirs;
    if (targetDirs) {
        targetDirs.forEach(dir => {
            pxt.log(`looking for markdown files in ${dir}`);
            nodeutil.allFiles(path.join("docs", dir), 3).filter(f => mdRegex.test(f))
                .forEach(md => {
                    // strip the "docs/" prefix and the ".md" suffix
                    pushUrl(md.slice(5).replace(mdRegex, ""), true);
                });
        })
    }

    // Crawl every queued page: check links, render macros, collect snippets.
    while (todo.length) {
        checked++;
        const entrypath = todo.pop();
        pxt.debug(`checking ${entrypath}`)
        let md = (urls[entrypath] as string) || nodeutil.resolveMd(docsRoot, entrypath);
        if (!md) {
            pxt.log(`unable to resolve ${entrypath}`)
            broken++;
            continue;
        }
        // look for broken urls
        // Fix: the scan regex used to require a space after "](" —
        // /]\( (\/...)/ — so it never matched ordinary markdown links;
        // worse, any text it did match could not match the space-less
        // exec() below, which would then crash on [1] of null. The two
        // regexes now agree.
        md.replace(/]\((\/[^)]+?)(\s+"[^"]+")?\)/g, (m) => {
            let url = /]\((\/[^)]+?)(\s+"[^"]+")?\)/.exec(m)[1];
            // remove hash
            url = url.replace(/#.*$/, '');
            pushUrl(url, false);
            if (!urls[url]) {
                pxt.log(`${entrypath}: broken link ${url}`);
                broken++;
            }
            return '';
        })
        // look for broken macros
        try {
            const r = pxt.docs.renderMarkdown({
                template: docsTemplate,
                markdown: md,
                theme: pxt.appTarget.appTheme,
                throwOnError: true
            });
        } catch (e) {
            pxt.log(`${entrypath}: ${e}`);
            broken++;
        }
        // look for snippets
        getCodeSnippets(entrypath, md).forEach((snippet, snipIndex) => addSnippet(snippet, entrypath, snipIndex, entrypath));
    }

    nodeutil.mkdirP("temp");
    nodeutil.writeFileSync("temp/noSUMMARY.md", noTOCs.sort().map(p => `${Array(p.split(/[\/\\]/g).length - 1).join(' ')}* [${pxt.Util.capitalize(p.split(/[\/\\]/g).reverse()[0].split('-').join(' '))}](${p})`).join('\n'), { encoding: "utf8" });

    // test targetconfig
    if (nodeutil.fileExistsSync("targetconfig.json")) {
        const targetConfig = nodeutil.readJson("targetconfig.json") as pxt.TargetConfig;
        if (targetConfig && targetConfig.galleries) {
            Object.keys(targetConfig.galleries).forEach(k => {
                pxt.log(`gallery ${k}`);
                const galleryUrl = getGalleryUrl(targetConfig.galleries[k])
                let gallerymd = nodeutil.resolveMd(docsRoot, galleryUrl);
                if (!gallerymd) {
                    pxt.log(`unable to resolve ${galleryUrl}`)
                    broken++;
                    return;
                }
                let gallery = pxt.gallery.parseGalleryMardown(gallerymd);
                pxt.debug(`found ${gallery.length} galleries`);
                gallery.forEach(gal => gal.cards.forEach((card, cardIndex) => {
                    pxt.debug(`card ${card.shortName || card.name}`);
                    switch (card.cardType) {
                        case "tutorial": {
                            let urls = [card.url]
                            if (card.otherActions) card.otherActions.forEach(a => { if (a.url) urls.push(a.url) });
                            for (let url of urls) {
                                const tutorialMd = nodeutil.resolveMd(docsRoot, url);
                                if (!tutorialMd) {
                                    pxt.log(`unable to resolve ${url}`)
                                    broken++;
                                    continue;
                                }
                                const tutorial = pxt.tutorial.parseTutorial(tutorialMd);
                                const pkgs: pxt.Map<string> = { "blocksprj": "*" };
                                pxt.Util.jsonMergeFrom(pkgs, pxt.gallery.parsePackagesFromMarkdown(tutorialMd) || {});
                                let extraFiles: Map<string> = null;
                                if (tutorial.jres) {
                                    extraFiles = {
                                        [pxt.TILEMAP_JRES]: tutorial.jres,
                                        [pxt.TILEMAP_CODE]: pxt.emitTilemapsFromJRes(JSON.parse(tutorial.jres))
                                    };
                                }
                                // Handles tilemaps, spritekinds
                                if (tutorial.code.some(tut => tut.indexOf("namespace") !== -1)
                                    // Handles ```python``` snippets
                                    || (tutorial.language == "python")) {
                                    tutorial.steps
                                        .filter(step => !!step.contentMd)
                                        .forEach((step, stepIndex) => getCodeSnippets(`${card.name}-step${stepIndex}`, step.contentMd)
                                            .forEach((snippet, snippetIndex) => {
                                                snippet.packages = pkgs;
                                                snippet.extraFiles = extraFiles;
                                                const indexInStep = snippetIndex ? `-${snippetIndex}` : "";
                                                addSnippet(
                                                    snippet,
                                                    `tutorial/${card.name}-${indexInStep}step`,
                                                    stepIndex,
                                                    url + (indexInStep)
                                                )
                                            })
                                        );
                                }
                                else {
                                    addSnippet(<CodeSnippet>{
                                        name: card.name,
                                        code: tutorial.code.join("\n"),
                                        type: "blocks",
                                        ext: "ts",
                                        packages: pkgs,
                                        extraFiles: extraFiles
                                    }, "tutorial" + gal.name, cardIndex, url);
                                }
                            }
                            break;
                        }
                        case "example": {
                            let urls = [card.url]
                            if (card.otherActions) card.otherActions.forEach(a => { if (a.url) urls.push(a.url) });
                            for (let url of urls) {
                                const exMd = nodeutil.resolveMd(docsRoot, url);
                                if (!exMd) {
                                    pxt.log(`unable to resolve ${url}`)
                                    broken++;
                                    continue;
                                }
                                const prj = pxt.gallery.parseExampleMarkdown(card.name, exMd);
                                const pkgs: pxt.Map<string> = { "blocksprj": "*" };
                                pxt.U.jsonMergeFrom(pkgs, prj.dependencies);
                                let extraFiles: Map<string> = undefined;
                                if (prj.filesOverride[pxt.TILEMAP_CODE] && prj.filesOverride[pxt.TILEMAP_JRES]) {
                                    extraFiles = {
                                        [pxt.TILEMAP_CODE]: prj.filesOverride[pxt.TILEMAP_CODE],
                                        [pxt.TILEMAP_JRES]: prj.filesOverride[pxt.TILEMAP_JRES]
                                    };
                                }
                                addSnippet(<CodeSnippet>{
                                    name: card.name,
                                    code: prj.filesOverride[pxt.MAIN_TS],
                                    type: "blocks",
                                    ext: "ts",
                                    extraFiles,
                                    packages: pkgs
                                }, `example/${url}`, 0, url);
                            }
                            break;
                        }
                    }
                }));
            })
        }
    }

    pxt.log(`checked ${checked} files: ${broken} broken links, ${noTOCs.length} not in SUMMARY, ${snippets.length} snippets`);

    let p = Promise.resolve();
    if (compileSnippets)
        p = p.then(() => testSnippetsAsync(snippets, re, pycheck));
    return p.then(() => {
        if (broken > 0) {
            const msg = `${broken} broken links found in the docs`;
            if (pxt.appTarget.ignoreDocsErrors) pxt.log(msg)
            else U.userError(msg);
        }
    })
}