// function createSnapshot()
// from src/powerquery-parser/lexer/lexerSnapshot.ts [83:165]
// Builds an immutable LexerSnapshot from the line-based lexer state.
//
// The per-line tokens are first flattened into a single stream; then tokens
// whose content spans multiple lines (multiline comments, quoted identifiers,
// text literals) are concatenated back into single tokens/comments. All other
// line tokens map 1:1 onto snapshot tokens.
//
// @param state - the line-based lexer state to snapshot. Its cancellation
//                token (if any) is polled once per flattened token.
// @returns a LexerSnapshot over the flattened text, tokens, comments, and
//          line terminators.
// @throws whatever `throwIfCancelled` throws when the cancellation token has
//         been cancelled; also any error raised by the read* helpers.
function createSnapshot(state: Lexer.State): LexerSnapshot {
    // class properties
    const tokens: Token.Token[] = [];
    const comments: Comment.TComment[] = [];
    const flattenedLines: FlattenedLines = flattenLineTokens(state);
    const flatTokens: ReadonlyArray<FlatLineToken> = flattenedLines.flatLineTokens;
    const numFlatTokens: number = flatTokens.length;
    const text: string = flattenedLines.text;
    const maybeCancellationToken: ICancellationToken | undefined = state.maybeCancellationToken;

    let flatIndex: number = 0;

    while (flatIndex < numFlatTokens) {
        // Use the local captured above, consistent with the read* calls below
        // (previously this re-read state.maybeCancellationToken).
        maybeCancellationToken?.throwIfCancelled();

        const flatToken: FlatLineToken = flatTokens[flatIndex];

        switch (flatToken.kind) {
            // Comments that fit on a single line map directly.
            case Token.LineTokenKind.LineComment:
                comments.push(readLineComment(flatToken));
                break;

            case Token.LineTokenKind.MultilineComment:
                comments.push(readSingleLineMultilineComment(flatToken));
                break;

            // The three *Start kinds open a token that continues on later
            // lines. Each read* helper consumes through the matching end
            // token and reports the index of the last flat token consumed;
            // the `flatIndex += 1` at the bottom of the loop then advances
            // past it.
            case Token.LineTokenKind.MultilineCommentStart: {
                const concatenatedTokenRead: ConcatenatedCommentRead = readMultilineComment(
                    maybeCancellationToken,
                    state.locale,
                    flattenedLines,
                    flatToken,
                );

                comments.push(concatenatedTokenRead.comment);
                flatIndex = concatenatedTokenRead.flatIndexEnd;
                break;
            }

            case Token.LineTokenKind.QuotedIdentifierStart: {
                const concatenatedTokenRead: ConcatenatedTokenRead = readQuotedIdentifier(
                    maybeCancellationToken,
                    state.locale,
                    flattenedLines,
                    flatToken,
                );

                tokens.push(concatenatedTokenRead.token);
                flatIndex = concatenatedTokenRead.flatIndexEnd;
                break;
            }

            case Token.LineTokenKind.TextLiteralStart: {
                const concatenatedTokenRead: ConcatenatedTokenRead = readTextLiteral(
                    maybeCancellationToken,
                    state.locale,
                    flattenedLines,
                    flatToken,
                );

                tokens.push(concatenatedTokenRead.token);
                flatIndex = concatenatedTokenRead.flatIndexEnd;
                break;
            }

            // Every remaining LineTokenKind has an identically-named
            // TokenKind member, so the double cast is a deliberate
            // enum-to-enum bridge rather than an unchecked escape hatch.
            default: {
                const positionStart: Token.TokenPosition = flatToken.positionStart;
                const positionEnd: Token.TokenPosition = flatToken.positionEnd;

                tokens.push({
                    kind: flatToken.kind as unknown as Token.TokenKind,
                    data: flatToken.data,
                    positionStart,
                    positionEnd,
                });
            }
        }

        flatIndex += 1;
    }

    return new LexerSnapshot(text, tokens, comments, flattenedLines.lineTerminators);
}