def kql_code_score()

in azure/Kqlmagic/activate_kernel_command.py [0:0]


def kql_code_score(lines: List[str]) -> bool:
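    # Heuristic that scores how KQL-like the cell text is: the lines are
    # tokenized with make_tokens_by_line, and KQL-looking features ("|" pipes,
    # "//" comments, known pipe/table operators) are tallied against
    # Python-looking ones ("#" comments, unexpected tokens after a pipe).
    # make_tokens_by_line, PIPE_OPERATORS, PIPE_OPERATORS2 and TABLE_OPERATORS
    # are defined or imported elsewhere in this module; `import tokenize` and
    # `from typing import List` are assumed at module level.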
    # strip leading whitespace so indentation does not skew the tokenization
    lines = [line.lstrip() for line in lines]
    tok_lines = make_tokens_by_line(lines)

    comment_count = 0
    pipe_count = 0
    pipe_operators_count = 0
    table_operators_count = 0
    kql_count = 0
    negative_score = 0

    first_content_token = True
    last_token_type = None
    last_op = None
    for tok_line in tok_lines:
        for idx, tokinfo in enumerate(tok_line):

            if tokinfo.type in [tokenize.INDENT, tokenize.DEDENT]:
                continue

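            # while a KQL "//" comment is open, skip tokens until the line ends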
            if last_op == "//":
                if tokinfo.type in [tokenize.NEWLINE, tokenize.NL]:
                    last_op = None
                else:
                    continue

            elif tokinfo.type in [tokenize.COMMENT]:
                negative_score += 1

            elif tokinfo.type == tokenize.OP:
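                # "|" or "//" opening a line, a leading "." or "--" as the very
                # first token, and a ">" directly abutting a preceding "|" all
                # add to the KQL score ("//" is KQL's comment marker and "."
                # starts KQL control commands)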
                if tokinfo.string == ">" and last_op == "|" and tokinfo.line[tokinfo.start[1] - 1] == "|":
                    kql_count += 1
                elif first_content_token or last_token_type in [tokenize.NEWLINE, tokenize.NL]:
                    if tokinfo.string in ["|", "//"]:
                        kql_count += 1
                    elif first_content_token and tokinfo.string in [".", "--"]:
                        kql_count += 1
                if tokinfo.string == "//":
                    comment_count += 1
                elif tokinfo.string == "|":
                    pipe_count += 1
                last_op = tokinfo.string
                first_content_token = False
            elif tokinfo.type in [tokenize.NEWLINE, tokenize.NL]:
                pass
            else:
                if last_op == "|":
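                    # the token right after a "|" should name a pipe operator;
                    # anything other than a NAME counts toward negative_score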
                    scored = False
                    if tokinfo.type != tokenize.NAME:
                        scored = True
                        negative_score += 1

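                    # hyphenated operators (e.g. "mv-expand") tokenize as
                    # NAME "-" NAME: rebuild the full name and require the
                    # three tokens to be adjacent before checking PIPE_OPERATORS2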
                    elif (idx + 2 < len(tok_line)
                          and tok_line[idx + 1].type == tokenize.OP
                          and tok_line[idx + 1].string == "-"
                          and tok_line[idx + 2].type == tokenize.NAME):

                        full_name = f"{tok_line[idx].string}{tok_line[idx + 1].string}{tok_line[idx + 2].string}"
                        if full_name in PIPE_OPERATORS2 \
                           and (tok_line[idx + 2].end[1] - tokinfo.start[1]) == len(full_name):

                            scored = True
                            pipe_operators_count += 1
                    
                    if not scored and tokinfo.string in PIPE_OPERATORS:
                        pipe_operators_count += 1
                
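                # a cell whose very first content token is a TABLE_OPERATORS
                # keyword bumps the table-operator tally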
                elif first_content_token and tokinfo.string in TABLE_OPERATORS:
                    table_operators_count += 1

                first_content_token = False
                last_op = None

            last_token_type = tokinfo.type
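
For context, here is a minimal sketch (not part of the Kqlmagic source) of the token stream the heuristic walks. make_tokens_by_line is assumed here to be IPython's helper of the same name (IPython.core.inputtransformer2); the KQL lines are made up for illustration.

import tokenize
from IPython.core.inputtransformer2 import make_tokens_by_line

kql_cell = [
    "StormEvents\n",
    "| where State == 'FLORIDA'\n",
    "| mv-expand EventDetails\n",
]
for tok_line in make_tokens_by_line(kql_cell):
    for tok in tok_line:
        if tok.type in (tokenize.OP, tokenize.NAME):
            print(tokenize.tok_name[tok.type], repr(tok.string))
# "|" arrives as a single OP token and "mv-expand" as NAME "-" NAME,
# which is exactly what the adjacency check in kql_code_score reconstructs.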