# tokenize_line — from tokenizers/python/baronetokenizer.py


def tokenize_line(line: str) -> List[str]:
    """Split *line* into identifier tokens.

    The line is first split on single spaces; the layout markers
    'DCSP' and 'DCNL' are skipped entirely.  Each remaining group is
    broken into candidate tokens by the module-level ``ID_SPLIT_RE``
    pattern, and empty matches and Python keywords are discarded.

    Args:
        line: One preprocessed source line.

    Returns:
        The surviving tokens, in order of appearance.
    """
    tokens: List[str] = []
    for group in line.split(' '):
        # Layout sentinels carry no token content.
        if group == 'DCSP' or group == 'DCNL':
            continue
        for candidate in ID_SPLIT_RE.findall(group):
            if candidate and not keyword.iskeyword(candidate):
                tokens.append(candidate)
    return tokens