fn tokenize_quoted_string_escape()

in src/tokenizer.rs [3609:3670]


    fn tokenize_quoted_string_escape() {
        let dialect = SnowflakeDialect {};
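        // Each case is (sql, raw token text with unescaping disabled,
        // resolved text with escape sequences processed).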
        for (sql, expected, expected_unescaped) in [
            (r#"'%a\'%b'"#, r#"%a\'%b"#, r#"%a'%b"#),
            (r#"'a\'\'b\'c\'d'"#, r#"a\'\'b\'c\'d"#, r#"a''b'c'd"#),
            (r#"'\\'"#, r#"\\"#, r#"\"#),
            (
                r#"'\0\a\b\f\n\r\t\Z'"#,
                r#"\0\a\b\f\n\r\t\Z"#,
                "\0\u{7}\u{8}\u{c}\n\r\t\u{1a}",
            ),
            (r#"'\"'"#, r#"\""#, "\""),
            (r#"'\\a\\b\'c'"#, r#"\\a\\b\'c"#, r#"\a\b'c"#),
            (r#"'\'abcd'"#, r#"\'abcd"#, r#"'abcd"#),
            (r#"'''a''b'"#, r#"''a''b"#, r#"'a'b"#),
            (r#"'\q'"#, r#"\q"#, r#"q"#),
            (r#"'\%\_'"#, r#"\%\_"#, r#"%_"#),
            (r#"'\\%\\_'"#, r#"\\%\\_"#, r#"\%\_"#),
        ] {
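            // With unescaping disabled, the escape sequences are preserved verbatim.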
            let tokens = Tokenizer::new(&dialect, sql)
                .with_unescape(false)
                .tokenize()
                .unwrap();
            let expected = vec![Token::SingleQuotedString(expected.to_string())];
            compare(expected, tokens);

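            // With unescaping enabled, the escape sequences are resolved.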
            let tokens = Tokenizer::new(&dialect, sql)
                .with_unescape(true)
                .tokenize()
                .unwrap();
            let expected = vec![Token::SingleQuotedString(expected_unescaped.to_string())];
            compare(expected, tokens);
        }

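        // A trailing backslash escapes the closing quote, so the literal is unterminated.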
        for sql in [r#"'\'"#, r#"'ab\'"#] {
            let mut tokenizer = Tokenizer::new(&dialect, sql);
            assert_eq!(
                "Unterminated string literal",
                tokenizer.tokenize().unwrap_err().message.as_str(),
            );
        }

        // Dialects without backslash escapes (e.g. GenericDialect) keep the backslash
        // as a literal character, so the final quote still terminates the string.
        for (sql, expected) in [(r#"'\'"#, r#"\"#), (r#"'ab\'"#, r#"ab\"#)] {
            let dialect = GenericDialect {};
            let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();

            let expected = vec![Token::SingleQuotedString(expected.to_string())];

            compare(expected, tokens);
        }

        // MySQL special case: `\%` and `\_` are kept verbatim even when unescaping,
        // since they serve as LIKE wildcard escapes.
        for (sql, expected) in [(r#"'\%'"#, r#"\%"#), (r#"'\_'"#, r#"\_"#)] {
            let dialect = MySqlDialect {};
            let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();

            let expected = vec![Token::SingleQuotedString(expected.to_string())];

            compare(expected, tokens);
        }
    }