diff options
| author | Ulf Magnusson <ulfalizer@gmail.com> | 2012-12-17 18:27:30 +0100 |
|---|---|---|
| committer | Ulf Magnusson <ulfalizer@gmail.com> | 2012-12-17 18:37:04 +0100 |
| commit | 637d66b241c82bf53c7058b1a160fff0f2ca63d5 (patch) | |
| tree | c08e054b3fa9b1da18245ce0802076ba010361f7 | |
| parent | 7b98d9bf3722682642db4e6f5e4dc0a7bf4ad202 (diff) | |
Add selftests for string literal lexing.
| -rw-r--r-- | testsuite.py | 68 |
1 files changed, 68 insertions, 0 deletions
diff --git a/testsuite.py b/testsuite.py index 2ee7982..e87a589 100644 --- a/testsuite.py +++ b/testsuite.py @@ -156,6 +156,74 @@ def run_selftests(): True, True, False, True, True, True) + print "Testing string literal (constant symbol) lexing..." + + # + # String literal lexing. (This tests an internal API.) + # + + c = kconfiglib.Config("Kconfiglib/tests/empty") + + def verify_string_lex(s, res): + """Verifies that the string token 'res' is produced from lexing 's'. + Strips the first and last characters from 's' so we can use readable + raw strings as input.""" + s = s[1:-1] + s_res = c._tokenize(s, for_eval = True)[0] + verify(s_res == res, + "'{0}' produced the string token '{1}'. Expected '{2}'." + .format(s, s_res, res)) + + verify_string_lex(r""" "" """, "") + verify_string_lex(r""" '' """, "") + + verify_string_lex(r""" "a" """, "a") + verify_string_lex(r""" 'a' """, "a") + verify_string_lex(r""" "ab" """, "ab") + verify_string_lex(r""" 'ab' """, "ab") + verify_string_lex(r""" "abc" """, "abc") + verify_string_lex(r""" 'abc' """, "abc") + + verify_string_lex(r""" "'" """, "'") + verify_string_lex(r""" '"' """, '"') + + verify_string_lex(r""" "\"" """, '"') + verify_string_lex(r""" '\'' """, "'") + + verify_string_lex(r""" "\"\"" """, '""') + verify_string_lex(r""" '\'\'' """, "''") + + verify_string_lex(r""" "\'" """, "'") + verify_string_lex(r""" '\"' """, '"') + + verify_string_lex(r""" "\\" """, "\\") + verify_string_lex(r""" '\\' """, "\\") + + verify_string_lex(r""" "\a\\\b\c\"'d" """, 'a\\bc"\'d') + verify_string_lex(r""" '\a\\\b\c\'"d' """, "a\\bc'\"d") + + def verify_string_bad(s): + """Verifies that tokenizing 's' throws a Kconfig_Syntax_Error. Strips + the first and last characters from 's' so we can use readable raw + strings as input.""" + s = s[1:-1] + caught_exception = False + try: + c._tokenize(s, for_eval = True) + except kconfiglib.Kconfig_Syntax_Error: + caught_exception = True + verify(caught_exception, "Tokenization of '{0}' should have failed." + .format(s)) + + verify_string_bad(r""" " """) + verify_string_bad(r""" ' """) + verify_string_bad(r""" "' """) + verify_string_bad(r""" '" """) + verify_string_bad(r""" "\" """) + verify_string_bad(r""" '\' """) + verify_string_bad(r""" "foo """) + verify_string_bad(r""" 'foo """) + print "Testing is_modifiable() and range queries..." # |
