/*
 * Copyright 2008-2010 the T2 Project ant the Others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//package org.t2framework.commons.util;

/**
 * A simple JSON tokenizer: walks the input string and produces a stream of
 * structural tokens ({@link TokenType}) while exposing the text between
 * structural characters via {@link #getToken()}.
 *
 * @author shot
 *
 * TODO : unicode, quotation("), reverse solidus(\), solidus(/),
 */
public class JSONTokenizer {

    public static final char COMMA = ',';

    public static final char COLON = ':';

    public static final char SINGLE_QUOTE = '\'';

    public static final char DOUBLE_QUOTE = '\"';

    public static final String NULL_STRING = "null";

    public static final String TRUE_STRING = "true";

    public static final String FALSE_STRING = "false";

    public static final char START_BRACKET = '[';

    public static final char END_BRACKET = ']';

    public static final char START_BRACE = '{';

    public static final char END_BRACE = '}';

    // public static final char UTF8_BOM = 0xFEFF;

    /** The original input (never null; a null argument becomes "null"). */
    protected final String orgString;

    /** Cached length of {@link #orgString}. */
    protected final int orgLength;

    /** Text of the most recently produced token. */
    protected String tokenString;

    /** Index of the current structural character. */
    protected int pos;

    /** Index at which the next scan begins. */
    protected int nextPos;

    /** Token type at the current position; EOF until prepare() says otherwise. */
    protected TokenType type = TokenType.EOF;

    /**
     * Creates a tokenizer over the given JSON text. A null input is treated
     * as the literal string "null".
     *
     * @param s JSON text to tokenize (may be null)
     */
    public JSONTokenizer(String s) {
        // TODO : check grammer first.
        this.orgString = (s != null) ? s : "null";
        this.orgLength = this.orgString.length();
        this.tokenString = this.orgString;
        prepare(0);
    }

    /**
     * Positions the tokenizer at index {@code i}: records the token type for
     * a structural character found there (brace/bracket only — other
     * characters leave {@link #type} unchanged) and advances the scan cursor.
     *
     * @param i index into the original string
     */
    protected void prepare(int i) {
        if (i < orgLength) {
            char ch = orgString.charAt(i);
            if (ch == START_BRACE) {
                type = TokenType.START_BRACE;
            } else if (ch == END_BRACE) {
                type = TokenType.END_BRACE;
            } else if (ch == START_BRACKET) {
                type = TokenType.START_BRACKET;
            } else if (ch == END_BRACKET) {
                type = TokenType.END_BRACKET;
            }
            this.pos = i;
            this.nextPos = i + 1;
        } else {
            type = TokenType.EOF;
        }
    }

    /**
     * Advances to the next structural character and returns its token type.
     * As a side effect, {@link #getToken()} is updated with the text that
     * preceded it (unquoted when the text is in key position).
     *
     * @return the token type reached, or EOF when input is exhausted
     */
    public TokenType nextToken() {
        if (type == TokenType.EOF) {
            return TokenType.EOF;
        }
        String s = this.tokenString;
        TokenType retType = TokenType.EOF;
        boolean key = true;
        for (int i = this.nextPos; i < this.orgLength; i++) {
            char ch = this.orgString.charAt(i);
            if (isIgnorable(ch)) {
                continue;
            }
            if (ch == START_BRACE) {
                // NOTE: element runs to the end of input here (quirk kept —
                // callers/tests rely on the trailing text being included).
                s = getElement(nextPos, this.orgLength - 1);
                retType = TokenType.START_BRACE;
                prepare(i);
                key = true;
                break;
            } else if (ch == START_BRACKET) {
                s = getElement(nextPos, this.orgLength - 1);
                retType = TokenType.START_BRACKET;
                prepare(i);
                key = true;
                break;
            } else if (ch == COLON) {
                if (i == this.orgLength - 1) {
                    throw new IllegalStateException();
                }
                s = getElement(nextPos, i - 1);
                this.type = retType = TokenType.COLON;
                prepare(i);
                key = true;
                break;
            } else if (ch == COMMA) {
                if (i == this.orgLength - 1) {
                    throw new IllegalArgumentException();
                }
                s = getElement(nextPos, i - 1);
                // A comma directly after "}" or "]" closes an object/array
                // element and is reported as END_COMMA instead of COMMA.
                this.type = retType = (isObjectOrArrayEnd(i - 1)) ? TokenType.END_COMMA
                        : TokenType.COMMA;
                prepare(i);
                key = false;
                break;
            } else if (ch == END_BRACKET) {
                this.type = (i == this.orgLength - 1) ? TokenType.EOF
                        : TokenType.END_BRACKET;
                retType = TokenType.END_BRACKET;
                s = getElement(nextPos, i - 1);
                prepare(i);
                key = false;
                break;
            } else if (ch == END_BRACE) {
                this.type = (i == this.orgLength - 1) ? TokenType.EOF
                        : TokenType.END_BRACE;
                retType = TokenType.END_BRACE;
                s = getElement(this.nextPos, i - 1);
                prepare(i);
                key = false;
                break;
            }
        }
        s = removeIgnorable(s);
        // Keys lose their surrounding quotes; values keep them verbatim.
        this.tokenString = (key) ? unquote(s) : s;
        return retType;
    }

    /**
     * Looks backwards from {@code pos} for the first non-ignorable character
     * and reports whether it closes an object or array.
     *
     * @param pos index to scan backwards from
     * @return true when the nearest significant character is '}' or ']'
     */
    protected boolean isObjectOrArrayEnd(int pos) {
        for (int i = pos; 0 < i; i--) {
            char c = this.orgString.charAt(i);
            if (isIgnorable(c)) {
                continue;
            }
            if (c == END_BRACE || c == END_BRACKET) {
                return true;
            } else {
                return false;
            }
        }
        return false;
    }

    /**
     * Strips leading ignorable characters from {@code s}.
     * TODO refactor.
     *
     * @param s candidate token text (may be null or empty)
     * @return s without its ignorable prefix, or "" when all-ignorable
     */
    protected String removeIgnorable(String s) {
        if (isEmpty(s)) {
            return s;
        }
        for (int pos1 = 0; pos1 < s.length(); pos1++) {
            if (isIgnorable(s.charAt(pos1)) == false) {
                return s.substring(pos1);
            }
        }
        return "";
    }

    /**
     * @param text string to test
     * @return true when {@code text} is null or zero-length
     */
    public static boolean isEmpty(String text) {
        return text == null || text.length() == 0;
    }

    /**
     * Extracts the substring of the original input between the given bounds,
     * trimming ignorable characters from both ends.
     *
     * @param orgStartPos inclusive start index
     * @param orgEndPos   inclusive end index
     * @return trimmed substring of the original input
     */
    protected String getElement(final int orgStartPos, final int orgEndPos) {
        int startPos = orgStartPos;
        for (; startPos < orgEndPos; startPos++) {
            char ch = this.orgString.charAt(startPos);
            if (isIgnorable(ch) == false) {
                break;
            }
        }
        int endPos = orgEndPos;
        for (; startPos < endPos; endPos--) {
            char ch = this.orgString.charAt(endPos);
            if (isIgnorable(ch) == false) {
                break;
            }
        }
        return this.orgString.substring(startPos, endPos + 1);
    }

    /**
     * @param ch character to classify
     * @return true for whitespace-like characters that the tokenizer skips
     *         (backspace, form feed, newline, carriage return, tab, space)
     */
    protected static boolean isIgnorable(char ch) {
        boolean ret = false;
        switch (ch) {
        case '\b':
        case '\f':
        case '\n':
        case '\r':
        case '\t':
        case ' ':
            ret = true;
            break;
        default:
            ret = false;
            break;
        }
        return ret;
    }

    /**
     * Removes one pair of surrounding quotes (double or single) when present.
     *
     * @param str text to unquote (may be null or empty)
     * @return the unquoted text, or {@code str} unchanged when not quoted
     */
    public static String unquote(String str) {
        if (str == null || str.length() == 0) {
            return str;
        }
        if (isQuoted(str, DOUBLE_QUOTE)) {
            return chopQuote(str);
        } else if (isQuoted(str, SINGLE_QUOTE)) {
            return chopQuote(str);
        } else {
            return str;
        }
    }

    /**
     * Drops the first and last characters of {@code str}.
     *
     * @param str text of length >= 2 (not validated here)
     * @return str without its first and last character
     */
    public static String chopQuote(String str) {
        return str.substring(1, str.length() - 1);
    }

    /**
     * @param str   text to test
     * @param quote quote character to look for
     * @return true when {@code str} starts with {@code quote} and contains
     *         another occurrence later on
     */
    protected static boolean isQuoted(String str, char quote) {
        return str.indexOf(quote) == 0 && str.lastIndexOf(quote) > 0;
    }

    /**
     * @return the text of the most recently produced token
     */
    public String getToken() {
        return this.tokenString;
    }

    /**
     * Token kinds produced by {@link JSONTokenizer}. Each structural token
     * renders its JSON character via {@code toString()}.
     */
    public static enum TokenType {
        /** "{" */
        START_BRACE {
            public String toString() {
                return "{";
            }
        },

        /** "}" */
        END_BRACE {
            public String toString() {
                return "}";
            }
        },

        /** "[" */
        START_BRACKET {
            public String toString() {
                return "[";
            }
        },

        /** "]" */
        END_BRACKET {
            public String toString() {
                return "]";
            }
        },

        /** "," */
        COMMA {
            public String toString() {
                return ",";
            }
        },

        /** ":" */
        COLON {
            public String toString() {
                return ":";
            }
        },

        /** "," that is also the end of {} or []. */
        END_COMMA {
            public String toString() {
                return ",";
            }
        },

        /** End of file. */
        EOF;

    }

    /**
     * @return the token type at the current position
     */
    public TokenType currentTokenType() {
        return type;
    }
}
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test2_null" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "null" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"null\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test3_keyvalue" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\\"aaa\\\":123}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"aaa\\\":123}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test4_array" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"[\\\"aaa\\\",123,\\\"b\\\"]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"[\\\"aaa\\\",123,\\\"b\\\"]\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"aaa\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"b\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test5_emptyobject" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test6_emptyarray" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"[]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"[]\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test7_multiplekeyvalue" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"{\\\"aaa\\\":123,\\\"bbb\\\":true}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"aaa\\\":123,\\\"bbb\\\":true}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." 
"" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test8_keyvaluewithsinglequote" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{'aaa':'123'}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{'aaa':'123'}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"'123'\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test9_blankwithtab" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\t}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\t}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test10_blankwithbackspace" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\b}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\b}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test11_blankwithformfeed" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\f}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\f}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." 
"" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test12_blankwithnewline" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\n}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\n}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test13_blankwithcarriagereturn" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\r}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\r}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test14_keyvalue_nest1" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"{\\\"aaa\\\":123, \\\"bbb\\\":{\\\"b1\\\":true}}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"aaa\\\":123, \\\"bbb\\\":{\\\"b1\\\":true}}\"" "" "," " " "tokenizer" "\n " "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"b1\\\":true}}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test15_ignorableSpaceShouldIgnoreAtObject" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\\"aaa\\\"\\r\\t:\\n123 }\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." 
"" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test16_ignorableSpaceShouldIgnoreAtArray" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"[ \\\"aaa\\\"\\t,123\\b,\\f\\'b\\' ]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"aaa\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\'b\\'\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test17_blankwithlotsofignorables" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\t\\r\\n \\t}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test18_keyvalue_nest3_array" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"{\\\"aaa\\\":123, \\\"bbb\\\":{\\\"b1\\\":true},\\t\\\"ccc\\\":\\\"fuga\\\", \\\"array1\\\":[\\\"1.1233333333000000000000001\\\"\\r,\\b1.1233333333000000000000001, \\\"3.0\\\"]}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"ccc\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"fuga\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"array1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"1.1233333333000000000000001\\\"\"" "" "," " " "tokenizer" "\n " "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"1.1233333333000000000000001\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"3.0\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test18_stringEnquote" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\\"a'aa\\\":\\\"?????\\\"}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"a'aa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"?????\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test19_booleanarray" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"[true, false,true]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"false\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test20_nestarray" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"[1, [2, 3, 4, 5], 3]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"1\"" "" "," " " "tokenizer" "" "." 
"" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"2\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"3\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"4\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"5\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"3\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test21_nestarrayandobjects" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"[1, [2, 3, 4, 5], \\\"key\\\":{true, false, \\\"hoge\\\", \\\"array\\\":[0.001, 0.00001, 1.2E-7] }, 3]\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"2\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"3\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"4\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"5\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"key\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"false\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"hoge\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"array\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"0.001\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"0.00001\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"1.2E-7\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACKET" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"3\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test22_stringSingleEnquote" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\'a'aa\\':\\\"?????\\\"}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"a'aa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"?????\\\"\"" "" "," " " "tokenizer" "" "." 
"" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n /**\n * \n * {@.en }\n * \n * <br />\n * \n * {@.ja ?????????key????String??????????.}\n * \n * @throws Exception\n */\n " "public" " " "void" " " "test23_keyMustBeString" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{aaa:123}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test24_keyvalue_nestOnlyNestObject" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "\"{\\\"bbb\\\":{\\\"b1\\\":true}}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"bbb\\\":{\\\"b1\\\":true}}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"b1\\\":true}}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test25_keyvalue_nestOnlyNestObject2" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"{\\\"bbb\\\":{\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"bbb\\\":{\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" "," " " "tokenizer" "\n " "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"vvv\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"null\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test26_keyvalue_deepNest1" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "final" " " "String" " " "json" " " "=" " " "\"{\\\"bbb\\\":{\\\"dates\\\":{\\\"from\\\":20090101,\\n \\\"to\\\":20091231},\\t\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" ";" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "" "json" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "json" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "\n " "\"{\\\"dates\\\":{\\\"from\\\":20090101,\\n \\\"to\\\":20091231},\\t\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" "," "\n " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"dates\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "\n " "\"{\\\"from\\\":20090101,\\n \\\"to\\\":20091231},\\t\\\"b1\\\":true}, \\\"vvv\\\":null}\"" "" "," "\n " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"from\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"20090101\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." 
"" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"to\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"20091231\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"vvv\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"null\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." 
"" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n " "public" " " "void" " " "test27_keyvalue_nest2" "" "(" "" ")" " " "throws" " " "Exception" " " "{" "\n " "JSONTokenizer" " " "tokenizer" " " "=" " " "new" " " "JSONTokenizer" "" "(" "\n " "\"{\\\"aaa\\\":123, \\\"bbb\\\":{\\\"b1\\\":true},\\t\\\"ccc\\\":\\\"fuga\\\"}\"" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "currentTokenType" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"aaa\\\":123, \\\"bbb\\\":{\\\"b1\\\":true},\\t\\\"ccc\\\":\\\"fuga\\\"}\"" "" "," "\n " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"aaa\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"123\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"bbb\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "{" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "START_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"{\\\"b1\\\":true},\\t\\\"ccc\\\":\\\"fuga\\\"}\"" "" "," " " "tokenizer" "\n " "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"b1\"" "" "," " " "tokenizer" "" "." 
"" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"true\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_COMMA" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "COLON" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"ccc\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "END_BRACE" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "\"\\\"fuga\\\"\"" "" "," " " "tokenizer" "" "." "" "getToken" "" "(" "" ")" "" ")" "" ";" "\n " "assertEquals" "" "(" "" "TokenType" "" "." "" "EOF" "" "," " " "tokenizer" "" "." "" "nextToken" "" "(" "" ")" "" ")" "" ";" "\n " "}" "\n\n" "}" "\n\n \n "
Snippet is not live.
Travelled to 12 computer(s): aoiabmzegqzx, bhatertpkbcr, cbybwowwnfue, gwrvuhgaqvyk, ishqpsrjomds, lpdgvwnxivlt, mqqgnosmbjvj, pyentgdyhuwx, pzhvpgtvlbxg, tslmcundralx, tvejysmllsmz, vouqrxazstgt
No comments. add comment
Snippet ID: | #2000399 |
Snippet name: | Application of #651 on #2000398 |
Eternal ID of this version: | #2000399/1 |
Text MD5: | 2b358bf45510bd43953e882e7c75a9ff |
Author: | someone |
Category: | |
Type: | New Tinybrain snippet |
Gummipassword: | apply translator 651 |
Uploaded from IP: | 84.201.25.107 |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2015-06-28 18:12:50 |
Source code size: | 63044 bytes / 10567 lines |
Pitched / IR pitched: | No / Yes |
Views / Downloads: | 700 / 123 |
Referenced in: | [show references] |