2009-11-11 17:39:14 +01:00
|
|
|
/*
|
|
|
|
* JSON lexer
|
|
|
|
*
|
|
|
|
* Copyright IBM, Corp. 2009
|
|
|
|
*
|
|
|
|
* Authors:
|
|
|
|
* Anthony Liguori <aliguori@us.ibm.com>
|
|
|
|
*
|
|
|
|
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
|
|
|
|
* See the COPYING.LIB file in the top-level directory.
|
|
|
|
*
|
|
|
|
*/
|
|
|
|
|
2016-01-29 18:50:01 +01:00
|
|
|
#include "qemu/osdep.h"
|
2009-11-11 17:39:14 +01:00
|
|
|
#include "qemu-common.h"
|
2012-12-17 18:19:43 +01:00
|
|
|
#include "qapi/qmp/json-lexer.h"
|
2009-11-11 17:39:14 +01:00
|
|
|
|
2011-06-01 19:14:52 +02:00
|
|
|
#define MAX_TOKEN_SIZE (64ULL << 20)
|
|
|
|
|
2009-11-11 17:39:14 +01:00
|
|
|
/*
|
2016-06-10 04:48:06 +02:00
|
|
|
* Required by JSON (RFC 7159):
|
|
|
|
*
|
|
|
|
* \"([^\\\"]|\\[\"'\\/bfnrt]|\\u[0-9a-fA-F]{4})*\"
|
|
|
|
* -?(0|[1-9][0-9]*)(.[0-9]+)?([eE][-+]?[0-9]+)?
|
2009-11-11 17:39:14 +01:00
|
|
|
* [{}\[\],:]
|
2016-06-10 04:48:06 +02:00
|
|
|
* [a-z]+ # covers null, true, false
|
|
|
|
*
|
|
|
|
* Extension of '' strings:
|
|
|
|
*
|
|
|
|
* '([^\\']|\\[\"'\\/bfnrt]|\\u[0-9a-fA-F]{4})*'
|
|
|
|
*
|
|
|
|
* Extension for vararg handling in JSON construction:
|
|
|
|
*
|
|
|
|
* %((l|ll|I64)?d|[ipsf])
|
2009-11-11 17:39:14 +01:00
|
|
|
*
|
|
|
|
*/
|
|
|
|
|
|
|
|
/*
 * Lexer states.
 *
 * Ordering matters:
 *  - IN_ERROR must be 0, because the transition table below relies on
 *    default (zero) initialization for every character not explicitly
 *    listed, and an unlisted character means "error".
 *  - IN_START must be the last (largest) value; the build-time check
 *    after this enum asserts that the JSON_* token values (which also
 *    appear as entries in the table) do not collide with any state.
 */
enum json_lexer_state {
    IN_ERROR = 0,               /* must really be 0, see json_lexer[] */
    /* double-quoted string: \uXXXX hex digits, counted down 3..0 */
    IN_DQ_UCODE3,
    IN_DQ_UCODE2,
    IN_DQ_UCODE1,
    IN_DQ_UCODE0,
    IN_DQ_STRING_ESCAPE,        /* just saw backslash inside "..." */
    IN_DQ_STRING,               /* inside "..." */
    /* single-quoted string (QMP extension), mirrors the DQ states */
    IN_SQ_UCODE3,
    IN_SQ_UCODE2,
    IN_SQ_UCODE1,
    IN_SQ_UCODE0,
    IN_SQ_STRING_ESCAPE,        /* just saw backslash inside '...' */
    IN_SQ_STRING,               /* inside '...' */
    /* numbers */
    IN_ZERO,                    /* saw leading 0 */
    IN_DIGITS,                  /* digits of exponent */
    IN_DIGIT,                   /* need at least one digit after [eE][-+] */
    IN_EXP_E,                   /* just saw 'e' or 'E' */
    IN_MANTISSA,                /* just saw '.', need a digit */
    IN_MANTISSA_DIGITS,         /* digits after '.' */
    IN_NONZERO_NUMBER,          /* integer part, first digit 1-9 */
    IN_NEG_NONZERO_NUMBER,      /* just saw leading '-' */
    /* null / true / false */
    IN_KEYWORD,
    /* %-escapes for vararg JSON construction (QMP extension) */
    IN_ESCAPE,
    IN_ESCAPE_L,                /* saw %l */
    IN_ESCAPE_LL,               /* saw %ll */
    IN_ESCAPE_I,                /* saw %I */
    IN_ESCAPE_I6,               /* saw %I6 */
    IN_ESCAPE_I64,              /* saw %I64 */
    IN_WHITESPACE,
    IN_START,                   /* initial state; must be last, see above */
};
|
|
|
|
|
2015-11-25 22:23:25 +01:00
|
|
|
/* States and token types share the table's value space; make sure the
 * JSON_* token values sit strictly above every lexer state. */
QEMU_BUILD_BUG_ON((int)JSON_MIN <= (int)IN_START);

/* Fill all 128 ASCII slots of a state's row with a terminal token type.
 * Individual designated initializers after this macro override single
 * entries (later initializers win in a C designated-initializer list). */
#define TERMINAL(state) [0 ... 0x7F] = (state)

/* Return whether TERMINAL is a terminal state and the transition to it
   from OLD_STATE required lookahead.  This happens whenever the table
   below uses the TERMINAL macro.  (Detection works because TERMINAL()
   also fills slot 0, which no explicit initializer ever overrides.) */
#define TERMINAL_NEEDED_LOOKAHEAD(old_state, terminal) \
    (json_lexer[(old_state)][0] == (terminal))
|
|
|
|
|
2009-11-11 17:39:14 +01:00
|
|
|
/*
 * The state transition table: json_lexer[current state][input byte]
 * yields either the next state, or a JSON_* token type when the current
 * input completes a token.  Rows opened with TERMINAL(tok) emit `tok`
 * on any ASCII byte not explicitly listed (that byte is then re-examined
 * from IN_START — lookahead, see TERMINAL_NEEDED_LOOKAHEAD).
 */
static const uint8_t json_lexer[][256] = {
    /* Relies on default initialization to IN_ERROR! */

    /* double quote string */
    [IN_DQ_UCODE3] = {
        ['0' ... '9'] = IN_DQ_STRING,
        ['a' ... 'f'] = IN_DQ_STRING,
        ['A' ... 'F'] = IN_DQ_STRING,
    },
    [IN_DQ_UCODE2] = {
        ['0' ... '9'] = IN_DQ_UCODE3,
        ['a' ... 'f'] = IN_DQ_UCODE3,
        ['A' ... 'F'] = IN_DQ_UCODE3,
    },
    [IN_DQ_UCODE1] = {
        ['0' ... '9'] = IN_DQ_UCODE2,
        ['a' ... 'f'] = IN_DQ_UCODE2,
        ['A' ... 'F'] = IN_DQ_UCODE2,
    },
    [IN_DQ_UCODE0] = {
        ['0' ... '9'] = IN_DQ_UCODE1,
        ['a' ... 'f'] = IN_DQ_UCODE1,
        ['A' ... 'F'] = IN_DQ_UCODE1,
    },
    [IN_DQ_STRING_ESCAPE] = {
        ['b'] = IN_DQ_STRING,
        ['f'] = IN_DQ_STRING,
        ['n'] = IN_DQ_STRING,
        ['r'] = IN_DQ_STRING,
        ['t'] = IN_DQ_STRING,
        ['/'] = IN_DQ_STRING,
        ['\\'] = IN_DQ_STRING,
        ['\''] = IN_DQ_STRING,
        ['\"'] = IN_DQ_STRING,
        ['u'] = IN_DQ_UCODE0,
    },
    [IN_DQ_STRING] = {
        /* Any byte except NUL, '"' and '\\'; the two ranges skip
         * 0xC0/0xC1 and 0xF5..0xFF, which can never start a valid
         * UTF-8 sequence. */
        [1 ... 0xBF] = IN_DQ_STRING,
        [0xC2 ... 0xF4] = IN_DQ_STRING,
        ['\\'] = IN_DQ_STRING_ESCAPE,
        ['"'] = JSON_STRING,
    },

    /* single quote string */
    [IN_SQ_UCODE3] = {
        ['0' ... '9'] = IN_SQ_STRING,
        ['a' ... 'f'] = IN_SQ_STRING,
        ['A' ... 'F'] = IN_SQ_STRING,
    },
    [IN_SQ_UCODE2] = {
        ['0' ... '9'] = IN_SQ_UCODE3,
        ['a' ... 'f'] = IN_SQ_UCODE3,
        ['A' ... 'F'] = IN_SQ_UCODE3,
    },
    [IN_SQ_UCODE1] = {
        ['0' ... '9'] = IN_SQ_UCODE2,
        ['a' ... 'f'] = IN_SQ_UCODE2,
        ['A' ... 'F'] = IN_SQ_UCODE2,
    },
    [IN_SQ_UCODE0] = {
        ['0' ... '9'] = IN_SQ_UCODE1,
        ['a' ... 'f'] = IN_SQ_UCODE1,
        ['A' ... 'F'] = IN_SQ_UCODE1,
    },
    [IN_SQ_STRING_ESCAPE] = {
        ['b'] = IN_SQ_STRING,
        ['f'] = IN_SQ_STRING,
        ['n'] = IN_SQ_STRING,
        ['r'] = IN_SQ_STRING,
        ['t'] = IN_SQ_STRING,
        ['/'] = IN_SQ_STRING,
        ['\\'] = IN_SQ_STRING,
        ['\''] = IN_SQ_STRING,
        ['\"'] = IN_SQ_STRING,
        ['u'] = IN_SQ_UCODE0,
    },
    [IN_SQ_STRING] = {
        /* see IN_DQ_STRING for the range rationale */
        [1 ... 0xBF] = IN_SQ_STRING,
        [0xC2 ... 0xF4] = IN_SQ_STRING,
        ['\\'] = IN_SQ_STRING_ESCAPE,
        ['\''] = JSON_STRING,
    },

    /* Zero */
    [IN_ZERO] = {
        TERMINAL(JSON_INTEGER),
        /* JSON forbids leading zeros: "01" is an error, not 1 */
        ['0' ... '9'] = IN_ERROR,
        ['.'] = IN_MANTISSA,
    },

    /* Float */
    [IN_DIGITS] = {
        TERMINAL(JSON_FLOAT),
        ['0' ... '9'] = IN_DIGITS,
    },

    [IN_DIGIT] = {
        ['0' ... '9'] = IN_DIGITS,
    },

    [IN_EXP_E] = {
        ['-'] = IN_DIGIT,
        ['+'] = IN_DIGIT,
        ['0' ... '9'] = IN_DIGITS,
    },

    [IN_MANTISSA_DIGITS] = {
        TERMINAL(JSON_FLOAT),
        ['0' ... '9'] = IN_MANTISSA_DIGITS,
        ['e'] = IN_EXP_E,
        ['E'] = IN_EXP_E,
    },

    [IN_MANTISSA] = {
        ['0' ... '9'] = IN_MANTISSA_DIGITS,
    },

    /* Number */
    [IN_NONZERO_NUMBER] = {
        TERMINAL(JSON_INTEGER),
        ['0' ... '9'] = IN_NONZERO_NUMBER,
        ['e'] = IN_EXP_E,
        ['E'] = IN_EXP_E,
        ['.'] = IN_MANTISSA,
    },

    [IN_NEG_NONZERO_NUMBER] = {
        ['0'] = IN_ZERO,
        ['1' ... '9'] = IN_NONZERO_NUMBER,
    },

    /* keywords */
    [IN_KEYWORD] = {
        TERMINAL(JSON_KEYWORD),
        ['a' ... 'z'] = IN_KEYWORD,
    },

    /* whitespace */
    [IN_WHITESPACE] = {
        TERMINAL(JSON_SKIP),
        [' '] = IN_WHITESPACE,
        ['\t'] = IN_WHITESPACE,
        ['\r'] = IN_WHITESPACE,
        ['\n'] = IN_WHITESPACE,
    },

    /* escape */
    [IN_ESCAPE_LL] = {
        ['d'] = JSON_ESCAPE,    /* %lld */
        ['u'] = JSON_ESCAPE,    /* %llu */
    },

    [IN_ESCAPE_L] = {
        ['d'] = JSON_ESCAPE,    /* %ld */
        ['l'] = IN_ESCAPE_LL,
        ['u'] = JSON_ESCAPE,    /* %lu */
    },

    [IN_ESCAPE_I64] = {
        ['d'] = JSON_ESCAPE,    /* %I64d */
        ['u'] = JSON_ESCAPE,    /* %I64u */
    },

    [IN_ESCAPE_I6] = {
        ['4'] = IN_ESCAPE_I64,
    },

    [IN_ESCAPE_I] = {
        ['6'] = IN_ESCAPE_I6,
    },

    [IN_ESCAPE] = {
        ['d'] = JSON_ESCAPE,
        ['i'] = JSON_ESCAPE,
        ['p'] = JSON_ESCAPE,
        ['s'] = JSON_ESCAPE,
        ['u'] = JSON_ESCAPE,
        ['f'] = JSON_ESCAPE,
        ['l'] = IN_ESCAPE_L,
        ['I'] = IN_ESCAPE_I,
    },

    /* top level rule */
    [IN_START] = {
        ['"'] = IN_DQ_STRING,
        ['\''] = IN_SQ_STRING,
        ['0'] = IN_ZERO,
        ['1' ... '9'] = IN_NONZERO_NUMBER,
        ['-'] = IN_NEG_NONZERO_NUMBER,
        ['{'] = JSON_LCURLY,
        ['}'] = JSON_RCURLY,
        ['['] = JSON_LSQUARE,
        [']'] = JSON_RSQUARE,
        [','] = JSON_COMMA,
        [':'] = JSON_COLON,
        ['a' ... 'z'] = IN_KEYWORD,
        ['%'] = IN_ESCAPE,
        [' '] = IN_WHITESPACE,
        ['\t'] = IN_WHITESPACE,
        ['\r'] = IN_WHITESPACE,
        ['\n'] = IN_WHITESPACE,
    },
};
|
|
|
|
|
|
|
|
/*
 * Initialize @lexer.  Tokens are delivered through @func.
 * The caller must eventually release resources with json_lexer_destroy().
 */
void json_lexer_init(JSONLexer *lexer, JSONLexerEmitter func)
{
    lexer->state = IN_START;
    lexer->emit = func;
    /* position tracking for error messages */
    lexer->x = 0;
    lexer->y = 0;
    /* small initial size; grows on demand as the token accumulates */
    lexer->token = g_string_sized_new(3);
}
|
|
|
|
|
2011-06-01 19:14:57 +02:00
|
|
|
/*
 * Run one input byte @ch through the state machine.
 *
 * The current token text accumulates in lexer->token; when a transition
 * yields a JSON_* token type, the token is passed to lexer->emit() and
 * the machine resets to IN_START.  Transitions produced by the
 * TERMINAL() rows do not consume @ch (one-character lookahead), so the
 * loop below re-runs the same byte from the fresh IN_START state.
 *
 * @flush: true when called from json_lexer_flush() with a NUL sentinel;
 *         prevents the lookahead loop from spinning on the sentinel.
 *
 * Returns 0 (errors are reported in-band via a JSON_ERROR emit).
 */
static int json_lexer_feed_char(JSONLexer *lexer, char ch, bool flush)
{
    int char_consumed, new_state;

    /* Track position for diagnostics: x = column, y = line */
    lexer->x++;
    if (ch == '\n') {
        lexer->x = 0;
        lexer->y++;
    }

    do {
        /* Must be strictly less: lexer->state indexes json_lexer[] below.
         * (The previous "<=" would have accepted an out-of-bounds state.) */
        assert(lexer->state < ARRAY_SIZE(json_lexer));
        new_state = json_lexer[lexer->state][(uint8_t)ch];
        char_consumed = !TERMINAL_NEEDED_LOOKAHEAD(lexer->state, new_state);
        if (char_consumed) {
            g_string_append_c(lexer->token, ch);
        }

        switch (new_state) {
        case JSON_LCURLY:
        case JSON_RCURLY:
        case JSON_LSQUARE:
        case JSON_RSQUARE:
        case JSON_COLON:
        case JSON_COMMA:
        case JSON_ESCAPE:
        case JSON_INTEGER:
        case JSON_FLOAT:
        case JSON_KEYWORD:
        case JSON_STRING:
            lexer->emit(lexer, lexer->token, new_state, lexer->x, lexer->y);
            /* fall through */
        case JSON_SKIP:
            g_string_truncate(lexer->token, 0);
            new_state = IN_START;
            break;
        case IN_ERROR:
            /* XXX: To avoid having previous bad input leaving the parser in an
             * unresponsive state where we consume unpredictable amounts of
             * subsequent "good" input, percolate this error state up to the
             * tokenizer/parser by forcing a NULL object to be emitted, then
             * reset state.
             *
             * Also note that this handling is required for reliable channel
             * negotiation between QMP and the guest agent, since chr(0xFF)
             * is placed at the beginning of certain events to ensure proper
             * delivery when the channel is in an unknown state. chr(0xFF) is
             * never a valid ASCII/UTF-8 sequence, so this should reliably
             * induce an error/flush state.
             */
            lexer->emit(lexer, lexer->token, JSON_ERROR, lexer->x, lexer->y);
            g_string_truncate(lexer->token, 0);
            new_state = IN_START;
            lexer->state = new_state;
            return 0;
        default:
            break;
        }
        lexer->state = new_state;
    } while (!char_consumed && !flush);

    /* Do not let a single token grow to an arbitrarily large size,
     * this is a security consideration.
     */
    if (lexer->token->len > MAX_TOKEN_SIZE) {
        lexer->emit(lexer, lexer->token, lexer->state, lexer->x, lexer->y);
        g_string_truncate(lexer->token, 0);
        lexer->state = IN_START;
    }

    return 0;
}
|
|
|
|
|
|
|
|
/*
 * Feed @size bytes from @buffer into the lexer, one at a time.
 * Stops at the first error and propagates it; returns 0 on success.
 */
int json_lexer_feed(JSONLexer *lexer, const char *buffer, size_t size)
{
    for (size_t pos = 0; pos < size; pos++) {
        int err = json_lexer_feed_char(lexer, buffer[pos], false);

        if (err < 0) {
            return err;
        }
    }

    return 0;
}
|
|
|
|
|
|
|
|
/*
 * Force the lexer to emit any token that is still being accumulated
 * (e.g. a trailing number with no delimiter after it) by feeding a NUL
 * sentinel with flush semantics.
 */
int json_lexer_flush(JSONLexer *lexer)
{
    if (lexer->state == IN_START) {
        /* nothing buffered, nothing to do */
        return 0;
    }
    return json_lexer_feed_char(lexer, 0, true);
}
|
|
|
|
|
|
|
|
/* Release the token buffer allocated by json_lexer_init().
 * Does not free @lexer itself; the struct is owned by the caller. */
void json_lexer_destroy(JSONLexer *lexer)
{
    g_string_free(lexer->token, true);
}
|