/*
 * JSON streaming support
 *
 * Copyright IBM, Corp. 2009
 *
 * Authors:
 *  Anthony Liguori  <aliguori@us.ibm.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
 * See the COPYING.LIB file in the top-level directory.
 *
 */
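
/*
 * Typical usage (an illustrative sketch only; handle_json, buf and len are
 * hypothetical caller-side names, not part of this file):
 *
 *     static void handle_json(JSONMessageParser *parser, GQueue *tokens);
 *
 *     JSONMessageParser parser;
 *
 *     json_message_parser_init(&parser, handle_json);
 *     json_message_parser_feed(&parser, buf, len);    (repeat as data arrives)
 *     json_message_parser_flush(&parser);             (after the last chunk)
 *     json_message_parser_destroy(&parser);
 *
 * handle_json() runs once per complete top-level JSON value, receives the
 * queue of lexed tokens (NULL after an error), and takes ownership of it.
 */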

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qapi/qmp/json-lexer.h"
#include "qapi/qmp/json-streamer.h"

#define MAX_TOKEN_SIZE (64ULL << 20)
#define MAX_TOKEN_COUNT (2ULL << 20)
#define MAX_NESTING (1ULL << 10)

static void json_message_free_tokens(JSONMessageParser *parser)
{
    if (parser->tokens) {
        /* free the queued JSONTokens as well as the queue itself */
        g_queue_free_full(parser->tokens, g_free);
        parser->tokens = NULL;
    }
}
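
/*
 * Lexer callback, registered by json_message_parser_init() below.  It is
 * invoked once per lexed token: the token is copied onto parser->tokens,
 * and {}/[] nesting is tracked via brace_count/bracket_count.  When the
 * nesting returns to zero, or on a lexical error or resource-limit
 * violation, the accumulated queue is handed to parser->emit.
 */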
static void json_message_process_token(JSONLexer *lexer, GString *input,
                                        JSONTokenType type, int x, int y)
{
    JSONMessageParser *parser = container_of(lexer, JSONMessageParser, lexer);
    JSONToken *token;

    switch (type) {
    case JSON_LCURLY:
        parser->brace_count++;
        break;
    case JSON_RCURLY:
        parser->brace_count--;
        break;
    case JSON_LSQUARE:
        parser->bracket_count++;
        break;
    case JSON_RSQUARE:
        parser->bracket_count--;
        break;
    default:
        break;
    }

    token = g_malloc(sizeof(JSONToken) + input->len + 1);
    token->type = type;
    memcpy(token->str, input->str, input->len);
    token->str[input->len] = 0;
    token->x = x;
    token->y = y;

    parser->token_size += input->len;

    g_queue_push_tail(parser->tokens, token);

    if (type == JSON_ERROR) {
        goto out_emit_bad;
    } else if (parser->brace_count < 0 ||
               parser->bracket_count < 0 ||
               (parser->brace_count == 0 &&
                parser->bracket_count == 0)) {
        goto out_emit;
    } else if (parser->token_size > MAX_TOKEN_SIZE ||
               g_queue_get_length(parser->tokens) > MAX_TOKEN_COUNT ||
               parser->bracket_count + parser->brace_count > MAX_NESTING) {
        /* Security consideration: we limit total memory allocated per object
         * and the maximum recursion depth that a message can force.
         */
        goto out_emit_bad;
    }

    return;

out_emit_bad:
    /*
     * Clear out the token list and tell the parser to emit an error
     * indication by passing it a NULL list.
     */
    json_message_free_tokens(parser);
out_emit:
    /* send current list of tokens to parser and reset tokenizer */
    parser->brace_count = 0;
    parser->bracket_count = 0;
    /* parser->emit takes ownership of parser->tokens. */
    parser->emit(parser, parser->tokens);
    parser->tokens = g_queue_new();
    parser->token_size = 0;
}
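
/*
 * Initialize the parser.  func is the emit callback: for each complete
 * top-level JSON value it receives the queue of lexed tokens (NULL if an
 * error occurred) and takes ownership of that queue.
 */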
void json_message_parser_init(JSONMessageParser *parser,
                              void (*func)(JSONMessageParser *, GQueue *))
{
    parser->emit = func;
    parser->brace_count = 0;
    parser->bracket_count = 0;
    parser->tokens = g_queue_new();
    parser->token_size = 0;

    json_lexer_init(&parser->lexer, json_message_process_token);
}
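
/*
 * Feed a chunk of JSON text to the lexer; the emit callback may be invoked
 * from within this call.  The return value is passed through from
 * json_lexer_feed().
 */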
int json_message_parser_feed(JSONMessageParser *parser,
                             const char *buffer, size_t size)
{
    return json_lexer_feed(&parser->lexer, buffer, size);
}
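
/*
 * Signal end of input so the lexer completes any partially accumulated
 * token.  The return value is passed through from json_lexer_flush().
 */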
int json_message_parser_flush(JSONMessageParser *parser)
{
    return json_lexer_flush(&parser->lexer);
}
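
/*
 * Release the lexer state and any tokens still queued for an incomplete
 * message.
 */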
void json_message_parser_destroy(JSONMessageParser *parser)
{
    json_lexer_destroy(&parser->lexer);
    json_message_free_tokens(parser);
}