json_tokener.c

JSON (JavaScript Object Notation) is a lightweight data-interchange format. It is easy for humans to read and write, and easy for machines to parse and generate. It is based on a subset of the JavaScript programming language (Standard ECMA-262, 3rd Edition).
Language: C
字號:
/*
 * $Id: json_tokener.c,v 1.20 2006/07/25 03:24:50 mclark Exp $
 *
 * Copyright (c) 2004, 2005 Metaparadigm Pte. Ltd.
 * Michael Clark <michael@metaparadigm.com>
 *
 * This library is free software; you can redistribute it and/or modify
 * it under the terms of the MIT license. See COPYING for details.
 *
 */

#include "config.h"

#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>
#include <string.h>

#include "bits.h"
#include "debug.h"
#include "printbuf.h"
#include "arraylist.h"
#include "json_object.h"
#include "json_tokener.h"


#if !HAVE_STRNCASECMP && defined(_MSC_VER)
  /* MSC has the version as _strnicmp */
# define strncasecmp _strnicmp
#elif !HAVE_STRNCASECMP
# error You do not have strncasecmp on your system.
#endif /* HAVE_STRNCASECMP */


static const char* json_null_str = "null";
static const char* json_true_str = "true";
static const char* json_false_str = "false";

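/* Human-readable messages for each enum json_tokener_error value; the
 * order must match the enum in json_tokener.h, since tok->err is used
 * to index this array. */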
const char* json_tokener_errors[] = {
  "success",
  "continue",
  "nesting to deep",
  "unexpected end of data",
  "unexpected character",
  "null expected",
  "boolean expected",
  "number expected",
  "array value separator ',' expected",
  "quoted object property name expected",
  "object property name separator ':' expected",
  "object value separator ',' expected",
  "invalid string sequence",
  "expected comment",
};


struct json_tokener* json_tokener_new()
{
  struct json_tokener *tok = calloc(1, sizeof(struct json_tokener));
  if(!tok) return NULL;              /* out of memory */
  tok->pb = printbuf_new();
  json_tokener_reset(tok);
  return tok;
}

void json_tokener_free(struct json_tokener *tok)
{
  if(!tok) return;                   /* guard before the tokener is dereferenced */
  json_tokener_reset(tok);
  printbuf_free(tok->pb);
  free(tok);
}

static void json_tokener_reset_level(struct json_tokener *tok, int depth)
{
  tok->stack[depth].state = json_tokener_state_eatws;
  tok->stack[depth].saved_state = json_tokener_state_start;
  json_object_put(tok->stack[depth].current);
  tok->stack[depth].current = NULL;
  free(tok->stack[depth].obj_field_name);
  tok->stack[depth].obj_field_name = NULL;
}

void json_tokener_reset(struct json_tokener *tok)
{
  int i;
  for(i = tok->depth; i >= 0; i--)
    json_tokener_reset_level(tok, i);
  tok->depth = 0;
  tok->err = json_tokener_success;
}

struct json_object* json_tokener_parse(char *str)
{
  struct json_tokener* tok;
  struct json_object* obj;

  tok = json_tokener_new();
  obj = json_tokener_parse_ex(tok, str, -1);
  if(tok->err != json_tokener_success)
    obj = error_ptr(-tok->err);
  json_tokener_free(tok);
  return obj;
}


#if !HAVE_STRNDUP
/* CAW: compliant version of strndup() */
char* strndup(const char* str, size_t n)
{
  if(str) {
    size_t len = strlen(str);
    size_t nn = min(len,n);
    char* s = (char*)malloc(sizeof(char) * (nn + 1));

    if(s) {
      memcpy(s, str, nn);
      s[nn] = '\0';
    }

    return s;
  }

  return NULL;
}
#endif


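/* These macros name the fields of the current (deepest) level of the
 * tokener's parse stack, so the state machine below reads naturally. */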
#define state  tok->stack[tok->depth].state
#define saved_state  tok->stack[tok->depth].saved_state
#define current tok->stack[tok->depth].current
#define obj_field_name tok->stack[tok->depth].obj_field_name

struct json_object* json_tokener_parse_ex(struct json_tokener *tok,
					  char *str, int len)
{
  struct json_object *obj = NULL;
  char c;

  tok->char_offset = 0;
  tok->err = json_tokener_success;

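  /* Walk the input one character at a time.  Each character is dispatched
   * on the current level's state; if the buffer ends before a complete
   * value has been parsed, tok->err is set to json_tokener_continue so the
   * caller can call json_tokener_parse_ex() again with more data. */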
  do {
    if(tok->char_offset == len) {
      if(tok->depth == 0 && state == json_tokener_state_eatws &&
	 saved_state == json_tokener_state_finish)
	tok->err = json_tokener_success;
      else
	tok->err = json_tokener_continue;
      goto out;
    }

    c = *str;
  redo_char:
    switch(state) {

    case json_tokener_state_eatws:
      if(isspace((unsigned char)c)) {
	/* okay */
      } else if(c == '/') {
	printbuf_reset(tok->pb);
	printbuf_memappend(tok->pb, &c, 1);
	state = json_tokener_state_comment_start;
      } else {
	state = saved_state;
	goto redo_char;
      }
      break;

    case json_tokener_state_start:
      switch(c) {
      case '{':
	state = json_tokener_state_eatws;
	saved_state = json_tokener_state_object_field_start;
	current = json_object_new_object();
	break;
      case '[':
	state = json_tokener_state_eatws;
	saved_state = json_tokener_state_array;
	current = json_object_new_array();
	break;
      case 'N':
      case 'n':
	state = json_tokener_state_null;
	printbuf_reset(tok->pb);
	tok->st_pos = 0;
	goto redo_char;
      case '"':
      case '\'':
	state = json_tokener_state_string;
	printbuf_reset(tok->pb);
	tok->quote_char = c;
	break;
      case 'T':
      case 't':
      case 'F':
      case 'f':
	state = json_tokener_state_boolean;
	printbuf_reset(tok->pb);
	tok->st_pos = 0;
	goto redo_char;
#if defined(__GNUC__)
      case '0' ... '9':
#else
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
#endif
      case '-':
	state = json_tokener_state_number;
	printbuf_reset(tok->pb);
	tok->is_double = 0;
	goto redo_char;
      default:
	tok->err = json_tokener_error_parse_unexpected;
	goto out;
      }
      break;

    case json_tokener_state_finish:
      if(tok->depth == 0) goto out;
      obj = json_object_get(current);
      json_tokener_reset_level(tok, tok->depth);
      tok->depth--;
      goto redo_char;

    case json_tokener_state_null:
      printbuf_memappend(tok->pb, &c, 1);
      if(strncasecmp(json_null_str, tok->pb->buf,
		     min(tok->st_pos+1, strlen(json_null_str))) == 0) {
	if(tok->st_pos == strlen(json_null_str)) {
	  current = NULL;
	  saved_state = json_tokener_state_finish;
	  state = json_tokener_state_eatws;
	  goto redo_char;
	}
      } else {
	tok->err = json_tokener_error_parse_null;
	goto out;
      }
      tok->st_pos++;
      break;

    case json_tokener_state_comment_start:
      if(c == '*') {
	state = json_tokener_state_comment;
      } else if(c == '/') {
	state = json_tokener_state_comment_eol;
      } else {
	tok->err = json_tokener_error_parse_comment;
	goto out;
      }
      printbuf_memappend(tok->pb, &c, 1);
      break;

    case json_tokener_state_comment:
      if(c == '*') state = json_tokener_state_comment_end;
      printbuf_memappend(tok->pb, &c, 1);
      break;

    case json_tokener_state_comment_eol:
      if(c == '\n') {
	mc_debug("json_tokener_comment: %s\n", tok->pb->buf);
	state = json_tokener_state_eatws;
      } else {
	printbuf_memappend(tok->pb, &c, 1);
      }
      break;

    case json_tokener_state_comment_end:
      printbuf_memappend(tok->pb, &c, 1);
      if(c == '/') {
	mc_debug("json_tokener_comment: %s\n", tok->pb->buf);
	state = json_tokener_state_eatws;
      } else {
	state = json_tokener_state_comment;
      }
      break;

    case json_tokener_state_string:
      if(c == tok->quote_char) {
	current = json_object_new_string(tok->pb->buf);
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
      } else if(c == '\\') {
	saved_state = json_tokener_state_string;
	state = json_tokener_state_string_escape;
      } else {
	printbuf_memappend(tok->pb, &c, 1);
      }
      break;

    case json_tokener_state_string_escape:
      switch(c) {
      case '"':
      case '\\':
      case '/':
	printbuf_memappend(tok->pb, &c, 1);
	state = saved_state;
	break;
      case 'b':
      case 'n':
      case 'r':
      case 't':
	if(c == 'b') printbuf_memappend(tok->pb, "\b", 1);
	else if(c == 'n') printbuf_memappend(tok->pb, "\n", 1);
	else if(c == 'r') printbuf_memappend(tok->pb, "\r", 1);
	else if(c == 't') printbuf_memappend(tok->pb, "\t", 1);
	state = saved_state;
	break;
      case 'u':
	tok->ucs_char = 0;
	tok->st_pos = 0;
	state = json_tokener_state_escape_unicode;
	break;
      default:
	tok->err = json_tokener_error_parse_string;
	goto out;
      }
      break;

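    /* Collect the four hex digits of a \uXXXX escape and append the code
     * point as one to three bytes of UTF-8.  Surrogate pairs (code points
     * above U+FFFF) are not combined here. */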
    case json_tokener_state_escape_unicode:
      if(strchr(json_hex_chars, c)) {
	tok->ucs_char += ((unsigned int)hexdigit(c) << ((3-tok->st_pos++)*4));
	if(tok->st_pos == 4) {
	  unsigned char utf_out[3];
	  if (tok->ucs_char < 0x80) {
	    utf_out[0] = tok->ucs_char;
	    printbuf_memappend(tok->pb, (char*)utf_out, 1);
	  } else if (tok->ucs_char < 0x800) {
	    utf_out[0] = 0xc0 | (tok->ucs_char >> 6);
	    utf_out[1] = 0x80 | (tok->ucs_char & 0x3f);
	    printbuf_memappend(tok->pb, (char*)utf_out, 2);
	  } else {
	    utf_out[0] = 0xe0 | (tok->ucs_char >> 12);
	    utf_out[1] = 0x80 | ((tok->ucs_char >> 6) & 0x3f);
	    utf_out[2] = 0x80 | (tok->ucs_char & 0x3f);
	    printbuf_memappend(tok->pb, (char*)utf_out, 3);
	  }
	  state = saved_state;
	}
      } else {
	tok->err = json_tokener_error_parse_string;
	goto out;
      }
      break;

    case json_tokener_state_boolean:
      printbuf_memappend(tok->pb, &c, 1);
      if(strncasecmp(json_true_str, tok->pb->buf,
		     min(tok->st_pos+1, strlen(json_true_str))) == 0) {
	if(tok->st_pos == strlen(json_true_str)) {
	  current = json_object_new_boolean(1);
	  saved_state = json_tokener_state_finish;
	  state = json_tokener_state_eatws;
	  goto redo_char;
	}
      } else if(strncasecmp(json_false_str, tok->pb->buf,
			    min(tok->st_pos+1, strlen(json_false_str))) == 0) {
	if(tok->st_pos == strlen(json_false_str)) {
	  current = json_object_new_boolean(0);
	  saved_state = json_tokener_state_finish;
	  state = json_tokener_state_eatws;
	  goto redo_char;
	}
      } else {
	tok->err = json_tokener_error_parse_boolean;
	goto out;
      }
      tok->st_pos++;
      break;

    case json_tokener_state_number:
      if(c && strchr(json_number_chars, c)) {
	printbuf_memappend(tok->pb, &c, 1);	
	if(c == '.' || c == 'e') tok->is_double = 1;
      } else {
	int numi;
	double numd;
	if(!tok->is_double && sscanf(tok->pb->buf, "%d", &numi) == 1) {
	  current = json_object_new_int(numi);
	} else if(tok->is_double && sscanf(tok->pb->buf, "%lf", &numd) == 1) {
	  current = json_object_new_double(numd);
	} else {
	  tok->err = json_tokener_error_parse_number;
	  goto out;
	}
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
	goto redo_char;
      }
      break;

    case json_tokener_state_array:
      if(c == ']') {
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
      } else {
	if(tok->depth >= JSON_TOKENER_MAX_DEPTH-1) {
	  tok->err = json_tokener_error_depth;
	  goto out;
	}
	state = json_tokener_state_array_add;
	tok->depth++;
	json_tokener_reset_level(tok, tok->depth);
	goto redo_char;
      }
      break;

    case json_tokener_state_array_add:
      json_object_array_add(current, obj);
      saved_state = json_tokener_state_array_sep;
      state = json_tokener_state_eatws;
      goto redo_char;

    case json_tokener_state_array_sep:
      if(c == ']') {
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
      } else if(c == ',') {
	saved_state = json_tokener_state_array;
	state = json_tokener_state_eatws;
      } else {
	tok->err = json_tokener_error_parse_array;
	goto out;
      }
      break;

    case json_tokener_state_object_field_start:
      if(c == '}') {
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
      } else if (c == '"' || c == '\'') {
	tok->quote_char = c;
	printbuf_reset(tok->pb);
	state = json_tokener_state_object_field;
      } else {
	tok->err = json_tokener_error_parse_object_key_name;
	goto out;
      }
      break;

    case json_tokener_state_object_field:
      if(c == tok->quote_char) {
	obj_field_name = strdup(tok->pb->buf);
	saved_state = json_tokener_state_object_field_end;
	state = json_tokener_state_eatws;
      } else if(c == '\\') {
	saved_state = json_tokener_state_object_field;
	state = json_tokener_state_string_escape;
      } else {
	printbuf_memappend(tok->pb, &c, 1);
      }
      break;

    case json_tokener_state_object_field_end:
      if(c == ':') {
	saved_state = json_tokener_state_object_value;
	state = json_tokener_state_eatws;
      } else {
	tok->err = json_tokener_error_parse_object_key_sep;
	goto out;
      }
      break;

    case json_tokener_state_object_value:
      if(tok->depth >= JSON_TOKENER_MAX_DEPTH-1) {
	tok->err = json_tokener_error_depth;
	goto out;
      }
      state = json_tokener_state_object_value_add;
      tok->depth++;
      json_tokener_reset_level(tok, tok->depth);
      goto redo_char;

    case json_tokener_state_object_value_add:
      json_object_object_add(current, obj_field_name, obj);
      free(obj_field_name);
      obj_field_name = NULL;
      saved_state = json_tokener_state_object_sep;
      state = json_tokener_state_eatws;
      goto redo_char;

    case json_tokener_state_object_sep:
      if(c == '}') {
	saved_state = json_tokener_state_finish;
	state = json_tokener_state_eatws;
      } else if(c == ',') {
	saved_state = json_tokener_state_object_field_start;
	state = json_tokener_state_eatws;
      } else {
	tok->err = json_tokener_error_parse_object_value_sep;
	goto out;
      }
      break;

    }
    str++;
    tok->char_offset++;
  } while(c);

  if(state != json_tokener_state_finish &&
     saved_state != json_tokener_state_finish)
    tok->err = json_tokener_error_parse_eof;

 out:
  if(tok->err == json_tokener_success) return json_object_get(current);
  mc_debug("json_tokener_parse_ex: error %s at offset %d\n",
	   json_tokener_errors[tok->err], tok->char_offset);
  return NULL;
}
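
The entry point above also supports incremental parsing: json_tokener_parse_ex() keeps its state in the tokener between calls and reports json_tokener_continue while the value is still incomplete. A minimal sketch of driving it with two chunks, assuming the json-c headers are on the include path (the chunk contents and the main() wrapper are illustrative only):

#include <stdio.h>
#include <string.h>

#include "json_object.h"
#include "json_tokener.h"

int main(void)
{
  /* One document split across two chunks, as it might arrive from a stream. */
  char part1[] = "{ \"status\": \"ok\", \"count\"";
  char part2[] = ": 3 }";

  struct json_tokener *tok = json_tokener_new();
  struct json_object *obj;
  if(!tok) return 1;

  obj = json_tokener_parse_ex(tok, part1, (int)strlen(part1));
  if(tok->err == json_tokener_continue) {
    /* The first chunk ended mid-value; feed more data to the same tokener. */
    obj = json_tokener_parse_ex(tok, part2, (int)strlen(part2));
  }

  if(tok->err == json_tokener_success) {
    printf("parsed: %s\n", json_object_to_json_string(obj));
    json_object_put(obj);             /* release our reference */
  } else {
    fprintf(stderr, "parse error: %s\n", json_tokener_errors[tok->err]);
  }

  json_tokener_free(tok);
  return 0;
}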
