clibcni: remove securec new
Signed-off-by: openeuler-iSula <isula@huawei.com>
parent 2a27ee3e0a    commit 2ea21dcaae
@@ -3,7 +3,7 @@ project (clibcni)
 
 option(VERSION "set clibcni version" ON)
 if (VERSION STREQUAL "ON")
-set(CLIBCNI_VERSION "1.0.7")
+set(CLIBCNI_VERSION "1.0.8")
 endif()
 
 option(DEBUG "set clibcni gcc option" ON)
@@ -1,5 +1,5 @@
-%global _version 1.0.7
-%global _release 20191222.225602.gita3d7e9d4
+%global _version 1.0.8
+%global _release 20191225.122403.git49093ba7
 Name: clibcni
 Version: %{_version}
 Release: %{_release}
@@ -12,10 +12,8 @@ BuildRoot: %{_tmppath}/%{name}-%{version}
 
 BuildRequires: gcc
 BuildRequires: cmake
-BuildRequires: libsecurec libsecurec-devel
 BuildRequires: yajl yajl-devel
 
-Requires: libsecurec
 Requires: yajl
 
 %ifarch x86_64 aarch64
@@ -14,13 +14,6 @@ endmacro()
 find_program(CMD_PYTHON python)
 _CHECK(CMD_PYTHON "CMD_PYTHON-NOTFOUND" "python")
 
-# check securec
-find_path(LIBSECUREC_INCLUDE_DIR securec.h)
-_CHECK(LIBSECUREC_INCLUDE_DIR "LIBSECUREC_INCLUDE_DIR-NOTFOUND" "securec.h")
-
-find_library(LIBSECUREC_LIBRARY securec)
-_CHECK(LIBSECUREC_LIBRARY "LIBSECUREC_LIBRARY-NOTFOUND" "libsecurec.so")
-
 # check libyajl
 pkg_check_modules(PC_LIBYAJL REQUIRED "yajl>=2")
 if (NOT PC_LIBYAJL_FOUND)
@@ -43,7 +43,7 @@ if (CLIBCNI_GCOV)
 target_link_libraries(clibcni -lgcov)
 endif()
 
-target_link_libraries(clibcni -lyajl -lsecurec)
+target_link_libraries(clibcni -lyajl)
 
 # install all files
 install(TARGETS clibcni
@@ -28,7 +28,6 @@
 #include <stdint.h>
 #include <errno.h>
 #include <unistd.h>
-#include <securec.h>
 
 #include "utils.h"
 #include "log.h"
@@ -359,8 +358,8 @@ static int check_conf_file(const char *dir, const char * const *extensions, size
 int ret = 0;
 size_t cap = *result_size;
 
-nret = sprintf_s(fname, PATH_MAX, "%s/%s", dir, pdirent->d_name);
-if (nret < 0) {
+nret = snprintf(fname, PATH_MAX, "%s/%s", dir, pdirent->d_name);
+if (nret < 0 || nret >= PATH_MAX) {
 *err = util_strdup_s("Pathname too long");
 ERROR("Pathname too long");
 return -1;
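The hunks above and below all apply the same substitution: sprintf_s reported every failure with a negative return value, while snprintf also signals truncation by returning a value greater than or equal to the buffer size, so each converted call site gains the extra `ret >= size` check. A minimal, self-contained sketch of the resulting idiom (the join_path helper and the sample paths are illustrative, not code from the patch):

#include <limits.h>
#include <stdio.h>

/* Illustrative helper: build "dir/name" into a fixed-size buffer and treat
 * both an encoding error (ret < 0) and truncation (ret >= size) as failure,
 * mirroring the checks this commit adds at every former sprintf_s call. */
static int join_path(char *buf, size_t size, const char *dir, const char *name)
{
    int ret = snprintf(buf, size, "%s/%s", dir, name);
    if (ret < 0 || (size_t)ret >= size) {
        return -1; /* result would not fit; callers report "Pathname too long" */
    }
    return 0;
}

int main(void)
{
    char fname[PATH_MAX] = { 0 };
    return join_path(fname, sizeof(fname), "/etc/cni/net.d", "10-mynet.conf");
}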
@@ -222,8 +222,8 @@ int raw_get_version_info(const char *plugin_path, struct plugin_info **result, c
 ret = -1;
 goto free_out;
 }
-ret = sprintf_s(stdin_data, len, "{\"cniVersion\":%s}", version);
-if (ret < 0) {
+ret = snprintf(stdin_data, len, "{\"cniVersion\":%s}", version);
+if (ret < 0 || (size_t)ret >= len) {
 ERROR("Sprintf failed");
 *err = util_strdup_s("Sprintf failed");
 goto free_out;
@@ -342,8 +342,8 @@ static int prepare_raw_exec(const char *plugin_path, int pipe_stdin[2], int pipe
 int ret = 0;
 
 if (check_prepare_raw_exec_args(plugin_path)) {
-ret = sprintf_s(errmsg, len, "Empty or not absolute path: %s", plugin_path);
-if (ret < 0) {
+ret = snprintf(errmsg, len, "Empty or not absolute path: %s", plugin_path);
+if (ret < 0 || (size_t)ret >= len) {
 ERROR("Sprintf failed");
 }
 return -1;
@@ -351,8 +351,8 @@ static int prepare_raw_exec(const char *plugin_path, int pipe_stdin[2], int pipe
 
 ret = pipe2(pipe_stdin, O_CLOEXEC | O_NONBLOCK);
 if (ret < 0) {
-ret = sprintf_s(errmsg, len, "Pipe stdin failed: %s", strerror(errno));
-if (ret < 0) {
+ret = snprintf(errmsg, len, "Pipe stdin failed: %s", strerror(errno));
+if (ret < 0 || (size_t)ret >= len) {
 ERROR("Sprintf failed");
 }
 return -1;
@@ -360,8 +360,8 @@ static int prepare_raw_exec(const char *plugin_path, int pipe_stdin[2], int pipe
 
 ret = pipe2(pipe_stdout, O_CLOEXEC | O_NONBLOCK);
 if (ret < 0) {
-ret = sprintf_s(errmsg, len, "Pipe stdout failed: %s", strerror(errno));
-if (ret < 0) {
+ret = snprintf(errmsg, len, "Pipe stdout failed: %s", strerror(errno));
+if (ret < 0 || (size_t)ret >= len) {
 ERROR("Sprintf failed");
 }
 return -1;
@@ -380,8 +380,8 @@ static int write_stdin_data_to_child(int pipe_stdin[2], const char *stdin_data,
 
 len = strlen(stdin_data);
 if (util_write_nointr(pipe_stdin[1], stdin_data, len) != (ssize_t)len) {
-ret = sprintf_s(errmsg, errmsg_len, "Write stdin data failed: %s", strerror(errno));
-if (ret < 0) {
+ret = snprintf(errmsg, errmsg_len, "Write stdin data failed: %s", strerror(errno));
+if (ret < 0 || (size_t)ret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 ret = -1;
@@ -403,9 +403,9 @@ static int read_child_stdout_msg(const int pipe_stdout[2], char *errmsg, size_t
 char buffer[BUFFER_SIZE] = { 0 };
 ssize_t tmp_len = util_read_nointr(pipe_stdout[0], buffer, BUFFER_SIZE - 1);
 if (tmp_len < 0) {
-ret = sprintf_s(errmsg, errmsg_len, "%s; read stdout failed: %s", strlen(errmsg) > 0 ? errmsg : "",
+ret = snprintf(errmsg, errmsg_len, "%s; read stdout failed: %s", strlen(errmsg) > 0 ? errmsg : "",
 strerror(errno));
-if (ret < 0) {
+if (ret < 0 || (size_t)ret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 ret = -1;
@@ -434,26 +434,26 @@ static int wait_pid_for_raw_exec_child(pid_t child_pid, const int pipe_stdout[2]
 ret = read_child_stdout_msg(pipe_stdout, errmsg, errmsg_len, stdout_str);
 
 if (wait_pid < 0) {
-ret = sprintf_s(errmsg, errmsg_len, "%s; waitpid failed: %s", strlen(errmsg) > 0 ? errmsg : "",
+ret = snprintf(errmsg, errmsg_len, "%s; waitpid failed: %s", strlen(errmsg) > 0 ? errmsg : "",
 strerror(errno));
-if (ret < 0) {
+if (ret < 0 || (size_t)ret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 ret = -1;
 goto err_free_out;
 } else if (WIFEXITED(wait_status) && WEXITSTATUS(wait_status)) {
-ret = sprintf_s(errmsg, errmsg_len, "%s; get child status: %d", strlen(errmsg) > 0 ? errmsg : "",
+ret = snprintf(errmsg, errmsg_len, "%s; get child status: %d", strlen(errmsg) > 0 ? errmsg : "",
 WEXITSTATUS(wait_status));
-if (ret < 0) {
+if (ret < 0 || (size_t)ret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 ret = WEXITSTATUS(wait_status);
 *parse_exec_err = true;
 goto err_free_out;
 } else if (WIFSIGNALED(wait_status)) {
-ret = sprintf_s(errmsg, errmsg_len, "%s; child get signal: %d", strlen(errmsg) > 0 ? errmsg : "",
+ret = snprintf(errmsg, errmsg_len, "%s; child get signal: %d", strlen(errmsg) > 0 ? errmsg : "",
 WTERMSIG(wait_status));
-if (ret < 0) {
+if (ret < 0 || (size_t)ret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 ret = INK_ERR_TERM_BY_SIG;
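Several of the call sites in this file append to an error buffer that may already hold text, formatting errmsg back into itself with the "%s; ..." pattern, just as the old sprintf_s calls did. A standalone sketch of that append-a-suffix step, written here with a temporary copy so the source and destination buffers do not overlap (the helper name and the fixed-size scratch buffer are assumptions of this sketch, not code from the patch):

#include <stdio.h>
#include <string.h>

/* Illustrative sketch: append "; <detail>" to an existing message buffer,
 * keeping the old contents as a prefix when there are any. */
static void append_errmsg(char *errmsg, size_t errmsg_len, const char *detail)
{
    char prev[256] = { 0 };
    int ret;

    (void)snprintf(prev, sizeof(prev), "%s", strlen(errmsg) > 0 ? errmsg : "");
    ret = snprintf(errmsg, errmsg_len, "%s; %s", prev, detail);
    if (ret < 0 || (size_t)ret >= errmsg_len) {
        /* truncated; the call sites in this patch only log "Sprintf failed" */
    }
}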
@@ -503,9 +503,9 @@ static void make_err_message(const char *plugin_path, char **stdout_str, int ret
 parser_error json_err = NULL;
 *err = exec_error_parse_data(*stdout_str, NULL, &json_err);
 if (*err == NULL) {
-nret = sprintf_s(errmsg, errmsg_len, "exec \'%s\': %s; parse failed: %s", plugin_path,
+nret = snprintf(errmsg, errmsg_len, "exec \'%s\': %s; parse failed: %s", plugin_path,
 strlen(errmsg) > 0 ? errmsg : "", json_err);
-if (nret < 0) {
+if (nret < 0 || (size_t)nret >= errmsg_len) {
 ERROR("Sprintf failed");
 }
 nret = INK_ERR_PARSE_JSON_TO_OBJECT_FAILED;
@@ -568,7 +568,8 @@ static int raw_exec(const char *plugin_path, const char *stdin_data, char * cons
 
 child_pid = fork();
 if (child_pid < 0) {
-if (sprintf_s(errmsg, sizeof(errmsg), "Fork failed: %s", strerror(errno)) < 0) {
+ret = snprintf(errmsg, sizeof(errmsg), "Fork failed: %s", strerror(errno));
+if (ret < 0 || (size_t)ret >= sizeof(errmsg)) {
 ERROR("Sprintf failed");
 }
 ret = -1;
@@ -61,8 +61,8 @@ static int do_check_file(const char *plugin, const char *path, char **find_path,
 char tmp_path[PATH_MAX] = { 0 };
 struct stat rt_stat = { 0 };
 
-nret = sprintf_s(tmp_path, PATH_MAX, "%s/%s", path, plugin);
-if (nret < 0) {
+nret = snprintf(tmp_path, PATH_MAX, "%s/%s", path, plugin);
+if (nret < 0 || nret >= PATH_MAX) {
 ERROR("Sprint failed");
 *save_errno = INK_ERR_SPRINT_FAILED;
 return -1;
@@ -4,7 +4,7 @@ Description: commom source file
 Interface: None
 History: 2019-06-17
 '''
-# - Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# - Copyright (C) Huawei Technologies., Ltd. 2018-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@@ -15,7 +15,7 @@ History: 2019-06-17
 # - See the Mulan PSL v1 for more details.
 # - Description: generate json
 # - Author: tanyifeng
-# - Create: 2019-04-25
+# - Create: 2018-04-25
 #!/usr/bin/python -Es
 
 CODE = '''// Auto generated file. Do not edit!
@@ -33,8 +33,8 @@ yajl_gen_status map_uint(void *ctx, long long unsigned int num) {
 char numstr[MAX_NUM_STR_LEN];
 int ret;
 
-ret = sprintf_s(numstr, sizeof(numstr), "%llu", num);
-if (ret < 0) {
+ret = snprintf(numstr, sizeof(numstr), "%llu", num);
+if (ret < 0 || (size_t)ret >= sizeof(numstr)) {
 return yajl_gen_in_error_state;
 }
 return yajl_gen_number((yajl_gen)ctx, (const char *)numstr, strlen(numstr));
@@ -44,8 +44,8 @@ yajl_gen_status map_int(void *ctx, long long int num) {
 char numstr[MAX_NUM_STR_LEN];
 int ret;
 
-ret = sprintf_s(numstr, sizeof(numstr), "%lld", num);
-if (ret < 0) {
+ret = snprintf(numstr, sizeof(numstr), "%lld", num);
+if (ret < 0 || (size_t)ret >= sizeof(numstr)) {
 return yajl_gen_in_error_state;
 }
 return yajl_gen_number((yajl_gen)ctx, (const char *)numstr, strlen(numstr));
@@ -389,8 +389,8 @@ yajl_gen_status gen_json_map_int_int(void *ctx, const json_map_int_int *map, con
 for (i = 0; i < len; i++) {
 char numstr[MAX_NUM_STR_LEN];
 int nret;
-nret = sprintf_s(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
-if (nret < 0) {
+nret = snprintf(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
+if (nret < 0 || (size_t)nret >= sizeof(numstr)) {
 if (!*err && asprintf(err, "Error to print string") < 0) {
 *(err) = safe_strdup("error allocating memory");
 }
@@ -490,16 +490,8 @@ int append_json_map_int_int(json_map_int_int *map, int key, int val) {
 vals = safe_malloc(len * sizeof(int));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(int), map->keys, map->len * sizeof(int)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(int), map->values, map->len * sizeof(int)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(int));
+(void)memcpy(vals, map->values, map->len * sizeof(int));
 }
 free(map->keys);
 map->keys = keys;
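In these generated append_json_map_* helpers, keys and vals have just been allocated from the same len that bounded the removed memcpy_s calls, and len is at least map->len, so the copies cannot overrun and the EOK failure branches were effectively dead code; the patch collapses each pair into two plain memcpy calls. A self-contained sketch of that grow-and-copy step, under the assumption that the new arrays hold one element more than the old ones (names such as xmalloc and grow_int_map are illustrative):

#include <stdlib.h>
#include <string.h>

/* Stand-in for the generator's safe_malloc: abort on allocation failure. */
static void *xmalloc(size_t n)
{
    void *p = malloc(n);
    if (p == NULL) {
        abort();
    }
    return p;
}

/* Grow parallel key/value arrays by one slot and copy the old contents.
 * Because the new arrays hold old_len + 1 elements, the copies cannot
 * overflow, which is why the memcpy_s error handling could be dropped. */
static void grow_int_map(int **keys, int **vals, size_t old_len)
{
    size_t len = old_len + 1;
    int *nkeys = xmalloc(len * sizeof(int));
    int *nvals = xmalloc(len * sizeof(int));

    if (old_len > 0) {
        (void)memcpy(nkeys, *keys, old_len * sizeof(int));
        (void)memcpy(nvals, *vals, old_len * sizeof(int));
    }
    free(*keys);
    free(*vals);
    *keys = nkeys;
    *vals = nvals;
}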
@@ -530,8 +522,8 @@ yajl_gen_status gen_json_map_int_bool(void *ctx, const json_map_int_bool *map, c
 for (i = 0; i < len; i++) {
 char numstr[MAX_NUM_STR_LEN];
 int nret;
-nret = sprintf_s(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
-if (nret < 0) {
+nret = snprintf(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
+if (nret < 0 || (size_t)nret >= sizeof(numstr)) {
 if (!*err && asprintf(err, "Error to print string") < 0) {
 *(err) = safe_strdup("error allocating memory");
 }
@@ -631,16 +623,8 @@ int append_json_map_int_bool(json_map_int_bool *map, int key, bool val) {
 vals = safe_malloc(len * sizeof(bool));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(int), map->keys, map->len * sizeof(int)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(bool), map->values, map->len * sizeof(bool)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(int));
+(void)memcpy(vals, map->values, map->len * sizeof(bool));
 }
 free(map->keys);
 map->keys = keys;
@@ -671,8 +655,8 @@ yajl_gen_status gen_json_map_int_string(void *ctx, const json_map_int_string *ma
 for (i = 0; i < len; i++) {
 char numstr[MAX_NUM_STR_LEN];
 int nret;
-nret = sprintf_s(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
-if (nret < 0) {
+nret = snprintf(numstr, sizeof(numstr), "%lld", (long long int)map->keys[i]);
+if (nret < 0 || (size_t)nret >= sizeof(numstr)) {
 if (!*err && asprintf(err, "Error to print string") < 0) {
 *(err) = safe_strdup("error allocating memory");
 }
@@ -771,16 +755,8 @@ int append_json_map_int_string(json_map_int_string *map, int key, const char *va
 vals = safe_malloc(len * sizeof(char *));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(int), map->keys, map->len * sizeof(int)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(char *), map->values, map->len * sizeof(char *)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(int));
+(void)memcpy(vals, map->values, map->len * sizeof(char *));
 }
 free(map->keys);
 map->keys = keys;
@@ -897,16 +873,8 @@ int append_json_map_string_int(json_map_string_int *map, const char *key, int va
 vals = safe_malloc(len * sizeof(int));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(char *), map->keys, map->len * sizeof(char *)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(int), map->values, map->len * sizeof(int)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(char *));
+(void)memcpy(vals, map->values, map->len * sizeof(int));
 }
 free(map->keys);
 map->keys = keys;
@@ -1019,16 +987,8 @@ int append_json_map_string_bool(json_map_string_bool *map, const char *key, bool
 vals = safe_malloc(len * sizeof(bool));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(char *), map->keys, map->len * sizeof(char *)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(bool), map->values, map->len * sizeof(bool)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(char *));
+(void)memcpy(vals, map->values, map->len * sizeof(bool));
 }
 free(map->keys);
 map->keys = keys;
@@ -1148,16 +1108,8 @@ int append_json_map_string_string(json_map_string_string *map, const char *key,
 vals = safe_malloc(len * sizeof(char *));
 
 if (map->len) {
-if (memcpy_s(keys, len * sizeof(char *), map->keys, map->len * sizeof(char *)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
-if (memcpy_s(vals, len * sizeof(char *), map->values, map->len * sizeof(char *)) != EOK) {
-free(keys);
-free(vals);
-return -1;
-}
+(void)memcpy(keys, map->keys, map->len * sizeof(char *));
+(void)memcpy(vals, map->values, map->len * sizeof(char *));
 }
 free(map->keys);
 map->keys = keys;
@@ -1205,12 +1157,7 @@ char *json_marshal_string(const char *str, size_t strlen, const struct parser_co
 }
 
 json_buf = safe_malloc(gen_len + 1);
-if (memcpy_s(json_buf, gen_len + 1, gen_buf, gen_len) != EOK) {
-*err = safe_strdup("Error to memcpy json");
-free(json_buf);
-json_buf = NULL;
-goto free_out;
-}
+(void)memcpy(json_buf, gen_buf, gen_len);
 json_buf[gen_len] = '\\0';
 
 free_out:
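The same reasoning applies in json_marshal_string: json_buf is allocated with gen_len + 1 bytes immediately before the copy, so the memcpy_s error path and its cleanup can be dropped in favour of memcpy followed by an explicit terminator. A minimal sketch of that copy-and-terminate step (the function name and the yajl-style gen_buf/gen_len inputs are illustrative):

#include <stdlib.h>
#include <string.h>

/* Copy a generator buffer of known length into a fresh NUL-terminated
 * string; the destination is sized gen_len + 1, so memcpy cannot overrun. */
static char *dup_json_buf(const char *gen_buf, size_t gen_len)
{
    char *json_buf = malloc(gen_len + 1);
    if (json_buf == NULL) {
        return NULL;
    }
    (void)memcpy(json_buf, gen_buf, gen_len);
    json_buf[gen_len] = '\0';
    return json_buf;
}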
@@ -4,7 +4,7 @@ Description: commom header file
 Interface: None
 History: 2019-06-17
 '''
-# - Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# - Copyright (C) Huawei Technologies., Ltd. 2018-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@@ -15,7 +15,7 @@ History: 2019-06-17
 # - See the Mulan PSL v1 for more details.
 # - Description: generate json
 # - Author: tanyifeng
-# - Create: 2019-04-25
+# - Create: 2018-04-25
 #!/usr/bin/python -Es
 
 """
@@ -30,13 +30,13 @@ CODE = '''// Auto generated file. Do not edit!
 # ifndef _JSON_COMMON_H
 # define _JSON_COMMON_H
 
+# include <stdlib.h>
 # include <stdbool.h>
 # include <stdio.h>
 # include <string.h>
 # include <stdint.h>
 # include <yajl/yajl_tree.h>
 # include <yajl/yajl_gen.h>
-# include "securec.h"
 
 # ifdef __cplusplus
 extern "C" {
@@ -44,13 +44,13 @@ extern "C" {
 
 # undef linux
 
-//options to report error if there is unknown key found in json
+// options to report error if there is unknown key found in json
 # define OPT_PARSE_STRICT 0x01
-//options to generate all key and value
+// options to generate all key and value
 # define OPT_GEN_KAY_VALUE 0x02
-//options to generate simplify(no indent) json string
+// options to generate simplify(no indent) json string
 # define OPT_GEN_SIMPLIFY 0x04
-//options not to validate utf8 data
+// options not to validate utf8 data
 # define OPT_GEN_NO_VALIDATE_UTF8 0x08
 
 # define GEN_SET_ERROR_AND_RETURN(stat, err) { \\
@@ -4,7 +4,7 @@ Description: header class and functions
 Interface: None
 History: 2019-06-17
 '''
-# - Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# - Copyright (C) Huawei Technologies., Ltd. 2018-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@@ -15,7 +15,7 @@ History: 2019-06-17
 # - See the Mulan PSL v1 for more details.
 # - Description: generate json
 # - Author: tanyifeng
-# - Create: 2019-04-25
+# - Create: 2018-04-25
 #!/usr/bin/python -Es
 
 import traceback
@@ -74,7 +74,7 @@ class MyRoot(object):
 return self.root_path
 
 
-def trimJsonSuffix(name):
+def trim_json_suffix(name):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -82,10 +82,10 @@ def trimJsonSuffix(name):
 """
 if name.endswith(JSON_SUFFIX) or name.endswith(REF_SUFFIX):
 name = name[:-len(JSON_SUFFIX)]
-return helpers.convertToCStyle(name.replace('.', '_').replace('-', '_'))
+return helpers.conv_to_c_style(name.replace('.', '_').replace('-', '_'))
 
 
-def getPrefixPackage(filepath, rootpath):
+def get_prefix_package(filepath, rootpath):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -94,49 +94,52 @@ def getPrefixPackage(filepath, rootpath):
 realpath = os.path.realpath(filepath)
 
 if realpath.startswith(rootpath) and len(realpath) > len(rootpath):
-return helpers.convertToCStyle(os.path.dirname(realpath)[(len(rootpath) + 1):])
+return helpers.conv_to_c_style(os.path.dirname(realpath)[(len(rootpath) + 1):])
 else:
 raise RuntimeError('schema path \"%s\" is not in scope of root path \"%s\"' \
 % (realpath, rootpath))
 
 
-def getPrefixFromFile(filepath):
+def get_prefix_from_file(filepath):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
-prefix_file = trimJsonSuffix(os.path.basename(filepath))
+prefix_file = trim_json_suffix(os.path.basename(filepath))
 root_path = MyRoot.root_path
-prefix_package = getPrefixPackage(filepath, root_path)
+prefix_package = get_prefix_package(filepath, root_path)
 prefix = prefix_file if prefix_package == "" else prefix_package + "_" + prefix_file
 return prefix
 
-def schemaFromFile(filepath, srcpath):
+
+def schema_from_file(filepath, srcpath):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
 schemapath = helpers.FilePath(filepath)
-prefix = getPrefixFromFile(schemapath.name)
+prefix = get_prefix_from_file(schemapath.name)
 header = helpers.FilePath(os.path.join(srcpath, prefix + ".h"))
 source = helpers.FilePath(os.path.join(srcpath, prefix + ".c"))
 schema_info = helpers.SchemaInfo(schemapath, header, source, prefix, srcpath)
 return schema_info
 
-def makeRefName(refname, reffile):
+
+def make_ref_name(refname, reffile):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
-prefix = getPrefixFromFile(reffile)
+prefix = get_prefix_from_file(reffile)
 if refname == "" or prefix.endswith(refname):
 return prefix
-return prefix + "_" + helpers.convertToCStyle(refname)
+return prefix + "_" + helpers.conv_to_c_style(refname)
 
-def splitRefName(ref):
+
+def splite_ref_name(ref):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -145,6 +148,7 @@ def splitRefName(ref):
 tmp_f, tmp_r = ref.split("#/") if '#/' in ref else (ref, "")
 return tmp_f, tmp_r
 
+
 def merge(children):
 """
 Description: generate c language for parse json map string object
@@ -166,7 +170,8 @@ BASIC_TYPES = (
 "booleanPointer"
 )
 
-def judgeSupportedType(typ):
+
+def judge_support_type(typ):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -174,7 +179,8 @@ def judgeSupportedType(typ):
 """
 return typ in ("integer", "boolean", "string", "double") or typ in BASIC_TYPES
 
-def getRefSubref(src, subref):
+
+def get_ref_subref(src, subref):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -190,14 +196,15 @@ def getRefSubref(src, subref):
 
 return src, cur, subrefname
 
-def getRefRoot(schema_info, src, ref, curfile):
+
+def get_ref_root(schema_info, src, ref, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
 refname = ""
-tmp_f, tmp_r = splitRefName(ref)
+tmp_f, tmp_r = splite_ref_name(ref)
 
 if tmp_f == "":
 cur = src
@@ -205,7 +212,7 @@ def getRefRoot(schema_info, src, ref, curfile):
 realpath = os.path.realpath(os.path.join(os.path.dirname(curfile), tmp_f))
 curfile = realpath
 
-subschema = schemaFromFile(realpath, schema_info.filesdir)
+subschema = schema_from_file(realpath, schema_info.filesdir)
 if schema_info.refs is None:
 schema_info.refs = {}
 schema_info.refs[subschema.header.basename] = subschema
@@ -213,19 +220,20 @@
 cur = src = json.loads(i.read())
 subcur = cur
 if tmp_r != "":
-src, subcur, refname = getRefSubref(src, tmp_r)
+src, subcur, refname = get_ref_subref(src, tmp_r)
 
 if 'type' not in subcur and '$ref' in subcur:
-subf, subr = splitRefName(subcur['$ref'])
+subf, subr = splite_ref_name(subcur['$ref'])
 if subf == "":
-src, subcur, refname = getRefSubref(src, subr)
+src, subcur, refname = get_ref_subref(src, subr)
 if 'type' not in subcur:
 raise RuntimeError("Not support reference of nesting more than 2 level: ", ref)
 else:
-return getRefRoot(schema_info, src, subcur['$ref'], curfile)
-return src, subcur, curfile, makeRefName(refname, curfile)
+return get_ref_root(schema_info, src, subcur['$ref'], curfile)
+return src, subcur, curfile, make_ref_name(refname, curfile)
 
-def getTypePatternInCur(cur, schema_info, src, curfile):
+
+def get_type_pattern_incur(cur, schema_info, src, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -244,22 +252,23 @@ def getTypePatternInCur(cur, schema_info, src, curfile):
 if 'type' in value:
 val = value["type"]
 else:
-dummy_subsrc, subcur, dummy_subcurfile, dummy_subrefname = getRefRoot(
+dummy_subsrc, subcur, dummy_subcurfile, dummy_subrefname = get_ref_root(
 schema_info, src, value['$ref'], curfile)
 val = subcur['type']
 break
 
-mapKey = {
+m_key = {
 'object': 'Object',
 'string': 'String',
 'integer': 'Int',
 'boolean': 'Bool'
 }[val]
-map_val = mapKey
+map_val = m_key
 
 typ = 'map' + map_key + map_val
 return typ
 
+
 class GenerateNodeInfo(object):
 '''
 Description: Store schema information
@@ -289,7 +298,8 @@ class GenerateNodeInfo(object):
 '''
 return self.name
 
-def generateAllofArrayTypNode(node_info, src, typ, refname):
+
+def gen_all_arr_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -302,7 +312,7 @@ def generateAllofArrayTypNode(node_info, src, typ, refname):
 subtyp = None
 subtypobj = None
 required = None
-children = merge(parseList(schema_info, name, src, cur["items"]['allOf'], curfile))
+children = merge(resolve_list(schema_info, name, src, cur["items"]['allOf'], curfile))
 subtyp = children[0].typ
 subtypobj = children
 return helpers.Unite(name,
@@ -313,7 +323,8 @@
 subtypname=refname,
 required=required), src
 
-def generateAnyofArrayTypNode(node_info, src, typ, refname):
+
+def gen_any_arr_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -326,7 +337,7 @@ def generateAnyofArrayTypNode(node_info, src, typ, refname):
 subtyp = None
 subtypobj = None
 required = None
-anychildren = parseList(schema_info, name, src, cur["items"]['anyOf'], curfile)
+anychildren = resolve_list(schema_info, name, src, cur["items"]['anyOf'], curfile)
 subtyp = anychildren[0].typ
 children = anychildren[0].children
 subtypobj = children
@@ -339,7 +350,8 @@
 subtypname=refname,
 required=required), src
 
-def generateRefArrayTypNode(node_info, src, typ, refname):
+
+def gen_ref_arr_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -350,11 +362,11 @@ def generateRefArrayTypNode(node_info, src, typ, refname):
 cur = node_info.cur
 curfile = node_info.curfile
 
-item_type, src = resolveType(schema_info, name, src, cur["items"], curfile)
-ref_file, subref = splitRefName(cur['items']['$ref'])
+item_type, src = resolve_type(schema_info, name, src, cur["items"], curfile)
+ref_file, subref = splite_ref_name(cur['items']['$ref'])
 if ref_file == "":
-src, dummy_subcur, subrefname = getRefSubref(src, subref)
-refname = makeRefName(subrefname, curfile)
+src, dummy_subcur, subrefname = get_ref_subref(src, subref)
+refname = make_ref_name(subrefname, curfile)
 else:
 refname = item_type.subtypname
 return helpers.Unite(name,
@@ -365,7 +377,8 @@
 subtypname=refname,
 required=item_type.required), src
 
-def generateTypeArrayTypNode(node_info, src, typ, refname):
+
+def gen_type_arr_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -376,7 +389,7 @@ def generateTypeArrayTypNode(node_info, src, typ, refname):
 cur = node_info.cur
 curfile = node_info.curfile
 
-item_type, src = resolveType(schema_info, name, src, cur["items"], curfile)
+item_type, src = resolve_type(schema_info, name, src, cur["items"], curfile)
 return helpers.Unite(name,
 typ,
 None,
@@ -386,7 +399,7 @@
 required=item_type.required), src
 
 
-def generateArrayTypNode(node_info, src, typ, refname):
+def gen_arr_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -395,16 +408,17 @@ def generateArrayTypNode(node_info, src, typ, refname):
 cur = node_info.cur
 
 if 'allOf' in cur["items"]:
-return generateAllofArrayTypNode(node_info, src, typ, refname)
+return gen_all_arr_typnode(node_info, src, typ, refname)
 elif 'anyOf' in cur["items"]:
-return generateAnyofArrayTypNode(node_info, src, typ, refname)
+return gen_any_arr_typnode(node_info, src, typ, refname)
 elif '$ref' in cur["items"]:
-return generateRefArrayTypNode(node_info, src, typ, refname)
+return gen_ref_arr_typnode(node_info, src, typ, refname)
 elif 'type' in cur["items"]:
-return generateTypeArrayTypNode(node_info, src, typ, refname)
+return gen_type_arr_typnode(node_info, src, typ, refname)
 return None
 
-def generateObjTypNode(node_info, src, typ, refname):
+
+def gen_obj_typnode(node_info, src, typ, refname):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -420,18 +434,18 @@ def generateObjTypNode(node_info, src, typ, refname):
 required = None
 
 if 'allOf' in cur:
-children = merge(parseList(schema_info, name, src, cur['allOf'], curfile))
+children = merge(resolve_list(schema_info, name, src, cur['allOf'], curfile))
 elif 'anyOf' in cur:
-children = parseList(schema_info, name, src, cur['anyOf'], curfile)
+children = resolve_list(schema_info, name, src, cur['anyOf'], curfile)
 elif 'patternProperties' in cur:
-children = parseProperties(schema_info, name, src, cur, curfile)
+children = parse_properties(schema_info, name, src, cur, curfile)
 children[0].name = children[0].name.replace('_{1,}', 'element').replace('_{2,}', \
 'element')
 children[0].fixname = "values"
-if helpers.validBasicMapName(children[0].typ):
-children[0].name = helpers.makeBasicMapName(children[0].typ)
+if helpers.valid_basic_map_name(children[0].typ):
+children[0].name = helpers.make_basic_map_name(children[0].typ)
 else:
-children = parseProperties(schema_info, name, src, cur, curfile) \
+children = parse_properties(schema_info, name, src, cur, curfile) \
 if 'properties' in cur else None
 if 'required' in cur:
 required = cur['required']
@@ -443,14 +457,15 @@
 subtypname=refname,\
 required=required), src
 
-def getTypNotOneof(schema_info, src, cur, curfile):
+
+def get_typ_notoneof(schema_info, src, cur, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
 if 'patternProperties' in cur:
-typ = getTypePatternInCur(cur, schema_info, src, curfile)
+typ = get_type_pattern_incur(cur, schema_info, src, curfile)
 elif "type" in cur:
 typ = cur["type"]
 else:
@@ -459,7 +474,7 @@ def getTypNotOneof(schema_info, src, cur, curfile):
 return typ
 
 
-def resolveType(schema_info, name, src, cur, curfile):
+def resolve_type(schema_info, name, src, cur, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -472,31 +487,31 @@ def resolveType(schema_info, name, src, cur, curfile):
 refname = None
 
 if '$ref' in cur:
-src, cur, curfile, refname = getRefRoot(schema_info, src, cur['$ref'], curfile)
+src, cur, curfile, refname = get_ref_root(schema_info, src, cur['$ref'], curfile)
 
 if "oneOf" in cur:
 cur = cur['oneOf'][0]
 if '$ref' in cur:
-return resolveType(schema_info, name, src, cur, curfile)
+return resolve_type(schema_info, name, src, cur, curfile)
 else:
 typ = cur['type']
 else:
-typ = getTypNotOneof(schema_info, src, cur, curfile)
+typ = get_typ_notoneof(schema_info, src, cur, curfile)
 
 node_info = GenerateNodeInfo(schema_info, name, cur, curfile)
 
-if helpers.validBasicMapName(typ):
+if helpers.valid_basic_map_name(typ):
 pass
 elif typ == 'array':
-return generateArrayTypNode(node_info, src, typ, refname)
+return gen_arr_typnode(node_info, src, typ, refname)
 elif typ == 'object' or typ == 'mapStringObject':
-return generateObjTypNode(node_info, src, typ, refname)
+return gen_obj_typnode(node_info, src, typ, refname)
 elif typ == 'ArrayOfStrings':
 typ = 'array'
 subtyp = 'string'
 children = subtypobj = None
 else:
-if not judgeSupportedType(typ):
+if not judge_support_type(typ):
 raise RuntimeError("Invalid schema type: %s" % typ)
 children = None
 
@@ -509,7 +524,7 @@ def resolveType(schema_info, name, src, cur, curfile):
 required=required), src
 
 
-def parseList(schema_info, name, schema, objs, curfile):
+def resolve_list(schema_info, name, schema, objs, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -518,9 +533,10 @@ def parseList(schema_info, name, schema, objs, curfile):
 obj = []
 index = 0
 for i in objs:
-generated_name = helpers.CombinationName( \
-i['$ref'].split("/")[-1]) if '$ref' in i else helpers.CombinationName(name.name + str(index))
-node, _ = resolveType(schema_info, generated_name, schema, i, curfile)
+generated_name = helpers.CombinateName( \
+i['$ref'].split("/")[-1]) if '$ref' in i \
+else helpers.CombinateName(name.name + str(index))
+node, _ = resolve_type(schema_info, generated_name, schema, i, curfile)
 if node:
 obj.append(node)
 index += 1
@@ -529,7 +545,7 @@
 return obj
 
 
-def parseDictionary(schema_info, name, schema, objs, curfile):
+def parse_dict(schema_info, name, schema, objs, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -537,7 +553,7 @@ def parseDictionary(schema_info, name, schema, objs, curfile):
 """
 obj = []
 for i in objs:
-node, _ = resolveType(schema_info, name.append(i), schema, objs[i], curfile)
+node, _ = resolve_type(schema_info, name.append(i), schema, objs[i], curfile)
 if node:
 obj.append(node)
 if not obj:
@@ -545,19 +561,20 @@
 return obj
 
 
-def parseProperties(schema_info, name, schema, props, curfile):
+def parse_properties(schema_info, name, schema, props, curfile):
 """
 Description: generate c language for parse json map string object
 Interface: None
 History: 2019-06-17
 """
 if 'definitions' in props:
-return parseDictionary(schema_info, name, schema, props['definitions'], curfile)
+return parse_dict(schema_info, name, schema, props['definitions'], curfile)
 if 'patternProperties' in props:
-return parseDictionary(schema_info, name, schema, props['patternProperties'], curfile)
-return parseDictionary(schema_info, name, schema, props['properties'], curfile)
+return parse_dict(schema_info, name, schema, props['patternProperties'], curfile)
+return parse_dict(schema_info, name, schema, props['properties'], curfile)
 
-def handleTypeNotInSchema(schema_info, schema, prefix):
+
+def handle_type_not_in_schema(schema_info, schema, prefix):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -566,8 +583,8 @@ def handleTypeNotInSchema(schema_info, schema, prefix):
 required = None
 if 'definitions' in schema:
 return helpers.Unite( \
-helpers.CombinationName("definitions"), 'definitions', \
-parseProperties(schema_info, helpers.CombinationName(""), schema, schema, \
+helpers.CombinateName("definitions"), 'definitions', \
+parse_properties(schema_info, helpers.CombinateName(""), schema, schema, \
 schema_info.name.name), None, None, None, None)
 else:
 if len(schema) > 1:
@@ -577,15 +594,19 @@
 for value in schema:
 if 'required' in schema[value]:
 required = schema[value]['required']
-childrens = parseProperties(schema_info, helpers.CombinationName(""), schema[value], \
-schema[value], schema_info.name.name)
-value_node = helpers.Unite(helpers.CombinationName(prefix), 'object', childrens, None, None, \
-None, required)
+childrens = parse_properties(schema_info, helpers.CombinateName(""), \
+schema[value], schema[value], \
+schema_info.name.name)
+value_node = helpers.Unite(helpers.CombinateName(prefix), \
+'object', childrens, None, None, \
+None, required)
 value_nodes.append(value_node)
-return helpers.Unite(helpers.CombinationName("definitions"), 'definitions', value_nodes, None, None, \
+return helpers.Unite(helpers.CombinateName("definitions"), \
+'definitions', value_nodes, None, None, \
 None, None)
 
-def parseSchema(schema_info, schema, prefix):
+
+def parse_schema(schema_info, schema, prefix):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -593,23 +614,21 @@
 """
 required = None
 if 'type' not in schema:
-return handleTypeNotInSchema(schema_info, schema, prefix)
-
-if 'type' not in schema:
-print("No 'type' defined in schema")
-return prefix, None
+return handle_type_not_in_schema(schema_info, schema, prefix)
 
 if 'object' in schema['type']:
 if 'required' in schema:
 required = schema['required']
 return helpers.Unite(
-helpers.CombinationName(prefix), 'object',
-parseProperties(schema_info, helpers.CombinationName(""), schema, schema, schema_info.name.name), \
+helpers.CombinateName(prefix), 'object',
+parse_properties(schema_info, \
+helpers.CombinateName(""), \
+schema, schema, schema_info.name.name), \
 None, None, None, required)
 elif 'array' in schema['type']:
-item_type, _ = resolveType(schema_info, helpers.CombinationName(""), schema['items'], \
-schema['items'], schema_info.name.name)
-return helpers.Unite(helpers.CombinationName(prefix), 'array', None, item_type.typ, \
+item_type, _ = resolve_type(schema_info, helpers.CombinateName(""), \
+schema['items'], schema['items'], schema_info.name.name)
+return helpers.Unite(helpers.CombinateName(prefix), 'array', None, item_type.typ, \
 item_type.children, None, item_type.required)
 else:
 print("Not supported type '%s'") % schema['type']
@@ -631,8 +650,8 @@ def expand(tree, structs, visited):
 for i in tree.subtypobj:
 expand(i, structs, visited=visited)
 
-if tree.typ == 'array' and helpers.validBasicMapName(tree.subtyp):
-name = helpers.CombinationName(tree.name + "_element")
+if tree.typ == 'array' and helpers.valid_basic_map_name(tree.subtyp):
+name = helpers.CombinateName(tree.name + "_element")
 node = helpers.Unite(name, tree.subtyp, None)
 expand(node, structs, visited)
 
@@ -658,13 +677,13 @@ def reflection(schema_info, gen_ref):
 with open(schema_info.name.name) as schema_file:
 schema_json = json.loads(schema_file.read(), object_pairs_hook=OrderedDict)
 try:
-tree = parseSchema(schema_info, schema_json, schema_info.prefix)
+tree = parse_schema(schema_info, schema_json, schema_info.prefix)
 if tree is None:
 print("Failed parse schema")
 sys.exit(1)
 structs = expand(tree, [], {})
-headers.headerReflection(structs, schema_info, header_file)
-sources.sourceReflection(structs, schema_info, source_file, tree.typ)
+headers.header_reflect(structs, schema_info, header_file)
+sources.src_reflect(structs, schema_info, source_file, tree.typ)
 except RuntimeError:
 traceback.print_exc()
 print("Failed to parse schema file: %s") % schema_info.name.name
@@ -681,7 +700,7 @@ def reflection(schema_info, gen_ref):
 reflection(reffile, True)
 
 
-def generateCommonFiles(out):
+def gen_common_files(out):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -699,7 +718,8 @@
 fcntl.flock(source_file, fcntl.LOCK_UN)
 fcntl.flock(header_file, fcntl.LOCK_UN)
 
-def handlerSingleFile(args, srcpath, gen_ref, schemapath):
+
+def handle_single_file(args, srcpath, gen_ref, schemapath):
 """
 Description: generate c language for parse json map string object
 Interface: None
@@ -715,7 +735,7 @@ def handlerSingleFile(args, srcpath, gen_ref, schemapath):
 for dirpath, dummy_dirnames, files in os.walk(schemapath.name):
 for target_file in files:
 if target_file.endswith(JSON_SUFFIX):
-schema_info = schemaFromFile(os.path.join(dirpath, target_file), \
+schema_info = schema_from_file(os.path.join(dirpath, target_file), \
 srcpath.name)
 reflection(schema_info, gen_ref)
 else:
@@ -723,17 +743,17 @@
 for target_file in os.listdir(schemapath.name):
 fullpath = os.path.join(schemapath.name, target_file)
 if fullpath.endswith(JSON_SUFFIX) and os.path.isfile(fullpath):
-schema_info = schemaFromFile(fullpath, srcpath.name)
+schema_info = schema_from_file(fullpath, srcpath.name)
 reflection(schema_info, gen_ref)
 else:
 if schemapath.name.endswith(JSON_SUFFIX):
|
if schemapath.name.endswith(JSON_SUFFIX):
|
||||||
schema_info = schemaFromFile(schemapath.name, srcpath.name)
|
schema_info = schema_from_file(schemapath.name, srcpath.name)
|
||||||
reflection(schema_info, gen_ref)
|
reflection(schema_info, gen_ref)
|
||||||
else:
|
else:
|
||||||
print('File %s is not ends with .json') % schemapath.name
|
print('File %s is not ends with .json') % schemapath.name
|
||||||
|
|
||||||
|
|
||||||
def handlerFiles(args, srcpath):
|
def handle_files(args, srcpath):
|
||||||
"""
|
"""
|
||||||
Description: generate c language for parse json map string object
|
Description: generate c language for parse json map string object
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -742,7 +762,8 @@ def handlerFiles(args, srcpath):
|
|||||||
for path in args.path:
|
for path in args.path:
|
||||||
gen_ref = args.gen_ref
|
gen_ref = args.gen_ref
|
||||||
schemapath = helpers.FilePath(path)
|
schemapath = helpers.FilePath(path)
|
||||||
handlerSingleFile(args, srcpath, gen_ref, schemapath)
|
handle_single_file(args, srcpath, gen_ref, schemapath)
|
||||||
||||||
def main():
|
def main():
|
||||||
"""
|
"""
|
||||||
@ -795,8 +816,11 @@ def main():
|
|||||||
os.makedirs(srcpath.name)
|
os.makedirs(srcpath.name)
|
||||||
|
|
||||||
if args.gen_common:
|
if args.gen_common:
|
||||||
generateCommonFiles(srcpath.name)
|
gen_common_files(srcpath.name)
|
||||||
handlerFiles(args, srcpath)
|
handle_files(args, srcpath)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ Description: header class and functions
 Interface: None
 History: 2019-06-17
 '''
-# - Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# - Copyright (C) Huawei Technologies., Ltd. 2018-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@@ -15,11 +15,11 @@ History: 2019-06-17
 # - See the Mulan PSL v1 for more details.
 # - Description: generate json
 # - Author: tanyifeng
-# - Create: 2019-04-25
+# - Create: 2018-04-25
 #!/usr/bin/python -Es
 import helpers

-def appendHeaderArray(obj, header, prefix):
+def append_header_arr(obj, header, prefix):
     '''
     Description: Write c header file of array
     Interface: None
@ -30,28 +30,29 @@ def appendHeaderArray(obj, header, prefix):
|
|||||||
header.write("typedef struct {\n")
|
header.write("typedef struct {\n")
|
||||||
for i in obj.subtypobj:
|
for i in obj.subtypobj:
|
||||||
if i.typ == 'array':
|
if i.typ == 'array':
|
||||||
c_typ = helpers.getPrefixPointer(i.name, i.subtyp, prefix) or \
|
c_typ = helpers.get_prefixe_pointer(i.name, i.subtyp, prefix) or \
|
||||||
helpers.getMapCTypes(i.subtyp)
|
helpers.get_map_c_types(i.subtyp)
|
||||||
if i.subtypobj is not None:
|
if i.subtypobj is not None:
|
||||||
c_typ = helpers.getNameSubstr(i.name, prefix)
|
c_typ = helpers.get_name_substr(i.name, prefix)
|
||||||
|
|
||||||
if not helpers.judgeComplex(i.subtyp):
|
if not helpers.judge_complex(i.subtyp):
|
||||||
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", \
|
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", \
|
||||||
i.fixname))
|
i.fixname))
|
||||||
else:
|
else:
|
||||||
header.write(" %s **%s;\n" % (c_typ, i.fixname))
|
header.write(" %s **%s;\n" % (c_typ, i.fixname))
|
||||||
header.write(" size_t %s;\n\n" % (i.fixname + "_len"))
|
header.write(" size_t %s;\n\n" % (i.fixname + "_len"))
|
||||||
else:
|
else:
|
||||||
c_typ = helpers.getPrefixPointer(i.name, i.typ, prefix) or \
|
c_typ = helpers.get_prefixe_pointer(i.name, i.typ, prefix) or \
|
||||||
helpers.getMapCTypes(i.typ)
|
helpers.get_map_c_types(i.typ)
|
||||||
header.write(" %s%s%s;\n" % (c_typ, " " if '*' not in c_typ else "", i.fixname))
|
header.write(" %s%s%s;\n" % (c_typ, " " if '*' not in c_typ else "", i.fixname))
|
||||||
typename = helpers.getNameSubstr(obj.name, prefix)
|
typename = helpers.get_name_substr(obj.name, prefix)
|
||||||
header.write("}\n%s;\n\n" % typename)
|
header.write("}\n%s;\n\n" % typename)
|
||||||
header.write("void free_%s(%s *ptr);\n\n" % (typename, typename))
|
header.write("void free_%s(%s *ptr);\n\n" % (typename, typename))
|
||||||
header.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, parser_error *err);"\
|
header.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, parser_error *err);"\
|
||||||
"\n\n" % (typename, typename))
|
"\n\n" % (typename, typename))
|
||||||
|
|
||||||
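For reference, the header block that append_header_arr() writes has roughly the following shape. This is only an illustration with hypothetical names (the real typedef name and members are derived from the schema being processed), not code copied from the repository; yajl_val, parser_context and parser_error are declared by the generated common headers.

/* Illustrative sketch of the generated header output; names are hypothetical. */
typedef struct {
    char **args;        /* a string-array child becomes a char ** plus a length */
    size_t args_len;
}
example_conf_element;

void free_example_conf_element(example_conf_element *ptr);

example_conf_element *make_example_conf_element(yajl_val tree, const struct parser_context *ctx, parser_error *err);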
def appendHeaderMapStrObj(obj, header, prefix):
|
|
||||||
|
def append_header_map_str_obj(obj, header, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Write c header file of mapStringObject
|
Description: Write c header file of mapStringObject
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -60,86 +61,89 @@ def appendHeaderMapStrObj(obj, header, prefix):
|
|||||||
child = obj.children[0]
|
child = obj.children[0]
|
||||||
header.write("typedef struct {\n")
|
header.write("typedef struct {\n")
|
||||||
header.write(" char **keys;\n")
|
header.write(" char **keys;\n")
|
||||||
if helpers.validBasicMapName(child.typ):
|
if helpers.valid_basic_map_name(child.typ):
|
||||||
c_typ = helpers.getPrefixPointer("", child.typ, "")
|
c_typ = helpers.get_prefixe_pointer("", child.typ, "")
|
||||||
elif child.subtypname:
|
elif child.subtypname:
|
||||||
c_typ = child.subtypname
|
c_typ = child.subtypname
|
||||||
else:
|
else:
|
||||||
c_typ = helpers.getPrefixPointer(child.name, child.typ, prefix)
|
c_typ = helpers.get_prefixe_pointer(child.name, child.typ, prefix)
|
||||||
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
||||||
header.write(" size_t len;\n")
|
header.write(" size_t len;\n")
|
||||||
|
|
||||||
def appendHeaderChildArray(child, header, prefix):
|
|
||||||
|
def append_header_child_arr(child, header, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Write c header file of array of child
|
Description: Write c header file of array of child
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
if helpers.getMapCTypes(child.subtyp) != "":
|
if helpers.get_map_c_types(child.subtyp) != "":
|
||||||
c_typ = helpers.getMapCTypes(child.subtyp)
|
c_typ = helpers.get_map_c_types(child.subtyp)
|
||||||
elif helpers.validBasicMapName(child.subtyp):
|
elif helpers.valid_basic_map_name(child.subtyp):
|
||||||
c_typ = '%s *' % helpers.makeBasicMapName(child.subtyp)
|
c_typ = '%s *' % helpers.make_basic_map_name(child.subtyp)
|
||||||
elif child.subtypname is not None:
|
elif child.subtypname is not None:
|
||||||
c_typ = child.subtypname
|
c_typ = child.subtypname
|
||||||
elif child.subtypobj is not None:
|
elif child.subtypobj is not None:
|
||||||
c_typ = helpers.getNameSubstr(child.name, prefix)
|
c_typ = helpers.get_name_substr(child.name, prefix)
|
||||||
else:
|
else:
|
||||||
c_typ = helpers.getPrefixPointer(child.name, child.subtyp, prefix)
|
c_typ = helpers.get_prefixe_pointer(child.name, child.subtyp, prefix)
|
||||||
|
|
||||||
if helpers.validBasicMapName(child.subtyp):
|
if helpers.valid_basic_map_name(child.subtyp):
|
||||||
header.write(" %s **%s;\n" % (helpers.makeBasicMapName(child.subtyp), child.fixname))
|
header.write(" %s **%s;\n" % (helpers.make_basic_map_name(child.subtyp), child.fixname))
|
||||||
elif not helpers.judgeComplex(child.subtyp):
|
elif not helpers.judge_complex(child.subtyp):
|
||||||
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
header.write(" %s%s*%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
||||||
else:
|
else:
|
||||||
header.write(" %s%s**%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
header.write(" %s%s**%s;\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
||||||
header.write(" size_t %s;\n\n" % (child.fixname + "_len"))
|
header.write(" size_t %s;\n\n" % (child.fixname + "_len"))
|
||||||
|
|
||||||
def appendHeaderChildOthers(child, header, prefix):
|
|
||||||
|
def append_header_child_others(child, header, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Write c header file of others of child
|
Description: Write c header file of others of child
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
if helpers.getMapCTypes(child.typ) != "":
|
if helpers.get_map_c_types(child.typ) != "":
|
||||||
c_typ = helpers.getMapCTypes(child.typ)
|
c_typ = helpers.get_map_c_types(child.typ)
|
||||||
elif helpers.validBasicMapName(child.typ):
|
elif helpers.valid_basic_map_name(child.typ):
|
||||||
c_typ = '%s *' % helpers.makeBasicMapName(child.typ)
|
c_typ = '%s *' % helpers.make_basic_map_name(child.typ)
|
||||||
elif child.subtypname:
|
elif child.subtypname:
|
||||||
c_typ = helpers.getPrefixPointer(child.subtypname, child.typ, "")
|
c_typ = helpers.get_prefixe_pointer(child.subtypname, child.typ, "")
|
||||||
else:
|
else:
|
||||||
c_typ = helpers.getPrefixPointer(child.name, child.typ, prefix)
|
c_typ = helpers.get_prefixe_pointer(child.name, child.typ, prefix)
|
||||||
header.write(" %s%s%s;\n\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
header.write(" %s%s%s;\n\n" % (c_typ, " " if '*' not in c_typ else "", child.fixname))
|
||||||
|
|
||||||
def appendTypeCHeader(obj, header, prefix):
|
|
||||||
|
def append_type_c_header(obj, header, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Write c header file
|
Description: Write c header file
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
if not helpers.judgeComplex(obj.typ):
|
if not helpers.judge_complex(obj.typ):
|
||||||
return
|
return
|
||||||
|
|
||||||
if obj.typ == 'array':
|
if obj.typ == 'array':
|
||||||
appendHeaderArray(obj, header, prefix)
|
append_header_arr(obj, header, prefix)
|
||||||
return
|
return
|
||||||
|
|
||||||
if obj.typ == 'mapStringObject':
|
if obj.typ == 'mapStringObject':
|
||||||
if obj.subtypname is not None:
|
if obj.subtypname is not None:
|
||||||
return
|
return
|
||||||
appendHeaderMapStrObj(obj, header, prefix)
|
append_header_map_str_obj(obj, header, prefix)
|
||||||
elif obj.typ == 'object':
|
elif obj.typ == 'object':
|
||||||
if obj.subtypname is not None:
|
if obj.subtypname is not None:
|
||||||
return
|
return
|
||||||
header.write("typedef struct {\n")
|
header.write("typedef struct {\n")
|
||||||
if obj.children is None:
|
if obj.children is None:
|
||||||
header.write(" char unuseful;//unuseful definition to avoid empty struct\n")
|
header.write(" char unuseful; // unuseful definition to avoid empty struct\n")
|
||||||
for i in obj.children or [ ]:
|
for i in obj.children or []:
|
||||||
if i.typ == 'array':
|
if i.typ == 'array':
|
||||||
appendHeaderChildArray(i, header, prefix)
|
append_header_child_arr(i, header, prefix)
|
||||||
else:
|
else:
|
||||||
appendHeaderChildOthers(i, header, prefix)
|
append_header_child_others(i, header, prefix)
|
||||||
|
|
||||||
typename = helpers.getPrefixName(obj.name, prefix)
|
typename = helpers.get_prefixe_name(obj.name, prefix)
|
||||||
header.write("}\n%s;\n\n" % typename)
|
header.write("}\n%s;\n\n" % typename)
|
||||||
header.write("void free_%s(%s *ptr);\n\n" % (typename, typename))
|
header.write("void free_%s(%s *ptr);\n\n" % (typename, typename))
|
||||||
header.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, parser_error *err)"\
|
header.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, parser_error *err)"\
|
||||||
@ -147,7 +151,8 @@ def appendTypeCHeader(obj, header, prefix):
|
|||||||
header.write("yajl_gen_status gen_%s(yajl_gen g, const %s *ptr, const struct parser_context "\
|
header.write("yajl_gen_status gen_%s(yajl_gen g, const %s *ptr, const struct parser_context "\
|
||||||
"*ctx, parser_error *err);\n\n" % (typename, typename))
|
"*ctx, parser_error *err);\n\n" % (typename, typename))
|
||||||
|
|
||||||
def headerReflection(structs, schema_info, header):
|
|
||||||
|
def header_reflect(structs, schema_info, header):
|
||||||
'''
|
'''
|
||||||
Description: Reflection header files
|
Description: Reflection header files
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -168,7 +173,7 @@ def headerReflection(structs, schema_info, header):
|
|||||||
header.write("#endif\n\n")
|
header.write("#endif\n\n")
|
||||||
|
|
||||||
for i in structs:
|
for i in structs:
|
||||||
appendTypeCHeader(i, header, prefix)
|
append_type_c_header(i, header, prefix)
|
||||||
length = len(structs)
|
length = len(structs)
|
||||||
toptype = structs[length - 1].typ if length != 0 else ""
|
toptype = structs[length - 1].typ if length != 0 else ""
|
||||||
if toptype == 'object':
|
if toptype == 'object':
|
||||||
@ -195,3 +200,4 @@ def headerReflection(structs, schema_info, header):
|
|||||||
header.write("}\n")
|
header.write("}\n")
|
||||||
header.write("#endif\n\n")
|
header.write("#endif\n\n")
|
||||||
header.write("#endif\n\n")
|
header.write("#endif\n\n")
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ Description: helper class and functions
 Interface: None
 History: 2019-06-17
 '''
-# - Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+# - Copyright (C) Huawei Technologies., Ltd. 2018-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@@ -15,21 +15,21 @@ History: 2019-06-17
 # - See the Mulan PSL v1 for more details.
 # - Description: generate json
 # - Author: tanyifeng
-# - Create: 2019-04-25
+# - Create: 2018-04-25
 #!/usr/bin/python -Es
 import os
 import sys

-def appendSeparator(subStr):
+def append_separator(substr):
     '''
     Description: append only '_' at last position of subStr
     Interface: None
     History: 2019-09-20
     '''
-    if subStr and subStr[-1] != '_':
-        subStr.append('_')
+    if substr and substr[-1] != '_':
+        substr.append('_')

-def convertToCStyle(name):
+def conv_to_c_style(name):
     '''
     Description: convert name to linux c format
     Interface: None
@ -38,33 +38,33 @@ def convertToCStyle(name):
|
|||||||
if name is None or name == "":
|
if name is None or name == "":
|
||||||
return ""
|
return ""
|
||||||
name = name.replace('.', '_').replace('-', '_').replace('/', '_')
|
name = name.replace('.', '_').replace('-', '_').replace('/', '_')
|
||||||
subStr = []
|
substr = []
|
||||||
preindex = 0
|
preindex = 0
|
||||||
index = 0
|
index = 0
|
||||||
for index, char in enumerate(name):
|
for index, char in enumerate(name):
|
||||||
if char == '_':
|
if char == '_':
|
||||||
appendSeparator(subStr)
|
append_separator(substr)
|
||||||
subStr.append(name[preindex:index].lower())
|
substr.append(name[preindex:index].lower())
|
||||||
preindex = index + 1
|
preindex = index + 1
|
||||||
if not char.isupper() and name[preindex].isupper() and \
|
if not char.isupper() and name[preindex].isupper() and \
|
||||||
name[preindex + 1].isupper():
|
name[preindex + 1].isupper():
|
||||||
appendSeparator(subStr)
|
append_separator(substr)
|
||||||
subStr.append(name[preindex:index - 1].lower())
|
substr.append(name[preindex:index - 1].lower())
|
||||||
preindex = index - 1
|
preindex = index - 1
|
||||||
continue
|
continue
|
||||||
if char.isupper() and index > 0 and name[index - 1].islower():
|
if char.isupper() and index > 0 and name[index - 1].islower():
|
||||||
appendSeparator(subStr)
|
append_separator(substr)
|
||||||
subStr.append(name[preindex:index].lower())
|
substr.append(name[preindex:index].lower())
|
||||||
preindex = index
|
preindex = index
|
||||||
|
|
||||||
if preindex <= index and index >= 0 and name[index] != '_' and \
|
if preindex <= index and index >= 0 and name[index] != '_' and \
|
||||||
preindex != 0:
|
preindex != 0:
|
||||||
appendSeparator(subStr)
|
append_separator(substr)
|
||||||
subStr.append(name[preindex:index + 1].lower())
|
substr.append(name[preindex:index + 1].lower())
|
||||||
result = ''.join(subStr)
|
result = ''.join(substr)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def getMapCTypes(typ):
|
def get_map_c_types(typ):
|
||||||
'''
|
'''
|
||||||
Description: Get map c types
|
Description: Get map c types
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -103,7 +103,7 @@ def getMapCTypes(typ):
|
|||||||
return map_c_types[typ]
|
return map_c_types[typ]
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
def validBasicMapName(typ):
|
def valid_basic_map_name(typ):
|
||||||
'''
|
'''
|
||||||
Description: Valid basic map name
|
Description: Valid basic map name
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -112,14 +112,14 @@ def validBasicMapName(typ):
|
|||||||
return typ != 'mapStringObject' and hasattr(typ, 'startswith') and \
|
return typ != 'mapStringObject' and hasattr(typ, 'startswith') and \
|
||||||
typ.startswith('map')
|
typ.startswith('map')
|
||||||
|
|
||||||
def makeBasicMapName(mapname):
|
def make_basic_map_name(mapname):
|
||||||
'''
|
'''
|
||||||
Description: Make basic map name
|
Description: Make basic map name
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
basic_map_types = ('string', 'int', 'bool')
|
basic_map_types = ('string', 'int', 'bool')
|
||||||
parts = convertToCStyle(mapname).split('_')
|
parts = conv_to_c_style(mapname).split('_')
|
||||||
if len(parts) != 3 or parts[0] != 'map' or \
|
if len(parts) != 3 or parts[0] != 'map' or \
|
||||||
(parts[1] not in basic_map_types) or \
|
(parts[1] not in basic_map_types) or \
|
||||||
(parts[2] not in basic_map_types):
|
(parts[2] not in basic_map_types):
|
||||||
@ -128,7 +128,7 @@ def makeBasicMapName(mapname):
|
|||||||
return "json_map_%s_%s" % (parts[1], parts[2])
|
return "json_map_%s_%s" % (parts[1], parts[2])
|
||||||
|
|
||||||
|
|
||||||
def getNameSubstr(name, prefix):
|
def get_name_substr(name, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Make array name
|
Description: Make array name
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -137,7 +137,7 @@ def getNameSubstr(name, prefix):
|
|||||||
return "%s_element" % prefix if name is None or name == "" or prefix == name \
|
return "%s_element" % prefix if name is None or name == "" or prefix == name \
|
||||||
else "%s_%s_element" % (prefix, name)
|
else "%s_%s_element" % (prefix, name)
|
||||||
|
|
||||||
def getPrefixName(name, prefix):
|
def get_prefixe_name(name, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Make name
|
Description: Make name
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -149,19 +149,19 @@ def getPrefixName(name, prefix):
|
|||||||
return "%s" % name
|
return "%s" % name
|
||||||
return "%s_%s" % (prefix, name)
|
return "%s_%s" % (prefix, name)
|
||||||
|
|
||||||
def getPrefixPointer(name, typ, prefix):
|
def get_prefixe_pointer(name, typ, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Make pointer name
|
Description: Make pointer name
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
if typ != 'object' and typ != 'mapStringObject' and \
|
if typ != 'object' and typ != 'mapStringObject' and \
|
||||||
not validBasicMapName(typ):
|
not valid_basic_map_name(typ):
|
||||||
return ""
|
return ""
|
||||||
return '%s *' % makeBasicMapName(typ) if validBasicMapName(typ) \
|
return '%s *' % make_basic_map_name(typ) if valid_basic_map_name(typ) \
|
||||||
else "%s *" % getPrefixName(name, prefix)
|
else "%s *" % get_prefixe_name(name, prefix)
|
||||||
|
|
||||||
def judgeComplex(typ):
|
def judge_complex(typ):
|
||||||
'''
|
'''
|
||||||
Description: Check compound object
|
Description: Check compound object
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -169,7 +169,7 @@ def judgeComplex(typ):
|
|||||||
'''
|
'''
|
||||||
return typ in ('object', 'array', 'mapStringObject')
|
return typ in ('object', 'array', 'mapStringObject')
|
||||||
|
|
||||||
def judgeDataType(typ):
|
def judge_data_type(typ):
|
||||||
'''
|
'''
|
||||||
Description: Check numeric type
|
Description: Check numeric type
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -180,7 +180,7 @@ def judgeDataType(typ):
|
|||||||
return True
|
return True
|
||||||
return typ in ("integer", "UID", "GID", "double")
|
return typ in ("integer", "UID", "GID", "double")
|
||||||
|
|
||||||
def judgeDataPointerType(typ):
|
def judge_data_pointer_type(typ):
|
||||||
'''
|
'''
|
||||||
Description: Check numeric pointer type
|
Description: Check numeric pointer type
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -190,7 +190,7 @@ def judgeDataPointerType(typ):
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def obtainDataPointerType(typ):
|
def obtain_data_pointer_type(typ):
|
||||||
'''
|
'''
|
||||||
Description: Get numeric pointer type
|
Description: Get numeric pointer type
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -199,22 +199,22 @@ def obtainDataPointerType(typ):
|
|||||||
index = typ.find("Pointer")
|
index = typ.find("Pointer")
|
||||||
return typ[0:index] if index != -1 else ""
|
return typ[0:index] if index != -1 else ""
|
||||||
|
|
||||||
def obtainPointer(name, typ, prefix):
|
def obtain_pointer(name, typ, prefix):
|
||||||
'''
|
'''
|
||||||
Description: Obtain pointer string
|
Description: Obtain pointer string
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
ptr = getPrefixPointer(name, typ, prefix)
|
ptr = get_prefixe_pointer(name, typ, prefix)
|
||||||
if ptr != "":
|
if ptr != "":
|
||||||
return ptr
|
return ptr
|
||||||
|
|
||||||
return "char *" if typ == "string" else \
|
return "char *" if typ == "string" else \
|
||||||
("%s *" % typ if typ == "ArrayOfStrings" else "")
|
("%s *" % typ if typ == "ArrayOfStrings" else "")
|
||||||
|
|
||||||
class CombinationName(object):
|
class CombinateName(object):
|
||||||
'''
|
'''
|
||||||
Description: Store CombinationName information
|
Description: Store CombinateName information
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
@ -236,7 +236,7 @@ class CombinationName(object):
|
|||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
'''
|
'''
|
||||||
prefix_name = self.name + '_' if self.name != "" else ""
|
prefix_name = self.name + '_' if self.name != "" else ""
|
||||||
return CombinationName(prefix_name + leaf, leaf)
|
return CombinateName(prefix_name + leaf, leaf)
|
||||||
|
|
||||||
|
|
||||||
class Unite(object):
|
class Unite(object):
|
||||||
@ -253,9 +253,9 @@ class Unite(object):
|
|||||||
self.subtypobj = subtypobj
|
self.subtypobj = subtypobj
|
||||||
self.subtypname = subtypname
|
self.subtypname = subtypname
|
||||||
self.required = required
|
self.required = required
|
||||||
self.name = convertToCStyle(name.name.replace('.', '_'))
|
self.name = conv_to_c_style(name.name.replace('.', '_'))
|
||||||
self.origname = name.leaf or name.name
|
self.origname = name.leaf or name.name
|
||||||
self.fixname = convertToCStyle(self.origname.replace('.', '_'))
|
self.fixname = conv_to_c_style(self.origname.replace('.', '_'))
|
||||||
|
|
||||||
@ -297,7 +297,7 @@ class SchemaInfo(object):
|
|||||||
|
|
||||||
def __init__(self, name, header, source, prefix, filesdir, refs=None):
|
def __init__(self, name, header, source, prefix, filesdir, refs=None):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.fileprefix = convertToCStyle( \
|
self.fileprefix = conv_to_c_style( \
|
||||||
name.basename.replace('.', '_').replace('-', '_'))
|
name.basename.replace('.', '_').replace('-', '_'))
|
||||||
self.header = header
|
self.header = header
|
||||||
self.source = source
|
self.source = source
|
||||||
@ -311,3 +311,7 @@ class SchemaInfo(object):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.__repr__(self)
|
return self.__repr__(self)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
 /******************************************************************************
- * Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+ * Copyright (c) Huawei Technologies Co., Ltd. 2018-2019. All rights reserved.
  * clibcni licensed under the Mulan PSL v1.
  * You can use this software according to the terms and conditions of the Mulan PSL v1.
  * You may obtain a copy of Mulan PSL v1 at:
@@ -9,7 +9,7 @@
  * PURPOSE.
  * See the Mulan PSL v1 for more details.
  * Author: tanyifeng
- * Create: 2019-04-25
+ * Create: 2018-11-1
  * Description: provide file read functions
  ********************************************************************************/
 #include <unistd.h>
@@ -23,7 +23,6 @@
 #include <stdint.h>

 #include <config.h>
-#include "securec.h"
 #include "read_file.h"

 #ifndef O_CLOEXEC
@@ -57,8 +56,6 @@ char *fread_file(FILE *stream, size_t *length)

     while (1) {
         size_t ret, newsize, sizejudge;
-        int pret;
-        errno_t rc = EOK;
         sizejudge = (JSON_MAX_SIZE - BUFSIZ) - 1;
         if (sizejudge < off) {
             goto out;
@@ -71,15 +68,9 @@ char *fread_file(FILE *stream, size_t *length)
         }

         if (buf != NULL) {
-            pret = memcpy_s(tmpbuf, newsize, buf, off);
-            if (pret) {
-                goto out;
-            }
+            (void)memcpy(tmpbuf, buf, off);

-            rc = memset_s(buf, off, 0, off);
-            if (rc != EOK) {
-                goto out;
-            }
+            (void)memset(buf, 0, off);

             free(buf);
         }
|||||||
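The fread_file() hunk above is the core of the securec removal in this file: the bounds-checked memcpy_s()/memset_s() calls and their error paths are replaced with plain memcpy()/memset(), which is safe here because the destination buffer was just allocated with the required size. A minimal, self-contained sketch of that pattern (with hypothetical helper and size names, not the library code itself) looks like this:

#include <stdlib.h>
#include <string.h>

/*
 * Sketch only: grow a read buffer using plain libc calls, mirroring the
 * pattern the diff above switches to.  The caller guarantees used <= newsize,
 * which is what makes the unchecked memcpy() safe.
 */
static char *grow_buffer(char *buf, size_t used, size_t newsize)
{
    char *tmpbuf = calloc(1, newsize);   /* zero-filled replacement buffer */
    if (tmpbuf == NULL) {
        return NULL;
    }
    if (buf != NULL) {
        (void)memcpy(tmpbuf, buf, used); /* copy the bytes read so far */
        (void)memset(buf, 0, used);      /* scrub the old buffer before freeing it */
        free(buf);
    }
    return tmpbuf;
}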
@@ -1,5 +1,5 @@
 /*****************************************************************************
- * Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+ * Copyright (c) Huawei Technologies Co., Ltd. 2018-2019. All rights reserved.
  * clibcni licensed under the Mulan PSL v1.
  * You can use this software according to the terms and conditions of the Mulan PSL v1.
  * You may obtain a copy of Mulan PSL v1 at:
@@ -9,7 +9,7 @@
  * PURPOSE.
  * See the Mulan PSL v1 for more details.
  * Author: tanyifeng
- * Create: 2019-4-08
+ * Create: 2018-11-08
  * Description: provide container read file definition
  ****************************************************************************/
 #ifndef __JSON_READ_FILE_H_
|
|||||||
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 """
-Copyright (C) Huawei Technologies Co., Ltd. 2019. All rights reserved.
+Copyright (C) Huawei Technologies., Ltd. 2017-2019. All rights reserved.
 # - clibcni licensed under the Mulan PSL v1.
 # - You can use this software according to the terms and conditions of the Mulan PSL v1.
 # - You may obtain a copy of Mulan PSL v1 at:
@ -19,30 +19,31 @@ History: 2018-04-25 created
|
|||||||
import helpers
|
import helpers
|
||||||
|
|
||||||
|
|
||||||
def appendCCode(obj, c_file, prefix):
|
def append_c_code(obj, c_file, prefix):
|
||||||
"""
|
"""
|
||||||
Description: append c language code to file
|
Description: append c language code to file
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
"""
|
"""
|
||||||
parseJsonToC(obj, c_file, prefix)
|
parse_json_to_c(obj, c_file, prefix)
|
||||||
makeCFree(obj, c_file, prefix)
|
make_c_free(obj, c_file, prefix)
|
||||||
obtainCJson(obj, c_file, prefix)
|
get_c_json(obj, c_file, prefix)
|
||||||
|
|
||||||
def parseMapStringObject(obj, c_file, prefix, obj_typename):
|
|
||||||
|
def parse_map_string_obj(obj, c_file, prefix, obj_typename):
|
||||||
"""
|
"""
|
||||||
Description: generate c language for parse json map string object
|
Description: generate c language for parse json map string object
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
"""
|
"""
|
||||||
child = obj.children[0]
|
child = obj.children[0]
|
||||||
if helpers.validBasicMapName(child.typ):
|
if helpers.valid_basic_map_name(child.typ):
|
||||||
childname = helpers.makeBasicMapName(child.typ)
|
childname = helpers.make_basic_map_name(child.typ)
|
||||||
else:
|
else:
|
||||||
if child.subtypname:
|
if child.subtypname:
|
||||||
childname = child.subtypname
|
childname = child.subtypname
|
||||||
else:
|
else:
|
||||||
childname = helpers.getPrefixName(child.name, prefix)
|
childname = helpers.get_prefixe_name(child.name, prefix)
|
||||||
c_file.write(' if (YAJL_GET_OBJECT(tree) != NULL && YAJL_GET_OBJECT(tree)->len > 0) {\n')
|
c_file.write(' if (YAJL_GET_OBJECT(tree) != NULL && YAJL_GET_OBJECT(tree)->len > 0) {\n')
|
||||||
c_file.write(' size_t i;\n')
|
c_file.write(' size_t i;\n')
|
||||||
c_file.write(' ret->len = YAJL_GET_OBJECT(tree)->len;\n')
|
c_file.write(' ret->len = YAJL_GET_OBJECT(tree)->len;\n')
|
||||||
@ -63,7 +64,8 @@ def parseMapStringObject(obj, c_file, prefix, obj_typename):
|
|||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
|
|
||||||
def parseObjectType(obj, c_file, prefix, obj_typename):
|
|
||||||
|
def parse_obj_type(obj, c_file, prefix, obj_typename):
|
||||||
"""
|
"""
|
||||||
Description: generate c language for parse object type
|
Description: generate c language for parse object type
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -71,34 +73,34 @@ def parseObjectType(obj, c_file, prefix, obj_typename):
|
|||||||
"""
|
"""
|
||||||
if obj.typ == 'string':
|
if obj.typ == 'string':
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
readValueGenerator(c_file, 2, 'get_val(tree, "%s", yajl_t_string)' % obj.origname, \
|
read_val_generator(c_file, 2, 'get_val(tree, "%s", yajl_t_string)' % obj.origname, \
|
||||||
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
elif helpers.judgeDataType(obj.typ):
|
elif helpers.judge_data_type(obj.typ):
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
readValueGenerator(c_file, 2, 'get_val(tree, "%s", yajl_t_number)' % obj.origname, \
|
read_val_generator(c_file, 2, 'get_val(tree, "%s", yajl_t_number)' % obj.origname, \
|
||||||
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
elif helpers.judgeDataPointerType(obj.typ):
|
elif helpers.judge_data_pointer_type(obj.typ):
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
readValueGenerator(c_file, 2, 'get_val(tree, "%s", yajl_t_number)' % obj.origname, \
|
read_val_generator(c_file, 2, 'get_val(tree, "%s", yajl_t_number)' % obj.origname, \
|
||||||
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
if obj.typ == 'boolean':
|
if obj.typ == 'boolean':
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
readValueGenerator(c_file, 2, 'get_val(tree, "%s", yajl_t_true)' % obj.origname, \
|
read_val_generator(c_file, 2, 'get_val(tree, "%s", yajl_t_true)' % obj.origname, \
|
||||||
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
if obj.typ == 'booleanPointer':
|
if obj.typ == 'booleanPointer':
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
readValueGenerator(c_file, 2, 'get_val(tree, "%s", yajl_t_true)' % obj.origname, \
|
read_val_generator(c_file, 2, 'get_val(tree, "%s", yajl_t_true)' % obj.origname, \
|
||||||
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
"ret->%s" % obj.fixname, obj.typ, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
elif obj.typ == 'object' or obj.typ == 'mapStringObject':
|
elif obj.typ == 'object' or obj.typ == 'mapStringObject':
|
||||||
if obj.subtypname is not None:
|
if obj.subtypname is not None:
|
||||||
typename = obj.subtypname
|
typename = obj.subtypname
|
||||||
else:
|
else:
|
||||||
typename = helpers.getPrefixName(obj.name, prefix)
|
typename = helpers.get_prefixe_name(obj.name, prefix)
|
||||||
c_file.write(
|
c_file.write(
|
||||||
' ret->%s = make_%s(get_val(tree, "%s", yajl_t_object), ctx, err);\n' \
|
' ret->%s = make_%s(get_val(tree, "%s", yajl_t_object), ctx, err);\n' \
|
||||||
% (obj.fixname, typename, obj.origname))
|
% (obj.fixname, typename, obj.origname))
|
||||||
@ -110,7 +112,7 @@ def parseObjectType(obj, c_file, prefix, obj_typename):
|
|||||||
if obj.subtypname:
|
if obj.subtypname:
|
||||||
typename = obj.subtypname
|
typename = obj.subtypname
|
||||||
else:
|
else:
|
||||||
typename = helpers.getNameSubstr(obj.name, prefix)
|
typename = helpers.get_name_substr(obj.name, prefix)
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
c_file.write(' yajl_val tmp = get_val(tree, "%s", yajl_t_array);\n' \
|
c_file.write(' yajl_val tmp = get_val(tree, "%s", yajl_t_array);\n' \
|
||||||
% (obj.origname))
|
% (obj.origname))
|
||||||
@ -155,18 +157,18 @@ def parseObjectType(obj, c_file, prefix, obj_typename):
|
|||||||
' ret->%s = safe_malloc((YAJL_GET_ARRAY(tmp)->len + 1) *' \
|
' ret->%s = safe_malloc((YAJL_GET_ARRAY(tmp)->len + 1) *' \
|
||||||
' sizeof(*ret->%s));\n' % (obj.fixname, obj.fixname))
|
' sizeof(*ret->%s));\n' % (obj.fixname, obj.fixname))
|
||||||
c_file.write(' for (i = 0; i < YAJL_GET_ARRAY(tmp)->len; i++) {\n')
|
c_file.write(' for (i = 0; i < YAJL_GET_ARRAY(tmp)->len; i++) {\n')
|
||||||
readValueGenerator(c_file, 4, 'YAJL_GET_ARRAY(tmp)->values[i]', \
|
read_val_generator(c_file, 4, 'YAJL_GET_ARRAY(tmp)->values[i]', \
|
||||||
"ret->%s[i]" % obj.fixname, obj.subtyp, obj.origname, obj_typename)
|
"ret->%s[i]" % obj.fixname, obj.subtyp, obj.origname, obj_typename)
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
elif helpers.validBasicMapName(obj.typ):
|
elif helpers.valid_basic_map_name(obj.typ):
|
||||||
c_file.write(' {\n')
|
c_file.write(' {\n')
|
||||||
c_file.write(' yajl_val tmp = get_val(tree, "%s", yajl_t_object);\n' \
|
c_file.write(' yajl_val tmp = get_val(tree, "%s", yajl_t_object);\n' \
|
||||||
% (obj.origname))
|
% (obj.origname))
|
||||||
c_file.write(' if (tmp != NULL) {\n')
|
c_file.write(' if (tmp != NULL) {\n')
|
||||||
c_file.write(' ret->%s = make_%s(tmp, ctx, err);\n' \
|
c_file.write(' ret->%s = make_%s(tmp, ctx, err);\n' \
|
||||||
% (obj.fixname, helpers.makeBasicMapName(obj.typ)))
|
% (obj.fixname, helpers.make_basic_map_name(obj.typ)))
|
||||||
c_file.write(' if (ret->%s == NULL) {\n' % (obj.fixname))
|
c_file.write(' if (ret->%s == NULL) {\n' % (obj.fixname))
|
||||||
c_file.write(' char *new_error = NULL;\n')
|
c_file.write(' char *new_error = NULL;\n')
|
||||||
c_file.write(" if (asprintf(&new_error, \"Value error for key" \
|
c_file.write(" if (asprintf(&new_error, \"Value error for key" \
|
||||||
@ -182,20 +184,19 @@ def parseObjectType(obj, c_file, prefix, obj_typename):
|
|||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
c_file.write(' }\n')
|
c_file.write(' }\n')
|
||||||
|
|
||||||
def parseObjectOrArrayObject(obj, c_file, prefix, obj_typename):
|
def parse_obj_arr_obj(obj, c_file, prefix, obj_typename):
|
||||||
"""
|
"""
|
||||||
Description: generate c language for parse object or array object
|
Description: generate c language for parse object or array object
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
"""
|
"""
|
||||||
nodes = obj.children if obj.typ == 'object' else obj.subtypobj
|
nodes = obj.children if obj.typ == 'object' else obj.subtypobj
|
||||||
|
|
||||||
required_to_check = []
|
required_to_check = []
|
||||||
for i in nodes or []:
|
for i in nodes or []:
|
||||||
if obj.required and i.origname in obj.required and \
|
if obj.required and i.origname in obj.required and \
|
||||||
not helpers.judgeDataType(i.typ) and i.typ != 'boolean':
|
not helpers.judge_data_type(i.typ) and i.typ != 'boolean':
|
||||||
required_to_check.append(i)
|
required_to_check.append(i)
|
||||||
parseObjectType(i, c_file, prefix, obj_typename)
|
parse_obj_type(i, c_file, prefix, obj_typename)
|
||||||
|
|
||||||
for i in required_to_check:
|
for i in required_to_check:
|
||||||
c_file.write(' if (ret->%s == NULL) {\n' % i.fixname)
|
c_file.write(' if (ret->%s == NULL) {\n' % i.fixname)
|
||||||
@ -222,26 +223,24 @@ def parseObjectOrArrayObject(obj, c_file, prefix, obj_typename):
|
|||||||
}
|
}
|
||||||
""" % condition)
|
""" % condition)
|
||||||
|
|
||||||
def parseJsonToC(obj, c_file, prefix):
|
|
||||||
|
def parse_json_to_c(obj, c_file, prefix):
|
||||||
"""
|
"""
|
||||||
Description: generate c language for parse json file
|
Description: generate c language for parse json file
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
"""
|
"""
|
||||||
if not helpers.judgeComplex(obj.typ):
|
if not helpers.judge_complex(obj.typ):
|
||||||
return
|
return
|
||||||
|
|
||||||
if obj.typ == 'object' or obj.typ == 'mapStringObject':
|
if obj.typ == 'object' or obj.typ == 'mapStringObject':
|
||||||
if obj.subtypname:
|
if obj.subtypname:
|
||||||
return
|
return
|
||||||
obj_typename = typename = helpers.getPrefixName(obj.name, prefix)
|
obj_typename = typename = helpers.get_prefixe_name(obj.name, prefix)
|
||||||
|
|
||||||
if obj.typ == 'array':
|
if obj.typ == 'array':
|
||||||
obj_typename = typename = helpers.getNameSubstr(obj.name, prefix)
|
obj_typename = typename = helpers.get_name_substr(obj.name, prefix)
|
||||||
objs = obj.subtypobj
|
objs = obj.subtypobj
|
||||||
if objs is None or obj.subtypname:
|
if objs is None or obj.subtypname:
|
||||||
return
|
return
|
||||||
|
|
||||||
c_file.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, "\
|
c_file.write("%s *make_%s(yajl_val tree, const struct parser_context *ctx, "\
|
||||||
"parser_error *err) {\n" % (typename, typename))
|
"parser_error *err) {\n" % (typename, typename))
|
||||||
c_file.write(" %s *ret = NULL;\n" % (typename))
|
c_file.write(" %s *ret = NULL;\n" % (typename))
|
||||||
@ -249,31 +248,29 @@ def parseJsonToC(obj, c_file, prefix):
|
|||||||
c_file.write(" if (tree == NULL)\n")
|
c_file.write(" if (tree == NULL)\n")
|
||||||
c_file.write(" return ret;\n")
|
c_file.write(" return ret;\n")
|
||||||
c_file.write(" ret = safe_malloc(sizeof(*ret));\n")
|
c_file.write(" ret = safe_malloc(sizeof(*ret));\n")
|
||||||
|
|
||||||
if obj.typ == 'mapStringObject':
|
if obj.typ == 'mapStringObject':
|
||||||
parseMapStringObject(obj, c_file, prefix, obj_typename)
|
parse_map_string_obj(obj, c_file, prefix, obj_typename)
|
||||||
|
|
||||||
if obj.typ == 'object' or (obj.typ == 'array' and obj.subtypobj):
|
if obj.typ == 'object' or (obj.typ == 'array' and obj.subtypobj):
|
||||||
parseObjectOrArrayObject(obj, c_file, prefix, obj_typename)
|
parse_obj_arr_obj(obj, c_file, prefix, obj_typename)
|
||||||
|
|
||||||
c_file.write(' return ret;\n')
|
c_file.write(' return ret;\n')
|
||||||
c_file.write("}\n\n")
|
c_file.write("}\n\n")
|
||||||
|
|
||||||
|
|
||||||
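Taken together, parse_json_to_c() and the helpers above emit one make_<type>() constructor per schema object. The following is a hand-written sketch of the overall shape of such a function, using a hypothetical my_config type with a single string field; get_val(), safe_malloc() and the parser_context/parser_error types come from the generated common files, and the real per-field code is produced by read_val_generator(), so details will differ.

my_config *make_my_config(yajl_val tree, const struct parser_context *ctx, parser_error *err) {
    my_config *ret = NULL;

    if (tree == NULL)
        return ret;
    ret = safe_malloc(sizeof(*ret));
    {
        /* one block like this is emitted per field; the key "name" is hypothetical */
        yajl_val val = get_val(tree, "name", yajl_t_string);
        if (val != NULL) {
            ret->name = strdup(YAJL_GET_STRING(val));
        }
    }
    return ret;
}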
def obtainMapStringObject(obj, c_file, prefix):
|
def get_map_string_obj(obj, c_file, prefix):
|
||||||
"""
|
"""
|
||||||
Description: c language generate map string object
|
Description: c language generate map string object
|
||||||
Interface: None
|
Interface: None
|
||||||
History: 2019-06-17
|
History: 2019-06-17
|
||||||
"""
|
"""
|
||||||
child = obj.children[0]
|
child = obj.children[0]
|
||||||
if helpers.validBasicMapName(child.typ):
|
if helpers.valid_basic_map_name(child.typ):
|
||||||
childname = helpers.makeBasicMapName(child.typ)
|
childname = helpers.make_basic_map_name(child.typ)
|
||||||
else:
|
else:
|
||||||
if child.subtypname:
|
if child.subtypname:
|
||||||
childname = child.subtypname
|
childname = child.subtypname
|
||||||
else:
|
else:
|
||||||
childname = helpers.getPrefixName(child.name, prefix)
|
childname = helpers.get_prefixe_name(child.name, prefix)
|
||||||
c_file.write(' size_t len = 0, i;\n')
|
c_file.write(' size_t len = 0, i;\n')
|
||||||
c_file.write(" if (ptr != NULL)\n")
|
c_file.write(" if (ptr != NULL)\n")
|
||||||
c_file.write(" len = ptr->len;\n")
|
c_file.write(" len = ptr->len;\n")
|
||||||
@ -286,7 +283,8 @@ def obtainMapStringObject(obj, c_file, prefix):
|
|||||||
% child.fixname)
|
% child.fixname)
|
||||||
c_file.write(' for (i = 0; i < len; i++) {\n')
|
c_file.write(' for (i = 0; i < len; i++) {\n')
|
||||||
c_file.write(' char *str = ptr->keys[i] ? ptr->keys[i] : "";\n')
|
c_file.write(' char *str = ptr->keys[i] ? ptr->keys[i] : "";\n')
|
||||||
c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)str, strlen(str));\n')
|
c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
|
||||||
|
(const unsigned char *)str, strlen(str));\n')
|
||||||
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
||||||
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
||||||
c_file.write(' stat = gen_%s(g, ptr->%s[i], ctx, err);\n' \
|
c_file.write(' stat = gen_%s(g, ptr->%s[i], ctx, err);\n' \
|
||||||
@ -301,7 +299,8 @@ def obtainMapStringObject(obj, c_file, prefix):
|
|||||||
c_file.write(" if (!len && !(ctx->options & OPT_GEN_SIMPLIFY))\n")
|
c_file.write(" if (!len && !(ctx->options & OPT_GEN_SIMPLIFY))\n")
|
||||||
c_file.write(' yajl_gen_config(g, yajl_gen_beautify, 1);\n')
|
c_file.write(' yajl_gen_config(g, yajl_gen_beautify, 1);\n')
|
||||||
|
|
||||||
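get_map_string_obj() above and the gen_* writers that follow all drive yajl's generator the same way: emit the key string, check the status, emit the value, check again. A compact, self-contained sketch of that pattern, using only the public yajl API with a hypothetical struct and key name (the generated code additionally threads ctx and err through GEN_SET_ERROR_AND_RETURN), is:

#include <string.h>
#include <yajl/yajl_gen.h>

typedef struct {
    char *type;   /* hypothetical field, standing in for a generated struct member */
} example_conf;

static yajl_gen_status gen_example_conf(yajl_gen g, const example_conf *ptr)
{
    yajl_gen_status stat = yajl_gen_map_open(g);
    if (stat != yajl_gen_status_ok)
        return stat;

    /* key */
    stat = yajl_gen_string(g, (const unsigned char *)"type", strlen("type"));
    if (stat != yajl_gen_status_ok)
        return stat;

    /* value: fall back to an empty string, as the generated code does */
    const char *val = (ptr != NULL && ptr->type != NULL) ? ptr->type : "";
    stat = yajl_gen_string(g, (const unsigned char *)val, strlen(val));
    if (stat != yajl_gen_status_ok)
        return stat;

    return yajl_gen_map_close(g);
}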
def obtainObjectOrArrayObject(obj, c_file, prefix):
|
|
||||||
|
def get_obj_arr_obj(obj, c_file, prefix):
|
||||||
"""
|
"""
|
||||||
Description: c language generate object or array object
|
Description: c language generate object or array object
|
||||||
Interface: None
|
Interface: None
|
||||||
@ -311,17 +310,16 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
|
|||||||
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
|
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
|
||||||
' (ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
|
' (ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
|
||||||
c_file.write(' char *str = "";\n')
|
c_file.write(' char *str = "";\n')
|
||||||
c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
|
c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
|
||||||
% (obj.origname, obj.origname))
|
(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
|
||||||
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
||||||
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
||||||
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
|
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
|
||||||
c_file.write(" str = ptr->%s;\n" % obj.fixname)
|
c_file.write(" str = ptr->%s;\n" % obj.fixname)
|
||||||
c_file.write(" }\n")
|
c_file.write(" }\n")
|
||||||
jsonValueGenerator(c_file, 2, "str", 'g', 'ctx', obj.typ)
|
json_value_generator(c_file, 2, "str", 'g', 'ctx', obj.typ)
|
||||||
c_file.write(" }\n")
|
c_file.write(" }\n")
|
||||||
|
elif helpers.judge_data_type(obj.typ):
|
||||||
elif helpers.judgeDataType(obj.typ):
|
|
||||||
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
|
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
|
||||||
' (ptr != NULL && ptr->%s)) {\n' % obj.fixname)
|
' (ptr != NULL && ptr->%s)) {\n' % obj.fixname)
|
||||||
if obj.typ == 'double':
|
if obj.typ == 'double':
|
||||||
@ -331,55 +329,53 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
|
|||||||
else:
|
else:
|
||||||
numtyp = 'long long int'
|
numtyp = 'long long int'
|
||||||
c_file.write(' %s num = 0;\n' % numtyp)
|
c_file.write(' %s num = 0;\n' % numtyp)
|
||||||
c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
|
c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
|
||||||
% (obj.origname, obj.origname))
|
(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
|
||||||
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
c_file.write(" if (yajl_gen_status_ok != stat)\n")
|
||||||
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
|
||||||
c_file.write(" if (ptr != NULL && ptr->%s) {\n" % obj.fixname)
|
c_file.write(" if (ptr != NULL && ptr->%s) {\n" % obj.fixname)
|
||||||
c_file.write(" num = (%s)ptr->%s;\n" % (numtyp, obj.fixname))
|
c_file.write(" num = (%s)ptr->%s;\n" % (numtyp, obj.fixname))
|
||||||
c_file.write(" }\n")
|
c_file.write(" }\n")
|
||||||
jsonValueGenerator(c_file, 2, "num", 'g', 'ctx', obj.typ)
|
json_value_generator(c_file, 2, "num", 'g', 'ctx', obj.typ)
|
||||||
c_file.write(" }\n")
|
c_file.write(" }\n")
|
||||||
|
elif helpers.judge_data_pointer_type(obj.typ):
|
||||||
elif helpers.judgeDataPointerType(obj.typ):
|
|
||||||
c_file.write(' if ((ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
|
c_file.write(' if ((ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
|
||||||
numtyp = helpers.obtainDataPointerType(obj.typ)
|
numtyp = helpers.obtain_data_pointer_type(obj.typ)
|
||||||
if numtyp == "":
|
if numtyp == "":
|
||||||
return
|
return
|
||||||
c_file.write(' %s num = 0;\n' % helpers.getMapCTypes(numtyp))
|
c_file.write(' %s num = 0;\n' % helpers.get_map_c_types(numtyp))
|
||||||
c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
|
c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
|
||||||
-% (obj.origname, obj.origname))
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
c_file.write(" num = (%s)*(ptr->%s);\n" \
-% (helpers.getMapCTypes(numtyp), obj.fixname))
+% (helpers.get_map_c_types(numtyp), obj.fixname))
c_file.write(" }\n")
-jsonValueGenerator(c_file, 2, "num", 'g', 'ctx', numtyp)
+json_value_generator(c_file, 2, "num", 'g', 'ctx', numtyp)
c_file.write(" }\n")

elif obj.typ == 'boolean':
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
' (ptr != NULL && ptr->%s)) {\n' % obj.fixname)
c_file.write(' bool b = false;\n')
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" if (ptr != NULL && ptr->%s) {\n" % obj.fixname)
c_file.write(" b = ptr->%s;\n" % obj.fixname)
c_file.write(" }\n")
-jsonValueGenerator(c_file, 2, "b", 'g', 'ctx', obj.typ)
+json_value_generator(c_file, 2, "b", 'g', 'ctx', obj.typ)
c_file.write(" }\n")
elif obj.typ == 'object' or obj.typ == 'mapStringObject':
if obj.subtypname:
typename = obj.subtypname
else:
-typename = helpers.getPrefixName(obj.name, prefix)
+typename = helpers.get_prefixe_name(obj.name, prefix)
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) ||' \
' (ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(' stat = gen_%s(g, ptr != NULL ? ptr->%s : NULL, ctx, err);\n' \
@ -387,17 +383,16 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" }\n")

elif obj.typ == 'array' and (obj.subtypobj or obj.subtyp == 'object'):
if obj.subtypname:
typename = obj.subtypname
else:
-typename = helpers.getNameSubstr(obj.name, prefix)
+typename = helpers.get_name_substr(obj.name, prefix)
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) || ' \
'(ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
c_file.write(' size_t len = 0, i;\n')
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
@ -426,15 +421,16 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
% (obj.fixname, obj.fixname))
c_file.write(' const char *str = "";\n')
c_file.write(' size_t len = 0;\n')
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
c_file.write(" str = (const char *)ptr->%s;\n" % obj.fixname)
c_file.write(" len = ptr->%s_len;\n" % obj.fixname)
c_file.write(" }\n")
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)str, len);\n')
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)str, len);\n')
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" }\n")
@ -442,8 +438,8 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) || ' \
'(ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
c_file.write(' size_t len = 0, i;\n')
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" if (ptr != NULL && ptr->%s != NULL) {\n" % obj.fixname)
@ -455,7 +451,7 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(' for (i = 0; i < len; i++) {\n')
-jsonValueGenerator(c_file, 3, "ptr->%s[i]" % obj.fixname, 'g', 'ctx', obj.subtyp)
+json_value_generator(c_file, 3, "ptr->%s[i]" % obj.fixname, 'g', 'ctx', obj.subtyp)
c_file.write(' }\n')
c_file.write(' stat = yajl_gen_array_close((yajl_gen)g);\n')
c_file.write(" if (yajl_gen_status_ok != stat)\n")
@ -463,44 +459,42 @@ def obtainObjectOrArrayObject(obj, c_file, prefix):
c_file.write(" if (!len && !(ctx->options & OPT_GEN_SIMPLIFY))\n")
c_file.write(' yajl_gen_config(g, yajl_gen_beautify, 1);\n')
c_file.write(' }\n')
-elif helpers.validBasicMapName(obj.typ):
+elif helpers.valid_basic_map_name(obj.typ):
c_file.write(' if ((ctx->options & OPT_GEN_KAY_VALUE) || ' \
'(ptr != NULL && ptr->%s != NULL)) {\n' % obj.fixname)
-c_file.write(' stat = yajl_gen_string((yajl_gen)g, (const unsigned char *)("%s"), strlen("%s"));\n' \
-% (obj.origname, obj.origname))
+c_file.write(' stat = yajl_gen_string((yajl_gen)g, \
+(const unsigned char *)("%s"), strlen("%s"));\n' % (obj.origname, obj.origname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(' stat = gen_%s(g, ptr ? ptr->%s : NULL, ctx, err);\n' \
-% (helpers.makeBasicMapName(obj.typ), obj.fixname))
+% (helpers.make_basic_map_name(obj.typ), obj.fixname))
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
c_file.write(" }\n")

-def obtainCJson(obj, c_file, prefix):
+def get_c_json(obj, c_file, prefix):
"""
Description: c language generate json file
Interface: None
History: 2019-06-17
"""
-if not helpers.judgeComplex(obj.typ) or obj.subtypname:
+if not helpers.judge_complex(obj.typ) or obj.subtypname:
return
if obj.typ == 'object' or obj.typ == 'mapStringObject':
-obj_typename = typename = helpers.getPrefixName(obj.name, prefix)
+typename = helpers.get_prefixe_name(obj.name, prefix)
elif obj.typ == 'array':
-obj_typename = typename = helpers.getNameSubstr(obj.name, prefix)
+typename = helpers.get_name_substr(obj.name, prefix)
objs = obj.subtypobj
if objs is None:
return

c_file.write(
"yajl_gen_status gen_%s(yajl_gen g, const %s *ptr, const struct parser_context " \
"*ctx, parser_error *err) {\n" % (typename, typename))
c_file.write(" yajl_gen_status stat = yajl_gen_status_ok;\n")
c_file.write(" *err = 0;\n")

if obj.typ == 'mapStringObject':
-obtainMapStringObject(obj, c_file, prefix)
+get_map_string_obj(obj, c_file, prefix)
elif obj.typ == 'object' or (obj.typ == 'array' and obj.subtypobj):
nodes = obj.children if obj.typ == 'object' else obj.subtypobj
if nodes is None:
@ -510,10 +504,8 @@ def obtainCJson(obj, c_file, prefix):
c_file.write(" stat = yajl_gen_map_open((yajl_gen)g);\n")
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")

for i in nodes or []:
-obtainObjectOrArrayObject(i, c_file, prefix)
+get_obj_arr_obj(i, c_file, prefix)

c_file.write(" stat = yajl_gen_map_close((yajl_gen)g);\n")
c_file.write(" if (yajl_gen_status_ok != stat)\n")
c_file.write(" GEN_SET_ERROR_AND_RETURN(stat, err);\n")
@ -524,17 +516,17 @@ def obtainCJson(obj, c_file, prefix):
c_file.write("}\n\n")


-def readValueGenerator(c_file, level, src, dest, typ, keyname, obj_typename):
+def read_val_generator(c_file, level, src, dest, typ, keyname, obj_typename):
"""
Description: read value generateor
Interface: None
History: 2019-06-17
"""
-if helpers.validBasicMapName(typ):
+if helpers.valid_basic_map_name(typ):
c_file.write('%syajl_val val = %s;\n' % (' ' * level, src))
c_file.write('%sif (val != NULL) {\n' % (' ' * level))
c_file.write('%s%s = make_%s(val, ctx, err);\n' \
-% (' ' * (level + 1), dest, helpers.makeBasicMapName(typ)))
+% (' ' * (level + 1), dest, helpers.make_basic_map_name(typ)))
c_file.write('%sif (%s == NULL) {\n' % (' ' * (level + 1), dest))
c_file.write('%s char *new_error = NULL;\n' % (' ' * (level + 1)))
c_file.write("%s if (asprintf(&new_error, \"Value error for key" \
@ -555,7 +547,7 @@ def readValueGenerator(c_file, level, src, dest, typ, keyname, obj_typename):
c_file.write('%schar *str = YAJL_GET_STRING(val);\n' % (' ' * (level + 1)))
c_file.write('%s%s = safe_strdup(str ? str : "");\n' % (' ' * (level + 1), dest))
c_file.write('%s}\n' % (' ' * level))
-elif helpers.judgeDataType(typ):
+elif helpers.judge_data_type(typ):
c_file.write('%syajl_val val = %s;\n' % (' ' * (level), src))
c_file.write('%sif (val != NULL) {\n' % (' ' * (level)))
if typ.startswith("uint") or \
@ -578,14 +570,14 @@ def readValueGenerator(c_file, level, src, dest, typ, keyname, obj_typename):
c_file.write('%s return NULL;\n' % (' ' * (level + 1)))
c_file.write('%s}\n' % (' ' * (level + 1)))
c_file.write('%s}\n' % (' ' * (level)))
-elif helpers.judgeDataPointerType(typ):
+elif helpers.judge_data_pointer_type(typ):
-num_type = helpers.obtainDataPointerType(typ)
+num_type = helpers.obtain_data_pointer_type(typ)
if num_type == "":
return
c_file.write('%syajl_val val = %s;\n' % (' ' * (level), src))
c_file.write('%sif (val != NULL) {\n' % (' ' * (level)))
c_file.write('%s%s = safe_malloc(sizeof(%s));\n' %
-(' ' * (level + 1), dest, helpers.getMapCTypes(num_type)))
+(' ' * (level + 1), dest, helpers.get_map_c_types(num_type)))
c_file.write('%sint invalid = common_safe_%s(YAJL_GET_NUMBER(val), %s);\n' \
% (' ' * (level + 1), num_type, dest))
c_file.write('%sif (invalid) {\n' % (' ' * (level + 1)))
@ -617,105 +609,86 @@ def readValueGenerator(c_file, level, src, dest, typ, keyname, obj_typename):
c_file.write('%s}\n' % (' ' * (level)))


-def jsonValueGenerator(c_file, level, src, dst, ptx, typ):
+def json_value_generator(c_file, level, src, dst, ptx, typ):
"""
Description: json value generateor
Interface: None
History: 2019-06-17
"""
-if helpers.validBasicMapName(typ):
+if helpers.valid_basic_map_name(typ):
c_file.write('%sstat = gen_%s(%s, %s, %s, err);\n' \
-% (' ' * (level), helpers.makeBasicMapName(typ), dst, src, ptx))
+% (' ' * (level), helpers.make_basic_map_name(typ), dst, src, ptx))
c_file.write("%sif (yajl_gen_status_ok != stat)\n" % (' ' * (level)))
c_file.write("%sGEN_SET_ERROR_AND_RETURN(stat, err);\n" % (' ' * (level + 1)))
elif typ == 'string':
-c_file.write('%sstat = yajl_gen_string((yajl_gen)%s, (const unsigned char *)(%s), strlen(%s));\n' \
-% (' ' * (level), dst, src, src))
+c_file.write('%sstat = yajl_gen_string((yajl_gen)%s, \
+(const unsigned char *)(%s), strlen(%s));\n' % (' ' * (level), dst, src, src))
c_file.write("%sif (yajl_gen_status_ok != stat)\n" % (' ' * (level)))
c_file.write("%sGEN_SET_ERROR_AND_RETURN(stat, err);\n" % (' ' * (level + 1)))
-elif helpers.judgeDataType(typ):
+elif helpers.judge_data_type(typ):
if typ == 'double':
-c_file.write('%sstat = yajl_gen_double((yajl_gen)%s, %s);\n' % (' ' * (level), dst, src))
+c_file.write('%sstat = yajl_gen_double((yajl_gen)%s, %s);\n' \
+% (' ' * (level), dst, src))
elif typ.startswith("uint") or typ == 'GID' or typ == 'UID':
c_file.write('%sstat = map_uint(%s, %s);\n' % (' ' * (level), dst, src))
else:
c_file.write('%sstat = map_int(%s, %s);\n' % (' ' * (level), dst, src))
c_file.write("%sif (yajl_gen_status_ok != stat)\n" % (' ' * (level)))
-c_file.write("%sGEN_SET_ERROR_AND_RETURN(stat, err);\n" % (' ' * (level + 1)))
+c_file.write("%sGEN_SET_ERROR_AND_RETURN(stat, err);\n" \
+% (' ' * (level + 1)))
elif typ == 'boolean':
-c_file.write('%sstat = yajl_gen_bool((yajl_gen)%s, (int)(%s));\n' % (' ' * (level), dst, src))
+c_file.write('%sstat = yajl_gen_bool((yajl_gen)%s, (int)(%s));\n' \
+% (' ' * (level), dst, src))
c_file.write("%sif (yajl_gen_status_ok != stat)\n" % (' ' * (level)))
c_file.write("%sGEN_SET_ERROR_AND_RETURN(stat, err);\n" % (' ' * (level + 1)))


-def makeCFree(obj, c_file, prefix):
+def make_c_free(obj, c_file, prefix):
"""
Description: generate c free function
Interface: None
History: 2019-06-17
"""
-if not helpers.judgeComplex(obj.typ) or obj.subtypname:
+if not helpers.judge_complex(obj.typ) or obj.subtypname:
return
-typename = helpers.getPrefixName(obj.name, prefix)
+typename = helpers.get_prefixe_name(obj.name, prefix)

case = obj.typ
-result = {
-'mapStringObject': lambda x: [],
-'object': lambda x: x.children,
-'array': lambda x: x.subtypobj
-}[case](obj)
+result = {'mapStringObject': lambda x: [], 'object': lambda x: x.children,
+'array': lambda x: x.subtypobj}[case](obj)

objs = result
if obj.typ == 'array':
if objs is None:
return
else:
-typename = helpers.getNameSubstr(obj.name, prefix)
+typename = helpers.get_name_substr(obj.name, prefix)

c_file.write("void free_%s(%s *ptr) {\n" % (typename, typename))
c_file.write(" if (ptr == NULL)\n")
c_file.write(" return;\n")
if obj.typ == 'mapStringObject':
child = obj.children[0]
-if helpers.validBasicMapName(child.typ):
+if helpers.valid_basic_map_name(child.typ):
-childname = helpers.makeBasicMapName(child.typ)
+childname = helpers.make_basic_map_name(child.typ)
else:
if child.subtypname:
childname = child.subtypname
else:
-childname = helpers.getPrefixName(child.name, prefix)
+childname = helpers.get_prefixe_name(child.name, prefix)
-c_file.write(" if (ptr->keys != NULL && ptr->%s != NULL) {\n" % child.fixname)
-c_file.write(" size_t i;\n")
-c_file.write(" for (i = 0; i < ptr->len; i++) {\n")
-c_file.write(" free(ptr->keys[i]);\n")
-c_file.write(" ptr->keys[i] = NULL;\n")
-c_file.write(" free_%s(ptr->%s[i]);\n" % (childname, child.fixname))
-c_file.write(" ptr->%s[i] = NULL;\n" % (child.fixname))
-c_file.write(" }\n")
-c_file.write(" free(ptr->keys);\n")
-c_file.write(" ptr->keys = NULL;\n")
-c_file.write(" free(ptr->%s);\n" % (child.fixname))
-c_file.write(" ptr->%s = NULL;\n" % (child.fixname))
-c_file.write(" }\n")
+c_file_map_str(c_file, child, childname)

for i in objs or []:
-if helpers.validBasicMapName(i.typ):
+if helpers.valid_basic_map_name(i.typ):
-free_func = helpers.makeBasicMapName(i.typ)
+free_func = helpers.make_basic_map_name(i.typ)
c_file.write(" free_%s(ptr->%s);\n" % (free_func, i.fixname))
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
if i.typ == 'mapStringObject':
if i.subtypname:
free_func = i.subtypname
else:
-free_func = helpers.getPrefixName(i.name, prefix)
+free_func = helpers.get_prefixe_name(i.name, prefix)
c_file.write(" free_%s(ptr->%s);\n" % (free_func, i.fixname))
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
elif i.typ == 'array':
-if helpers.validBasicMapName(i.subtyp):
+if helpers.valid_basic_map_name(i.subtyp):
-free_func = helpers.makeBasicMapName(i.subtyp)
+free_func = helpers.make_basic_map_name(i.subtyp)
c_file.write(" if (ptr->%s != NULL) {\n" % i.fixname)
c_file.write(" size_t i;\n")
c_file.write(" for (i = 0; i < ptr->%s_len; i++) {\n" % i.fixname)
@ -728,25 +701,15 @@ def makeCFree(obj, c_file, prefix):
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
c_file.write(" }\n")
elif i.subtyp == 'string':
-c_file.write(" if (ptr->%s != NULL) {\n" % i.fixname)
-c_file.write(" size_t i;\n")
-c_file.write(" for (i = 0; i < ptr->%s_len; i++) {\n" % i.fixname)
-c_file.write(" if (ptr->%s[i] != NULL) {\n" % (i.fixname))
-c_file.write(" free(ptr->%s[i]);\n" % (i.fixname))
-c_file.write(" ptr->%s[i] = NULL;\n" % (i.fixname))
-c_file.write(" }\n")
-c_file.write(" }\n")
-c_file.write(" free(ptr->%s);\n" % (i.fixname))
-c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
-c_file.write(" }\n")
-elif not helpers.judgeComplex(i.subtyp):
+c_file_str(c_file, i)
+elif not helpers.judge_complex(i.subtyp):
c_file.write(" free(ptr->%s);\n" % (i.fixname))
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
elif i.subtyp == 'object' or i.subtypobj is not None:
if i.subtypname is not None:
free_func = i.subtypname
else:
-free_func = helpers.getNameSubstr(i.name, prefix)
+free_func = helpers.get_name_substr(i.name, prefix)
c_file.write(" if (ptr->%s != NULL) {\n" % i.fixname)
c_file.write(" size_t i;\n")
c_file.write(" for (i = 0; i < ptr->%s_len; i++)\n" % i.fixname)
@ -757,23 +720,17 @@ def makeCFree(obj, c_file, prefix):
c_file.write(" free(ptr->%s);\n" % i.fixname)
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
c_file.write(" }\n")
-c_typ = helpers.obtainPointer(i.name, i.subtypobj, prefix)
+c_typ = helpers.obtain_pointer(i.name, i.subtypobj, prefix)
if c_typ == "":
continue
if i.subobj is not None:
c_typ = c_typ + "_element"
c_file.write(" free_%s(ptr->%s);\n" % (c_typ, i.fixname))
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
-else: # not array
+else:
-typename = helpers.getPrefixName(i.name, prefix)
+typename = helpers.get_prefixe_name(i.name, prefix)
-if i.typ == 'string':
-c_file.write(" free(ptr->%s);\n" % (i.fixname))
-c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
-elif i.typ == 'booleanPointer':
-c_file.write(" free(ptr->%s);\n" % (i.fixname))
-c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
-elif helpers.judgeDataPointerType(i.typ):
+if i.typ == 'string' or i.typ == 'booleanPointer' or \
+helpers.judge_data_pointer_type(i.typ):
c_file.write(" free(ptr->%s);\n" % (i.fixname))
c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
elif i.typ == 'object':
@ -787,28 +744,66 @@ def makeCFree(obj, c_file, prefix):
c_file.write("}\n\n")

-def sourceReflection(structs, schema_info, c_file, root_typ):
+def c_file_map_str(c_file, child, childname):
+"""
+Description: generate c code for map string
+Interface: None
+History: 2019-10-31
+"""
+c_file.write(" if (ptr->keys != NULL && ptr->%s != NULL) {\n" % child.fixname)
+c_file.write(" size_t i;\n")
+c_file.write(" for (i = 0; i < ptr->len; i++) {\n")
+c_file.write(" free(ptr->keys[i]);\n")
+c_file.write(" ptr->keys[i] = NULL;\n")
+c_file.write(" free_%s(ptr->%s[i]);\n" % (childname, child.fixname))
+c_file.write(" ptr->%s[i] = NULL;\n" % (child.fixname))
+c_file.write(" }\n")
+c_file.write(" free(ptr->keys);\n")
+c_file.write(" ptr->keys = NULL;\n")
+c_file.write(" free(ptr->%s);\n" % (child.fixname))
+c_file.write(" ptr->%s = NULL;\n" % (child.fixname))
+c_file.write(" }\n")
+
+
+def c_file_str(c_file, i):
+"""
+Description: generate c code template
+Interface: None
+History: 2019-10-31
+"""
+c_file.write(" if (ptr->%s != NULL) {\n" % i.fixname)
+c_file.write(" size_t i;\n")
+c_file.write(" for (i = 0; i < ptr->%s_len; i++) {\n" % i.fixname)
+c_file.write(" if (ptr->%s[i] != NULL) {\n" % (i.fixname))
+c_file.write(" free(ptr->%s[i]);\n" % (i.fixname))
+c_file.write(" ptr->%s[i] = NULL;\n" % (i.fixname))
+c_file.write(" }\n")
+c_file.write(" }\n")
+c_file.write(" free(ptr->%s);\n" % (i.fixname))
+c_file.write(" ptr->%s = NULL;\n" % (i.fixname))
+c_file.write(" }\n")
+
+
+def src_reflect(structs, schema_info, c_file, root_typ):
"""
Description: reflect code
Interface: None
History: 2019-06-17
"""
-c_file.write("// Generated from %s. Do not edit!\n" % (schema_info.name.basename))
+c_file.write("// Generated from %s. Do not edit!\n" \
+% (schema_info.name.basename))
c_file.write("#ifndef _GNU_SOURCE\n")
c_file.write("#define _GNU_SOURCE\n")
c_file.write("#endif\n")
c_file.write('#include <string.h>\n')
c_file.write('#include <read_file.h>\n')
-c_file.write('#include "securec.h"\n')
c_file.write('#include "%s"\n\n' % schema_info.header.basename)

for i in structs:
-appendCCode(i, c_file, schema_info.prefix)
+append_c_code(i, c_file, schema_info.prefix)
-obtainCEpilogue(c_file, schema_info.prefix, root_typ)
+get_c_epilog(c_file, schema_info.prefix, root_typ)


-def obtainCEpilogue(c_file, prefix, typ):
+def get_c_epilog(c_file, prefix, typ):
"""
Description: generate c language epilogue
Interface: None
@ -816,7 +811,6 @@ def obtainCEpilogue(c_file, prefix, typ):
"""
if typ != 'array' and typ != 'object':
return

if typ == 'array':
c_file.write("""\n
%s_element **make_%s(yajl_val tree, const struct parser_context *ctx, parser_error *err, size_t *len) {
@ -877,13 +871,11 @@ yajl_gen_status gen_%s(yajl_gen g, const %s_element **ptr, size_t len, const str
return yajl_gen_status_ok;
}
""" % (prefix, prefix, prefix))

c_file.write("""
%s%s*%s_parse_file(const char *filename, const struct parser_context *ctx, parser_error *err%s) {
%s%s*ptr = NULL;""" % (prefix, ' ' if typ == 'object' else '_element *', \
prefix, '' if typ == 'object' else ', size_t *len', \
prefix, ' ' if typ == 'object' else '_element *'))

c_file.write("""
size_t filesize;
char *content = NULL;
@ -903,13 +895,11 @@ yajl_gen_status gen_%s(yajl_gen g, const %s_element **ptr, size_t len, const str
return ptr;
}
""" % (prefix, '' if typ == 'object' else ', len'))

c_file.write("""
%s%s*%s_parse_file_stream(FILE *stream, const struct parser_context *ctx, parser_error *err%s) {
%s%s*ptr = NULL;""" % (prefix, ' ' if typ == 'object' else '_element *', \
prefix, '' if typ == 'object' else ', size_t *len', \
prefix, ' ' if typ == 'object' else '_element *'))

c_file.write("""
size_t filesize;
char *content = NULL ;
@ -928,13 +918,11 @@ yajl_gen_status gen_%s(yajl_gen g, const %s_element **ptr, size_t len, const str
return ptr;
}
""" % (prefix, '' if typ == 'object' else ', len'))

c_file.write("""
%s%s*%s_parse_data(const char *jsondata, const struct parser_context *ctx, parser_error *err%s) {
%s%s*ptr = NULL;""" % (prefix, ' ' if typ == 'object' else '_element *', \
prefix, '' if typ == 'object' else ', size_t *len', \
prefix, ' ' if typ == 'object' else '_element *'))

c_file.write("""
yajl_val tree;
char errbuf[1024];
@ -958,12 +946,10 @@ yajl_gen_status gen_%s(yajl_gen g, const %s_element **ptr, size_t len, const str
return ptr;
}
""" % (prefix, '' if typ == 'object' else ', len'))

c_file.write("char *%s_generate_json(const %s%s*ptr%s, const struct parser_context *ctx," \
" parser_error *err) {" % (prefix, prefix, \
' ' if typ == 'object' else '_element *', \
'' if typ == 'object' else ', size_t len'))

c_file.write("""
yajl_gen g = NULL;
struct parser_context tmp_ctx = { 0 };
@ -995,12 +981,7 @@ yajl_gen_status gen_%s(yajl_gen g, const %s_element **ptr, size_t len, const str
}

json_buf = safe_malloc(gen_len + 1);
-if (memcpy_s(json_buf, gen_len + 1, gen_buf, gen_len) != EOK) {
-*err = safe_strdup("Error to memcpy json");
-free(json_buf);
-json_buf = NULL;
-goto free_out;
-}
+(void)memcpy(json_buf, gen_buf, gen_len);
json_buf[gen_len] = '\\0';

free_out:
@ -1011,3 +992,5 @@ out:
}

""" % (prefix, '' if typ == 'object' else ', len'))
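Note: the generator strings above spell out the shape of the emitted C code: every key is written with yajl_gen_string() and every status value is funneled through GEN_SET_ERROR_AND_RETURN. A minimal hand-written sketch of that shape, for reference only (the type, the field name, and the simplified macro below are hypothetical, not taken from a real schema):

#include <string.h>
#include <yajl/yajl_gen.h>

typedef char *parser_error;   /* stand-in for the generated typedef */

/* Simplified stand-in for the macro used by the generated code. */
#define GEN_SET_ERROR_AND_RETURN(stat, err) \
    do { *(err) = (char *)"generation failed"; return (stat); } while (0)

/* Hypothetical generated type with a single string field. */
typedef struct { char *name; } example_config;

yajl_gen_status gen_example_config(yajl_gen g, const example_config *ptr, parser_error *err)
{
    yajl_gen_status stat = yajl_gen_map_open(g);

    if (stat != yajl_gen_status_ok)
        GEN_SET_ERROR_AND_RETURN(stat, err);

    if (ptr != NULL && ptr->name != NULL) {
        /* Key, then value, each checked the same way. */
        stat = yajl_gen_string(g, (const unsigned char *)"name", strlen("name"));
        if (stat != yajl_gen_status_ok)
            GEN_SET_ERROR_AND_RETURN(stat, err);
        stat = yajl_gen_string(g, (const unsigned char *)ptr->name, strlen(ptr->name));
        if (stat != yajl_gen_status_ok)
            GEN_SET_ERROR_AND_RETURN(stat, err);
    }

    return yajl_gen_map_close(g);
}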
33  src/log.c
@ -27,7 +26,6 @@
#include <inttypes.h>
#include <time.h>

-#include "securec.h"
#include "utils.h"
#include "log.h"

@ -246,6 +245,8 @@ static char *parse_timespec_to_human()
struct tm ptm = {0};
char date_time[CLIBCNI_LOG_TIME_SIZE] = { 0 };
int nret;
+#define SEC_TO_NSEC 1000000
+#define FIRST_YEAR_OF_GMT 1900

if (clock_gettime(CLOCK_REALTIME, &timestamp) == -1) {
COMMAND_ERROR("Failed to get real time");
@ -257,11 +258,11 @@ static char *parse_timespec_to_human()
return NULL;
}

-nret = sprintf_s(date_time, CLIBCNI_LOG_TIME_SIZE, "%04d%02d%02d%02d%02d%02d.%03ld",
-ptm.tm_year + 1900, ptm.tm_mon + 1, ptm.tm_mday, ptm.tm_hour, ptm.tm_min, ptm.tm_sec,
-timestamp.tv_nsec / 1000000);
+nret = snprintf(date_time, CLIBCNI_LOG_TIME_SIZE, "%04d%02d%02d%02d%02d%02d.%03ld",
+ptm.tm_year + FIRST_YEAR_OF_GMT, ptm.tm_mon + 1, ptm.tm_mday, ptm.tm_hour, ptm.tm_min, ptm.tm_sec,
+timestamp.tv_nsec / SEC_TO_NSEC);

-if (nret < 0) {
+if (nret < 0 || nret >= CLIBCNI_LOG_TIME_SIZE) {
COMMAND_ERROR("Sprintf failed");
return NULL;
}
@ -279,10 +280,10 @@ int clibcni_log_append(const struct clibcni_log_object_metadata *metadata, const
int ret = 0;

va_start(args, format);
-rc = vsprintf_s(msg, MAX_MSG_LENGTH, format, args);
+rc = vsnprintf(msg, MAX_MSG_LENGTH, format, args);
va_end(args);
-if (rc < 0 || rc >= MAX_MSG_LENGTH) {
+if (rc < 0) {
-rc = sprintf_s(msg, MAX_MSG_LENGTH, "%s", "!!LONG LONG A LOG!!");
+rc = snprintf(msg, MAX_MSG_LENGTH, "%s", "!!LONG LONG A LOG!!");
if (rc < 0) {
return 0;
}
@ -338,16 +339,16 @@ static void log_append_logfile(const struct clibcni_log_object_metadata *metadat
if (tmp_prefix != NULL && strlen(tmp_prefix) > MAX_LOG_PREFIX_LENGTH) {
tmp_prefix = tmp_prefix + (strlen(tmp_prefix) - MAX_LOG_PREFIX_LENGTH);
}
-nret = sprintf_s(log_buffer, sizeof(log_buffer), "%15s %s %-8s %s - %s:%s:%d - %s", tmp_prefix ? tmp_prefix : "",
+nret = snprintf(log_buffer, sizeof(log_buffer), "%15s %s %-8s %s - %s:%s:%d - %s", tmp_prefix ? tmp_prefix : "",
timestamp, g_clibcni_log_prio_name[metadata->level],
g_clibcni_log_vmname ? g_clibcni_log_vmname : "clibcni", metadata->file,
metadata->func, metadata->line, msg);

if (nret < 0) {
-nret = sprintf_s(log_buffer, sizeof(log_buffer), "%15s %s %-8s %s - %s:%s:%d - %s",
+nret = snprintf(log_buffer, sizeof(log_buffer), "%15s %s %-8s %s - %s:%s:%d - %s",
tmp_prefix ? tmp_prefix : "", timestamp, g_clibcni_log_prio_name[metadata->level],
g_clibcni_log_vmname ? g_clibcni_log_vmname : "clibcni", metadata->file,
metadata->func, metadata->line, "Large log message");
if (nret < 0) {
return;
}
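Note: every sprintf_s/vsprintf_s call in src/log.c becomes snprintf/vsnprintf, and most call sites now treat truncation (a return value of at least the buffer size) as a failure alongside a negative return. A self-contained sketch of that pattern, with illustrative function names that are not part of clibcni:

#include <stdarg.h>
#include <stdbool.h>
#include <stdio.h>

/* snprintf reports an error with a negative value and truncation with a
 * value >= the buffer size; both cases are rejected here. */
static bool format_date(char *buf, size_t size, int year, int mon, int mday)
{
    int nret = snprintf(buf, size, "%04d%02d%02d", year, mon, mday);
    return !(nret < 0 || (size_t)nret >= size);
}

/* Same idea for vsnprintf-based message formatting. */
static bool format_msg(char *buf, size_t size, const char *fmt, ...)
{
    va_list args;
    int rc;

    va_start(args, fmt);
    rc = vsnprintf(buf, size, fmt, args);
    va_end(args);

    return !(rc < 0 || (size_t)rc >= size);
}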
@ -17,6 +17,7 @@
#endif
#include "current.h"
#include <stdio.h>
+#include <stdlib.h>

#include "utils.h"
#include "log.h"
@ -237,10 +238,7 @@ static struct dns *convert_curr_dns(network_dns *curr_dns, char **err)
result->search = curr_dns->search;
result->search_len = curr_dns->search_len;

-if (memset_s(curr_dns, sizeof(network_dns), 0, sizeof(network_dns)) != EOK) {
-*err = util_strdup_s("Memset failed");
-ERROR("Memset failed");
-}
+(void)memset(curr_dns, 0, sizeof(network_dns));

return result;
}
@ -249,13 +247,7 @@ static int copy_result_interface(const result_curr *curr_result, struct result *
{
value->interfaces_len = curr_result->interfaces_len;
if (value->interfaces_len > 0) {
-if (value->interfaces_len > (SIZE_MAX / sizeof(struct interface *))) {
-*err = util_strdup_s("Too many interface");
-value->interfaces_len = 0;
-ERROR("Too many interface");
-return -1;
-}
-value->interfaces = util_common_calloc_s(sizeof(struct interface *) * value->interfaces_len);
+value->interfaces = util_smart_calloc_s(value->interfaces_len, sizeof(struct interface *));
if (value->interfaces == NULL) {
*err = util_strdup_s("Out of memory");
value->interfaces_len = 0;
@ -285,20 +277,14 @@ static int copy_result_ips(const result_curr *curr_result, struct result *value,
return 0;
}

-if (value->ips_len > (SIZE_MAX / sizeof(struct ipconfig *))) {
-*err = util_strdup_s("Too many ips");
-ERROR("Too many ips");
-value->ips_len = 0;
-return -1;
-}
-
-value->ips = util_common_calloc_s(sizeof(struct ipconfig *) * value->ips_len);
+value->ips = util_smart_calloc_s(value->ips_len, sizeof(struct ipconfig *));
if (value->ips == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
value->ips_len = 0;
return -1;
}

for (i = 0; i < value->ips_len; i++) {
value->ips[i] = convert_curr_ipconfig(curr_result->ips[i], err);
if (value->ips[i] == NULL) {
@ -319,20 +305,14 @@ static int copy_result_routes(const result_curr *curr_result, struct result *val
return 0;
}

-if (value->routes_len > (SIZE_MAX / sizeof(struct route *))) {
-*err = util_strdup_s("Too many routes");
-ERROR("Too many routes");
-value->routes_len = 0;
-return -1;
-}
-
-value->routes = util_common_calloc_s(sizeof(struct route *) * value->routes_len);
+value->routes = util_smart_calloc_s(value->routes_len, sizeof(struct route *));
if (value->routes == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
value->routes_len = 0;
return -1;
}

for (i = 0; i < value->routes_len; i++) {
value->routes[i] = convert_curr_route(curr_result->routes[i], err);
if (value->routes[i] == NULL) {
@ -526,24 +506,14 @@ static int dns_to_json_copy_servers(const struct dns *src, network_dns *result,
bool need_copy = (src->name_servers != NULL && src->name_servers_len > 0);

if (need_copy) {
-if (src->name_servers_len > (SIZE_MAX / sizeof(char *))) {
-*err = util_strdup_s("Too many servers");
-ERROR("Too many servers");
-return -1;
-}
-
-result->nameservers = (char **)util_common_calloc_s(sizeof(char *) * src->name_servers_len);
+result->nameservers = (char **)util_smart_calloc_s(src->name_servers_len, sizeof(char *));
if (result->nameservers == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
return -1;
}
result->nameservers_len = src->name_servers_len;
-if (memcpy_s(result->nameservers, result->nameservers_len, src->name_servers, src->name_servers_len) != EOK) {
-*err = util_strdup_s("Memcpy failed");
-ERROR("Memcpy failed");
-return -1;
-}
+(void)memcpy(result->nameservers, src->name_servers, src->name_servers_len);
}
return 0;
}
@ -553,24 +523,14 @@ static int dns_to_json_copy_options(const struct dns *src, network_dns *result,
bool need_copy = (src->options != NULL && src->options_len > 0);

if (need_copy) {
-if (src->options_len > (SIZE_MAX / sizeof(char *))) {
-*err = util_strdup_s("Too many options");
-ERROR("Too many options");
-return -1;
-}
-
-result->options = (char **)util_common_calloc_s(sizeof(char *) * src->options_len);
+result->options = (char **)util_smart_calloc_s(src->options_len, sizeof(char *));
if (result->options == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
return -1;
}
result->options_len = src->options_len;
-if (memcpy_s(result->options, result->options_len, src->options, src->options_len) != EOK) {
-*err = util_strdup_s("Memcpy failed");
-ERROR("Memcpy failed");
-return -1;
-}
+(void)memcpy(result->options, src->options, src->options_len);
}
return 0;
}
@ -580,24 +540,14 @@ static int dns_to_json_copy_searchs(const struct dns *src, network_dns *result,
bool need_copy = (src->search != NULL && src->search_len > 0);

if (need_copy) {
-if (src->search_len > (SIZE_MAX / sizeof(char *))) {
-*err = util_strdup_s("Too many searchs");
-ERROR("Too many searchs");
-return -1;
-}
-
-result->search = (char **)util_common_calloc_s(sizeof(char *) * src->search_len);
+result->search = (char **)util_smart_calloc_s(src->search_len, sizeof(char *));
if (result->search == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
return -1;
}
result->search_len = src->search_len;
-if (memcpy_s(result->search, result->search_len, src->search, src->search_len) != EOK) {
-*err = util_strdup_s("Memcpy failed");
-ERROR("Memcpy failed");
-return -1;
-}
+(void)memcpy(result->search, src->search, src->search_len);
}
return 0;
}
@ -658,13 +608,7 @@ static bool copy_interfaces_from_result_to_json(const struct result *src, result

res->interfaces_len = 0;

-if (src->interfaces_len > (SIZE_MAX / sizeof(network_interface *))) {
-*err = util_strdup_s("Too many interfaces");
-ERROR("Too many interfaces");
-return false;
-}
-
-res->interfaces = (network_interface **)util_common_calloc_s(sizeof(network_interface *) * src->interfaces_len);
+res->interfaces = (network_interface **)util_smart_calloc_s(src->interfaces_len, sizeof(network_interface *));
if (res->interfaces == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
@ -691,13 +635,7 @@ static bool copy_ips_from_result_to_json(const struct result *src, result_curr *

res->ips_len = 0;
if (need_copy) {
-if (src->ips_len > (SIZE_MAX / sizeof(network_ipconfig *))) {
-*err = util_strdup_s("Too many ips");
-ERROR("Too many ips");
-return false;
-}
-
-res->ips = (network_ipconfig **)util_common_calloc_s(sizeof(network_ipconfig *) * src->ips_len);
+res->ips = (network_ipconfig **)util_smart_calloc_s(src->ips_len, sizeof(network_ipconfig *));
if (res->ips == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
@ -722,12 +660,7 @@ static bool copy_routes_from_result_to_json(const struct result *src, result_cur

res->routes_len = 0;
if (need_copy) {
-if (src->routes_len > (SIZE_MAX / sizeof(network_route *))) {
-*err = util_strdup_s("Too many routes");
-ERROR("Too many routes");
-return false;
-}
-res->routes = (network_route **)util_common_calloc_s(sizeof(network_route *) * src->routes_len);
+res->routes = (network_route **)util_smart_calloc_s(src->routes_len, sizeof(network_route *));
if (res->routes == NULL) {
*err = util_strdup_s("Out of memory");
ERROR("Out of memory");
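Note: the hunks above repeat one transformation: the manual SIZE_MAX overflow guard plus util_common_calloc_s(sizeof(T *) * n) collapses into a single util_smart_calloc_s(n, sizeof(T *)) call, and memcpy_s with its error branch becomes a plain (void)memcpy once the destination has been allocated from the same length. A hedged, self-contained sketch of a caller in that shape; the struct is a placeholder and the static helper below is a stand-in written to match how util_smart_calloc_s is called in this diff, not the real implementation:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Stand-in: assumed overflow-checked calloc wrapper, mirroring how
 * util_smart_calloc_s(count, unit) is used in this commit. */
static void *smart_calloc_stub(size_t count, size_t unit)
{
    if (count == 0 || unit == 0 || count > SIZE_MAX / unit) {
        return NULL;
    }
    return calloc(count, unit);
}

struct string_list {
    char **items;
    size_t items_len;
};

static int copy_items(char **src, size_t src_len, struct string_list *dst)
{
    if (src == NULL || src_len == 0) {
        return 0;
    }

    /* One call replaces the explicit SIZE_MAX check plus util_common_calloc_s. */
    dst->items = smart_calloc_stub(src_len, sizeof(char *));
    if (dst->items == NULL) {
        return -1;
    }
    dst->items_len = src_len;

    /* The destination was sized from src_len, so an unchecked memcpy suffices;
     * this sketch copies the whole pointer array. */
    (void)memcpy(dst->items, src, src_len * sizeof(char *));
    return 0;
}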
@ -20,7 +20,6 @@
|
|||||||
#include <errno.h>
|
#include <errno.h>
|
||||||
#include <netinet/in.h>
|
#include <netinet/in.h>
|
||||||
#include <arpa/inet.h>
|
#include <arpa/inet.h>
|
||||||
#include <securec.h>
|
|
||||||
|
|
||||||
#include "types.h"
|
#include "types.h"
|
||||||
#include "utils.h"
|
#include "utils.h"
|
||||||
@ -202,20 +201,17 @@ static size_t to_ipv4(const uint8_t *src, size_t src_len, uint8_t **ipv4)
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
if (src_len == IPV4LEN) {
|
if (src_len == IPV4LEN) {
|
||||||
ip = util_common_calloc_s(IPV4LEN * sizeof(uint8_t));
|
ip = util_smart_calloc_s(IPV4LEN, sizeof(uint8_t));
|
||||||
if (ip == NULL) {
|
if (ip == NULL) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
if (memcpy_s(ip, IPV4LEN, src, IPV4LEN) != EOK) {
|
(void)memcpy(ip, src, IPV4LEN);
|
||||||
free(ip);
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
*ipv4 = ip;
|
*ipv4 = ip;
|
||||||
return IPV4LEN;
|
return IPV4LEN;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (src_len == IPV6LEN && is_ipv4(src, src_len) && src[10] == 0xff && src[11] == 0xff) {
|
if (src_len == IPV6LEN && is_ipv4(src, src_len) && src[10] == 0xff && src[11] == 0xff) {
|
||||||
ip = util_common_calloc_s(IPV4LEN * sizeof(uint8_t));
|
ip = util_smart_calloc_s(IPV4LEN, sizeof(uint8_t));
|
||||||
if (ip == NULL) {
|
if (ip == NULL) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
@ -262,8 +258,8 @@ static int do_parse_ip_to_string(const uint8_t *ip, size_t len, char **result)
|
|||||||
ret = -1;
|
ret = -1;
|
||||||
goto free_out;
|
goto free_out;
|
||||||
}
|
}
|
||||||
nret = sprintf_s(*result, res_len, "%s%s", "?", tmp);
|
nret = snprintf(*result, res_len, "%s%s", "?", tmp);
|
||||||
if (nret < 0) {
|
if (nret < 0 || (size_t)nret >= res_len) {
|
||||||
free(*result);
|
free(*result);
|
||||||
*result = NULL;
|
*result = NULL;
|
||||||
ret = -1;
|
ret = -1;
|
||||||
@ -402,19 +398,13 @@ static size_t try_to_ipv4(const struct ipnet *value, uint8_t **pip, char **err)
|
|||||||
iplen = to_ipv4(value->ip, value->ip_len, pip);
|
iplen = to_ipv4(value->ip, value->ip_len, pip);
|
||||||
if (iplen == 0) {
|
if (iplen == 0) {
|
||||||
if (value->ip_len == IPV6LEN) {
|
if (value->ip_len == IPV6LEN) {
|
||||||
*pip = util_common_calloc_s(IPV6LEN * sizeof(uint8_t));
|
*pip = util_smart_calloc_s(IPV6LEN, sizeof(uint8_t));
|
||||||
if (*pip == NULL) {
|
if (*pip == NULL) {
|
||||||
ERROR("Out of memory");
|
ERROR("Out of memory");
|
||||||
*err = util_strdup_s("Out of memory");
|
*err = util_strdup_s("Out of memory");
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
if (memcpy_s(*pip, IPV6LEN, value->ip, IPV6LEN) != EOK) {
|
(void)memcpy(*pip, value->ip, IPV6LEN);
|
||||||
*err = util_strdup_s("Memcpy failed");
|
|
||||||
free(*pip);
|
|
||||||
*pip = NULL;
|
|
||||||
ERROR("Memcpy failed");
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
iplen = IPV6LEN;
|
iplen = IPV6LEN;
|
||||||
} else {
|
} else {
|
||||||
if (asprintf(err, "Invalid ip, len=%lu", iplen) < 0) {
|
if (asprintf(err, "Invalid ip, len=%lu", iplen) < 0) {
|
||||||
@ -437,41 +427,29 @@ static int get_ipv4_mask(const struct ipnet *value, size_t iplen, uint8_t **mask
|
|||||||
}
|
}
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
*mask = util_common_calloc_s(IPV4LEN * sizeof(uint8_t));
|
*mask = util_smart_calloc_s(IPV4LEN, sizeof(uint8_t));
|
||||||
if (*mask == NULL) {
|
if (*mask == NULL) {
|
||||||
*err = util_strdup_s("Out of memory");
|
*err = util_strdup_s("Out of memory");
|
||||||
ERROR("Out of memory");
|
ERROR("Out of memory");
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
if (memcpy_s(*mask, IPV4LEN, value->ip_mask, IPV4LEN) != EOK) {
|
(void)memcpy(*mask, value->ip_mask, IPV4LEN);
|
||||||
*err = util_strdup_s("Memcpy failed");
|
|
||||||
ERROR("Memcpy failed");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
return IPV4LEN;
|
return IPV4LEN;
|
||||||
}
|
}
|
||||||
|
|
||||||
static int get_ipv6_mask(const struct ipnet *value, size_t iplen, uint8_t **mask, char **err)
|
static int get_ipv6_mask(const struct ipnet *value, size_t iplen, uint8_t **mask, char **err)
|
||||||
{
|
{
|
||||||
if (iplen == IPV4LEN) {
|
if (iplen == IPV4LEN) {
|
||||||
*mask = util_common_calloc_s(IPV4LEN * sizeof(uint8_t));
|
*mask = util_smart_calloc_s(IPV4LEN, sizeof(uint8_t));
|
||||||
if (*mask == NULL) {
|
if (*mask == NULL) {
|
||||||
*err = util_strdup_s("Out of memory");
|
*err = util_strdup_s("Out of memory");
|
||||||
ERROR("Out of memory");
|
ERROR("Out of memory");
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
if (memcpy_s(*mask, IPV4LEN, (value->ip_mask + IPV4_TO_V6_EMPTY_PREFIX_BYTES), IPV4LEN) != EOK) {
|
(void)memcpy(*mask, (value->ip_mask + IPV4_TO_V6_EMPTY_PREFIX_BYTES), IPV4LEN);
|
||||||
*err = util_strdup_s("Memcpy failed");
|
|
||||||
ERROR("Memcpy failed");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
return IPV4LEN;
|
return IPV4LEN;
|
||||||
} else {
|
} else {
|
||||||
if (memcpy_s(*mask, IPV6LEN, value->ip_mask, IPV6LEN) != EOK) {
|
(void)memcpy(*mask, value->ip_mask, IPV6LEN);
|
||||||
*err = util_strdup_s("Memcpy failed");
|
|
||||||
ERROR("Memcpy failed");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
return IPV6LEN;
|
return IPV6LEN;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -545,8 +523,8 @@ static char *do_generate_ip_with_mask(const uint8_t *mask, size_t masklen, const
        ERROR("Out of memory");
        goto free_out;
    }
-   nret = sprintf_s(result, res_len, "%s/%s", ip, tmp_mask);
-   if (nret < 0) {
+   nret = snprintf(result, res_len, "%s/%s", ip, tmp_mask);
+   if (nret < 0 || (size_t)nret >= res_len) {
        *err = util_strdup_s("Sprintf first type failed");
        ERROR("Sprintf failed");
        free(result);
@ -603,8 +581,8 @@ char *ipnet_to_string(const struct ipnet *value, char **err)
        ERROR("Out of memory");
        goto free_out;
    }
-   nret = sprintf_s(result, res_len, "%s/%d", tmp_ip, slen);
-   if (nret < 0) {
+   nret = snprintf(result, res_len, "%s/%d", tmp_ip, slen);
+   if (nret < 0 || (size_t)nret >= res_len) {
        ERROR("Sprintf failed");
        *err = util_strdup_s("Sprintf second type failed");
        free(result);
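Unlike sprintf_s, snprintf does not report truncation as a negative return value; it returns the length the output would have had. That is why every converted call site checks both "nret < 0" and "nret >= buffer size". A standalone sketch of the check, assuming only libc (join_ip_and_mask is an illustrative name, not a clibcni function):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Join "ip" and "mask" as "ip/mask" into a freshly allocated buffer.
 * Returns NULL if allocation fails or the output would be truncated. */
char *join_ip_and_mask(const char *ip, const char *mask)
{
    size_t res_len = strlen(ip) + strlen(mask) + 2; /* '/' plus '\0' */
    char *result = calloc(res_len, sizeof(char));
    int nret;

    if (result == NULL) {
        return NULL;
    }
    nret = snprintf(result, res_len, "%s/%s", ip, mask);
    /* snprintf signals truncation through its return value, not errno:
     * a value >= res_len means the formatted string did not fit */
    if (nret < 0 || (size_t)nret >= res_len) {
        free(result);
        return NULL;
    }
    return result;
}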
@ -625,16 +603,12 @@ static int get_ip_from_in6_addr(const struct in6_addr *ipv6, uint8_t **ip, size_
    if (ipv6 == NULL) {
        return 0;
    }
-   result = util_common_calloc_s(IPV6LEN * sizeof(uint8_t));
+   result = util_smart_calloc_s(IPV6LEN, sizeof(uint8_t));
    if (result == NULL) {
        ERROR("Out of memory");
        return -1;
    }
-   if (memcpy_s(result, IPV6LEN * sizeof(uint8_t), ipv6->s6_addr, IPV6LEN * sizeof(uint8_t)) != EOK) {
-       ERROR("Memcpy failed");
-       free(result);
-       return -1;
-   }
+   (void)memcpy(result, ipv6->s6_addr, IPV6LEN * sizeof(uint8_t));

    *ip = result;
    *len = IPV6LEN;
@ -650,7 +624,7 @@ static int get_ip_from_in_addr(const struct in_addr *ipv4, uint8_t **ip, size_t
    if (ipv4 == NULL) {
        return 0;
    }
-   result = util_common_calloc_s(IPV4LEN * sizeof(uint8_t));
+   result = util_smart_calloc_s(IPV4LEN, sizeof(uint8_t));
    if (result == NULL) {
        ERROR("Out of memory");
        return -1;
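The two converters follow the same shape: copy the raw bytes out of the socket-API struct into a small heap buffer that the caller owns. A self-contained sketch of the IPv4 case, using plain calloc instead of the project helper (ipv4_bytes and the local IPV4LEN define are illustrative):

#include <netinet/in.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define IPV4LEN 4

/* Return the four network-order bytes of an in_addr in a new buffer,
 * or NULL if the input is missing or allocation fails. */
uint8_t *ipv4_bytes(const struct in_addr *ipv4)
{
    uint8_t *result = NULL;

    if (ipv4 == NULL) {
        return NULL;
    }
    result = calloc(IPV4LEN, sizeof(uint8_t));
    if (result == NULL) {
        return NULL;
    }
    /* s_addr already holds the address in network byte order */
    (void)memcpy(result, &ipv4->s_addr, IPV4LEN);
    return result;
}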
@ -734,12 +708,7 @@ static int do_parse_mask_in_cidr(unsigned int mask_num, struct ipnet *result, ch

    j = result->ip_len;

-   if (j >= (SIZE_MAX / sizeof(uint8_t))) {
-       *err = util_strdup_s("Too many ips");
-       ERROR("Too many ips");
-       return -1;
-   }
-   result->ip_mask = util_common_calloc_s(j * sizeof(uint8_t));
+   result->ip_mask = util_smart_calloc_s(j, sizeof(uint8_t));
    if (result->ip_mask == NULL) {
        *err = util_strdup_s("Out of memory");
        ERROR("Out of memory");
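The hand-rolled "j >= SIZE_MAX / sizeof(uint8_t)" guard disappears because the new allocator takes the element count and element size as separate arguments and performs the overflow check itself (see the util_smart_calloc_s hunk in src/utils.c below). Caller-side, the allocation collapses to one call plus a NULL check; a sketch of that pattern with plain calloc standing in for the project helper (alloc_mask is an illustrative name):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Allocate a zeroed ip_len-byte mask. Passing count and element size
 * separately lets the allocator reject products that would overflow,
 * which is the property the removed SIZE_MAX guard re-implemented. */
uint8_t *alloc_mask(size_t ip_len)
{
    uint8_t *mask = calloc(ip_len, sizeof(uint8_t));

    if (mask == NULL) {
        fprintf(stderr, "Out of memory\n");
        return NULL;
    }
    return mask;
}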
97 src/utils.c
@ -22,8 +22,6 @@
#include <sys/stat.h>
#include <fcntl.h>

-#include <securec.h>
-
#include "utils.h"
#include "log.h"

@ -71,7 +69,6 @@ static int do_clean_path(const char *respath, const char *limit_respath, const c
{
    char *dest = *dst;
    const char *endpos = NULL;
-   errno_t ret;

    endpos = stpos;

@ -102,11 +99,7 @@ static int do_clean_path(const char *respath, const char *limit_respath, const c
            return -1;
        }

-       ret = memcpy_s(dest, (size_t)(endpos - stpos), stpos, (size_t)(endpos - stpos));
-       if (ret != EOK) {
-           ERROR("Failed at cleanpath memcpy");
-           return -1;
-       }
+       (void)memcpy(dest, stpos, (size_t)(endpos - stpos));
        dest += endpos - stpos;
        *dest = '\0';
    }
@ -125,7 +118,6 @@ char *cleanpath(const char *path, char *cleaned_path, size_t cleaned_path_len)
    char *dest = NULL;
    const char *stpos = NULL;
    const char *limit_respath = NULL;
-   errno_t ret;

    if (check_cleanpath_args(path, cleaned_path, cleaned_path_len)) {
        return NULL;
@ -133,11 +125,7 @@ char *cleanpath(const char *path, char *cleaned_path, size_t cleaned_path_len)

    respath = cleaned_path;

-   ret = memset_s(respath, cleaned_path_len, 0, cleaned_path_len);
-   if (ret != EOK) {
-       ERROR("Failed at cleanpath memset");
-       goto error;
-   }
+   (void)memset(respath, 0, cleaned_path_len);
    limit_respath = respath + PATH_MAX;

    if (!IS_ABSOLUTE_FILE_NAME(path)) {
@ -151,11 +139,11 @@ char *cleanpath(const char *path, char *cleaned_path, size_t cleaned_path_len)
            ERROR("Failed to get the end of respath");
            goto error;
        }
-       ret = strcat_s(respath, PATH_MAX, path);
-       if (ret != EOK) {
-           ERROR("Failed at cleanpath strcat");
+       if (strlen(path) >= (PATH_MAX - 1) - strlen(respath)) {
+           ERROR("%s path too long", path);
            goto error;
        }
+       (void)strcat(respath, path);
        stpos = path;
    } else {
        dest = respath;
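Here the runtime bound that strcat_s enforced is replaced by an explicit length check against the space left in the PATH_MAX buffer; once the check passes, plain strcat cannot overflow. A self-contained sketch of that check, assuming only libc (append_path is an illustrative name):

#include <limits.h>
#include <stdio.h>
#include <string.h>

#ifndef PATH_MAX
#define PATH_MAX 4096
#endif

/* Append "path" to "respath" (a PATH_MAX-byte buffer) only if it fits,
 * leaving room for the terminating '\0'. */
int append_path(char respath[PATH_MAX], const char *path)
{
    /* reject up front instead of relying on a bounded strcat variant */
    if (strlen(path) >= (PATH_MAX - 1) - strlen(respath)) {
        fprintf(stderr, "%s path too long\n", path);
        return -1;
    }
    (void)strcat(respath, path);
    return 0;
}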
@ -183,6 +171,19 @@ bool is_null_or_empty(const char *str)
    return (str == NULL || strlen(str) == 0);
}

+void *util_smart_calloc_s(size_t count, size_t unit_size)
+{
+    if (unit_size == 0) {
+        return NULL;
+    }
+
+    if (count > (MAX_MEMORY_SIZE / unit_size)) {
+        return NULL;
+    }
+
+    return calloc(count, unit_size);
+}
+
void *util_common_calloc_s(size_t size)
{
    if (size == 0) {
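This new helper centralizes the zero-size and count*unit_size overflow checks that callers previously open-coded around util_common_calloc_s. A standalone sketch of how it behaves, with the helper body repeated locally so the example compiles on its own (the 64-bit MAX_MEMORY_SIZE value is taken from the utils.h hunk further down):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define MAX_MEMORY_SIZE ((size_t)1 << 47)

/* Same shape as the helper added above: reject unit_size == 0 and
 * oversized or overflowing count * unit_size requests, then defer to calloc. */
void *util_smart_calloc_s(size_t count, size_t unit_size)
{
    if (unit_size == 0) {
        return NULL;
    }
    if (count > (MAX_MEMORY_SIZE / unit_size)) {
        return NULL;
    }
    return calloc(count, unit_size);
}

int main(void)
{
    /* an ordinary request succeeds */
    uint8_t *buf = util_smart_calloc_s(16, sizeof(uint8_t));
    /* an absurd request is rejected instead of overflowing the multiplication */
    void *huge = util_smart_calloc_s(SIZE_MAX, sizeof(uint64_t));

    printf("buf=%p huge=%p\n", (void *)buf, huge);
    free(buf);
    return 0;
}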
@ -281,19 +282,11 @@ static char *do_string_join(const char *sep, const char * const *parts, size_t p
    }

    for (iter = 0; iter < parts_len - 1; iter++) {
-       if (strcat_s(res_string, result_len + 1, parts[iter]) != EOK) {
-           free(res_string);
-           return NULL;
-       }
-       if (strcat_s(res_string, result_len + 1, sep) != EOK) {
-           free(res_string);
-           return NULL;
-       }
+       (void)strcat(res_string, parts[iter]);
+       (void)strcat(res_string, sep);
    }
-   if (strcat_s(res_string, result_len + 1, parts[parts_len - 1]) != EOK) {
-       free(res_string);
-       return NULL;
-   }
+   (void)strcat(res_string, parts[parts_len - 1]);

    return res_string;
}
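The unchecked strcat calls are safe here only because result_len is computed from the parts before the buffer is allocated, so the destination is always large enough. A standalone sketch of that precompute-then-concatenate pattern, assuming only libc (string_join is an illustrative name, not the static helper in this file):

#include <stdlib.h>
#include <string.h>

/* Join parts with sep; the output buffer is sized up front, so the
 * unchecked strcat calls cannot run past it. */
char *string_join(const char *sep, const char * const *parts, size_t parts_len)
{
    size_t i;
    size_t len = 1; /* terminating '\0' */
    char *res = NULL;

    if (parts_len == 0) {
        return NULL;
    }
    for (i = 0; i < parts_len; i++) {
        len += strlen(parts[i]);
    }
    len += strlen(sep) * (parts_len - 1);

    res = calloc(len, sizeof(char));
    if (res == NULL) {
        return NULL;
    }
    for (i = 0; i < parts_len - 1; i++) {
        (void)strcat(res, parts[i]);
        (void)strcat(res, sep);
    }
    (void)strcat(res, parts[parts_len - 1]);
    return res;
}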
@ -348,34 +341,23 @@ static char *do_uint8_join(const char *sep, const char *type, const uint8_t *par
    }

    for (iter = 0; iter < parts_len - 1; iter++) {
-       nret = sprintf_s(buffer, sizeof(buffer), type, parts[iter]);
-       if (nret < 0) {
+       nret = snprintf(buffer, MAX_UINT_LEN + 1, type, parts[iter]);
+       if (nret < 0 || nret >= MAX_UINT_LEN + 1) {
            ERROR("Sprint failed");
            free(res_string);
            return NULL;
        }
-       if (strcat_s(res_string, result_len + 1, buffer) != EOK) {
-           ERROR("Strcat failed");
-           free(res_string);
-           return NULL;
-       }
-       if (strcat_s(res_string, result_len + 1, sep) != EOK) {
-           ERROR("Strcat failed");
-           free(res_string);
-           return NULL;
-       }
+       (void)strcat(res_string, buffer);
+       (void)strcat(res_string, sep);
    }
-   nret = sprintf_s(buffer, sizeof(buffer), type, parts[parts_len - 1]);
-   if (nret < 0) {
+   nret = snprintf(buffer, sizeof(buffer), type, parts[parts_len - 1]);
+   if (nret < 0 || nret >= MAX_UINT_LEN + 1) {
        ERROR("Sprint failed");
        free(res_string);
        return NULL;
    }
-   if (strcat_s(res_string, result_len + 1, buffer) != EOK) {
-       free(res_string);
-       ERROR("Strcat failed");
-       return NULL;
-   }
+   (void)strcat(res_string, buffer);
    return res_string;
}

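Each element is formatted into a fixed scratch buffer of MAX_UINT_LEN + 1 bytes and the snprintf result is checked for truncation before anything is concatenated. A minimal sketch of just that step; the MAX_UINT_LEN value here is an assumption for the example, not necessarily the project's definition:

#include <stdint.h>
#include <stdio.h>

#define MAX_UINT_LEN 3   /* assumed: enough digits for one uint8_t ("255") */

/* Format one uint8_t into a fixed scratch buffer sized MAX_UINT_LEN + 1.
 * Returns -1 if snprintf fails or the value would not fit. */
int format_uint8(char buffer[MAX_UINT_LEN + 1], uint8_t value)
{
    int nret = snprintf(buffer, MAX_UINT_LEN + 1, "%u", (unsigned int)value);

    if (nret < 0 || nret >= MAX_UINT_LEN + 1) {
        return -1;   /* would have been truncated */
    }
    return 0;
}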
@ -475,19 +457,12 @@ static int do_util_grow_array(char ***orig_array, size_t *orig_capacity, size_t
        add_capacity += increment;
    }
    if (add_capacity != *orig_capacity) {
-       if (add_capacity > (SIZE_MAX / sizeof(void *))) {
-           return -1;
-       }
-       add_array = util_common_calloc_s(add_capacity * sizeof(void *));
+       add_array = util_smart_calloc_s(add_capacity, sizeof(void *));
        if (add_array == NULL) {
            return -1;
        }
        if (*orig_array != NULL) {
-           if (memcpy_s(add_array, add_capacity * sizeof(void *),
-                        *orig_array, *orig_capacity * sizeof(void *)) != EOK) {
-               free(add_array);
-               return -1;
-           }
+           (void)memcpy(add_array, *orig_array, *orig_capacity * sizeof(void *));
            free((void *)*orig_array);
        }

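The grow-array path follows the same recipe as the other call sites: allocate the larger block through an overflow-aware calloc, copy the old entries, then free the old block. A self-contained sketch of that recipe, assuming only libc (grow_ptr_array is an illustrative name):

#include <stdlib.h>
#include <string.h>

/* Grow a pointer array to new_capacity entries.
 * Returns 0 on success, -1 on allocation failure. */
int grow_ptr_array(char ***array, size_t *capacity, size_t new_capacity)
{
    char **grown = NULL;

    if (new_capacity <= *capacity) {
        return 0; /* nothing to do */
    }
    /* calloc takes the count and element size separately, so the
     * new_capacity * sizeof(char *) product cannot silently wrap */
    grown = calloc(new_capacity, sizeof(char *));
    if (grown == NULL) {
        return -1;
    }
    if (*array != NULL) {
        (void)memcpy(grown, *array, *capacity * sizeof(char *));
        free(*array);
    }
    *array = grown;
    *capacity = new_capacity;
    return 0;
}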
@ -541,9 +516,7 @@ int util_validate_absolute_path(const char *path)
        return -1;
    }

-   if (memset_s(&regmatch, sizeof(regmatch_t), 0, sizeof(regmatch_t)) != EOK) {
-       return -1;
-   }
+   (void)memset(&regmatch, 0, sizeof(regmatch_t));

    return do_util_validate_absolute_path(path, &regmatch);
}
@ -583,9 +556,7 @@ int util_validate_name(const char *name)
        return -1;
    }

-   if (memset_s(&regmatch, sizeof(regmatch_t), 0, sizeof(regmatch_t)) != EOK) {
-       return -1;
-   }
+   (void)memset(&regmatch, 0, sizeof(regmatch_t));

    return do_util_validate_name(name, &regmatch);
}
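memset has no failure mode for a valid object, so the EOK branch that memset_s required simply goes away when zeroing the regmatch_t. A trivial sketch of the equivalent initialization (init_regmatch is an illustrative name):

#include <regex.h>
#include <string.h>

/* Zero a regmatch_t before use. memset cannot fail for a valid object,
 * so there is no error path to propagate; a "regmatch_t m = { 0 };"
 * initializer at the declaration would do the same job. */
void init_regmatch(regmatch_t *m)
{
    (void)memset(m, 0, sizeof(*m));
}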
10 src/utils.h
@ -27,12 +27,22 @@

#define DEFAULT_SECURE_DIRECTORY_MODE 0750

+#if __WORDSIZE == 64
+/* current max user memory for 64-machine is 2^47 B */
+#define MAX_MEMORY_SIZE ((size_t)1 << 47)
+#else
+/* current max user memory for 32-machine is 2^31 B */
+#define MAX_MEMORY_SIZE ((size_t)1 << 31)
+#endif
+
bool is_null_or_empty(const char *str);

size_t util_array_len(const char * const *array);

void util_free_array(char **array);

+void *util_smart_calloc_s(size_t count, size_t unit_size);
+
void *util_common_calloc_s(size_t size);

ssize_t util_write_nointr(int fd, const void *buf, size_t count);
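MAX_MEMORY_SIZE is keyed off __WORDSIZE (a glibc convention) so the allocation cap stays far below SIZE_MAX on both 32-bit and 64-bit builds, which is what lets the "count > MAX_MEMORY_SIZE / unit_size" test in util_smart_calloc_s catch oversized requests before the multiplication could wrap. A small sketch that mirrors the conditional, assuming a glibc toolchain:

#include <stddef.h>
#include <stdint.h>   /* pulls in __WORDSIZE on glibc */
#include <stdio.h>

#if __WORDSIZE == 64
#define MAX_MEMORY_SIZE ((size_t)1 << 47)
#else
#define MAX_MEMORY_SIZE ((size_t)1 << 31)
#endif

int main(void)
{
    /* the cap sits well below SIZE_MAX on either word size */
    printf("allocation cap: %zu bytes\n", MAX_MEMORY_SIZE);
    return 0;
}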
0 tools/static_check Normal file → Executable file
0 update-version.bash Normal file → Executable file