Skip to content

Commit

Permalink
improve csv
Browse files Browse the repository at this point in the history
  • Loading branch information
xianjimli committed Dec 24, 2023
1 parent da15274 commit fa08dd1
Show file tree
Hide file tree
Showing 4 changed files with 31 additions and 5 deletions.
1 change: 1 addition & 0 deletions docs/changes.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
2023/12/24
* 完善data reader
* data\_reader\_http 支持 chunked data
* CSV 支持设置列名,通过列名获取数据。

2023/12/23
* 增加函数path\_expand\_vars
Expand Down
12 changes: 12 additions & 0 deletions src/csv/csv_file.h
Original file line number Diff line number Diff line change
Expand Up @@ -495,6 +495,18 @@ const char* csv_row_get(csv_row_t* row, uint32_t col);
*/
ret_t csv_row_set(csv_row_t* row, uint32_t col, const char* value);

/**
 * @method csv_row_get_col
 *
 * Look up the column index of the column with the given name.
 *
 * @param {csv_row_t*} row The row object (typically the header/fields row).
 * @param {const char*} value The column name to search for.
 *
 * @return {int32_t} The column index; presumably negative when the name is
 * not found — callers test `col < 0` after this call. TODO confirm against
 * the implementation in csv_file.c.
 */
int32_t csv_row_get_col(csv_row_t* row, const char* value);

/*public for test*/
ret_t csv_row_init(csv_row_t* row, char* buff, uint32_t size, bool_t should_free_buff);
ret_t csv_row_reset(csv_row_t* row);
Expand Down
19 changes: 14 additions & 5 deletions src/csv/csv_file_object.c
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,13 @@ typedef struct _csv_path_t {
const char* col_name;
} csv_path_t;

static ret_t csv_path_parse_impl(csv_path_t* path, csv_file_t* csv, const char* name) {
static ret_t csv_path_parse_impl(csv_file_object_t* o, csv_path_t* path, const char* name) {
const char* p = name;
return_value_if_fail(path != NULL && csv != NULL && name != NULL, RET_BAD_PARAMS);
csv_file_t* csv = NULL;
return_value_if_fail(path != NULL && o != NULL && o->csv != NULL && name != NULL, RET_BAD_PARAMS);

csv = o->csv;
memset(path, 0x00, sizeof(*path));

while (tk_isspace(*p)) p++;

if (tk_isdigit(*p)) {
Expand Down Expand Up @@ -75,7 +76,11 @@ static ret_t csv_path_parse_impl(csv_path_t* path, csv_file_t* csv, const char*
} else if (tk_isdigit(*p)) {
path->col = tk_atoi(p);
} else {
path->col = csv_file_get_col_of_name(csv, p);
if(o->fields.size > 0) {
path->col = csv_row_get_col(&(o->fields), p);
} else {
path->col = csv_file_get_col_of_name(csv, p);
}
if (path->col < 0) {
return_value_if_fail(tk_isdigit(p[0]), RET_BAD_PARAMS);
path->col = tk_atoi(p);
Expand Down Expand Up @@ -119,7 +124,7 @@ static ret_t csv_path_parse_ex(csv_file_object_t* o, csv_path_t* path, const cha
ret_t ret = RET_BAD_PARAMS;
return_value_if_fail(o != NULL && path != NULL && name != NULL, RET_BAD_PARAMS);

ret = csv_path_parse_impl(path, o->csv, name);
ret = csv_path_parse_impl(o, path, name);
if (ret == RET_OK) {
if (o->rows_map != NULL) {
uint32_t index = path->row;
Expand Down Expand Up @@ -161,6 +166,9 @@ static ret_t csv_file_object_set_prop(tk_object_t* obj, const char* name, const

if (tk_str_start_with(name, CSV_QUERY_PREFIX)) {
return tk_object_set_prop(o->query_args, name, v);
} else if(tk_str_eq(name, CSV_PROP_COL_NAMES)) {
csv_row_set_data(&(o->fields), value_str(v), o->csv->sep);
return RET_OK;
}

rows = csv_file_get_rows(o->csv);
Expand Down Expand Up @@ -395,6 +403,7 @@ static ret_t csv_file_object_destroy(tk_object_t* obj) {
csv_file_object_t* o = CSV_FILE_OBJECT(obj);
return_value_if_fail(o != NULL, RET_BAD_PARAMS);

csv_row_reset(&(o->fields));
csv_file_destroy(o->csv);
o->csv = NULL;
TK_OBJECT_UNREF(o->query_args);
Expand Down
4 changes: 4 additions & 0 deletions src/csv/csv_file_object.h
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,8 @@ typedef struct _csv_file_object_t {
bool_t is_dirty;
csv_filter_object_check_new_row_t check_new_row;
void* check_new_row_ctx;

csv_row_t fields;
} csv_file_object_t;

/**
Expand Down Expand Up @@ -244,6 +246,8 @@ csv_file_object_t* csv_file_object_cast(tk_object_t* obj);
/* Command/argument string constants for query operations.
 * NOTE(review): exact query semantics are defined elsewhere in this module — verify there. */
#define CSV_CMD_QUERY "query"
#define CSV_CMD_QUERY_ARG_CLEAR "clear"

/* Property name: when set on a csv_file_object, its string value is split by the
 * CSV separator and stored as the object's fields row, enabling column lookup by name. */
#define CSV_PROP_COL_NAMES "col_names"

END_C_DECLS

#endif /*TK_CSV_FILE_OBJECT_H*/

0 comments on commit fa08dd1

Please sign in to comment.