Squashed 'data_analytics' changes from 1e7bfcc..d4e5e54 (#698)
d4e5e54 Merge pull request Xilinx#405 from RepoOps/update_makefile_20220908-222449
03f4bf7 Merge pull request Xilinx#406 from leol/json-parser-enhance
7e46545 Align producer/consumer(s) write/read operations to the same cycle
760f198 Merge remote-tracking branch 'upstream/next' into json-parser-enhance
8edfc57 update Makefile
3015528 remove HOST_ARCH from docs (Xilinx#404)
3362f73 Fix typo
7d76413 Merge pull request Xilinx#403 from leol/json-parser-enhance
2797903 Fix typo
69a7798 Doc for enhanced JSON parser
9bd6e67 Merge pull request Xilinx#402 from Zhenhong/next
85be3c2 update release notes
39c66fe Merge pull request Xilinx#401 from liyuanz/next
b5383a7 Merge pull request Xilinx#400 from sibow/CR-1128650
2db9c59 update mk
7fac0c4 CR-1137153: fix undefined reference errors (Xilinx#399)
abb1432 specify SWIG_LIB path
77bf014 Merge pull request Xilinx#395 from leol/json-parser-enhance
b586c85 Merge pull request Xilinx#398 from liyuanz/add_time
2862751 Shrink test scale for resolving timeout issue
1429edd Merge remote-tracking branch 'upstream/next' into json-parser-enhance
7155054 Support array on leaf-node for JSON parser
d6c1c3a add time
58a7883 add time
8562291 Merge pull request Xilinx#393 from sibow/string_apis
a3c4d38 fix bug and add comments
b13770a remove dup include file (Xilinx#397)
f6b04f9 Merge remote-tracking branch 'upstream/next' into json-parser-enhance
61af200 Fix for CR-1137154
a0f382a revert Makefile (Xilinx#396)
55e8f0d update docs in next branch for portal (Xilinx#394)
64a71b6 Refactor code to utilize template parameters for jsonParser
6012fb5 Merge remote-tracking branch 'upstream/next' into json-parser-enhance
196ebec Enhance JSON parser to support nested field
08b5e1e modify description
3019c6b fix bug
b54a081 add string like and testcase
b317d0c rename project
045dbca add testcase for string in/not_in
ef6abaa fix bug
d4ec31c add random input string
534f0f8 refactor
7a4b3bb Merge pull request Xilinx#391 from leol/json-parser-enhance
4d50dfb add testcase for string equal
549498d init commit for string EQ/NE/IN
4094b3a Merge pull request Xilinx#390 from sibow/fix_CR1135034
1ccc65f Merge branch 'next' into fix_CR1135034
b095b3e Update Makefile
32c6bbc Trigger auto regression test
141953f Merge remote-tracking branch 'upstream/next' into json-parser-enhance
ed3d71b update makefile (Xilinx#392)
d0129c5 Refactor object definition to use the same base object
816b9ab Fix bug in readSchema of JSON parser
ff31988 modify description.json to pass checker test
677d736 22.2 mks (Xilinx#388)
3c38777 Merge pull request Xilinx#386 from shengl/next
f4126ee revert version
846eb9a update to 22.1 shell
40190b6 Merge pull request Xilinx#383 from leol/port-to-u50
26d5b7a Modify error message
aef55ec Remove O_DIRECT flag for non-p2p scenario
0bd9747 Add checker for pread + eliminate redundant fopen
efe55b5 Avoid overwrite for non-nvme device
23cfb5d Add macro guard for disabling P2P in other non-U2 devices
d68ee86 Port CSV scanner to U50
f3f0f8d Merge pull request Xilinx#380 from changg/132.patch_next
7d2b8f6 132.patch
45581e6 fix readme (Xilinx#378)
826c982 Merge remote-tracking branch 'upstream/next' into next
c431767 Merge pull request Xilinx#372 from leol/add-readme
940aa93 Doc for L3 CSV scanner
32be889 Fix issues in old doc
b166509 change 2022.1_stable_latest to 2022.2_stable_latest
304cd6c fix u2 name confusing (Xilinx#369)
5d14a17 syntax error (Xilinx#370)
8698138 add known issue for log analyzer

Co-authored-by: sdausr <[email protected]>
2 people authored and GitHub Enterprise committed Sep 16, 2022
1 parent 3393858 commit d172d34
Showing 213 changed files with 8,143 additions and 3,367 deletions.
2 changes: 1 addition & 1 deletion data_analytics/Jenkinsfile
@@ -1,4 +1,4 @@
@Library('pipeline-library')_
VitisLibPipeline (branch: 'next', libname: 'xf_DataAnalytics', TARGETS: 'hls_csim:hls_csynth:hls_cosim:vitis_sw_emu:vitis_hw_emu:vitis_hw_build',
upstream_dependencies: 'xf_utils_hw,next,../utils; xf_compression,next,../data_compression; xf_security,next,../security; xf_graph,next,../graph; xf_database,next,../database',
devtest: 'RunDeploy.sh', TOOLVERSION: '2022.1_stable_latest')
devtest: 'RunDeploy.sh', TOOLVERSION: '2022.2_stable_latest')
@@ -29,79 +29,64 @@ namespace xf {
namespace data_analytics {
namespace dataframe {

struct Object : private ap_uint<88> {
// data: ap_uint<64>, the object stream data, maximum 64bits
void set_data(ap_uint<64> data) { ap_uint<88>::range(63, 0) = data; }
template <int FILE_W, int ARRAY_W, int FIELD_W, int TYPE_W, int VALID_W, int DATA_W>
struct ObjectBase : private ap_uint<FILE_W + ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W> {
// data: the object stream data
void set_data(ap_uint<DATA_W> data) { this->range(DATA_W - 1, 0) = data; }

// field ID: ap_uint<16>, indicates the col field, maximum supporting 256 fields
void set_id(ap_uint<16> data) { ap_uint<88>::range(79, 64) = data; }
// valid: 0 for null, non-zero for the number of valid bytes from LSB
void set_valid(ap_uint<VALID_W> data) { this->range(VALID_W + DATA_W - 1, DATA_W) = data; }

// valid: ap_uint<4>, 0 for null, non-zero for the number of valid byte from LSB
void set_valid(ap_uint<4> data) { ap_uint<88>::range(83, 80) = data; }

// type: ap_uint<4>, 0000-boolean, 0001-int64, 0010-float, 0011-double, 0100-date, 0101-string
// flag: 1101-end of json line, 1110-end of column, 1111-end of file
void set_type(ap_uint<4> data) { ap_uint<88>::range(87, 84) = data; }

ap_uint<64> get_data() { return ap_uint<88>::range(63, 0); }
ap_uint<16> get_id() { return ap_uint<88>::range(79, 64); }
ap_uint<4> get_valid() { return ap_uint<88>::range(83, 80); }
ap_uint<4> get_type() { return ap_uint<88>::range(87, 84); }
ap_uint<88> get_all() { return ap_uint<88>::range(87, 0); }
};

#define OBJ_W 73

struct ObjectAlter1 : private ap_uint<OBJ_W> {
// data: ap_uint<64>, the object stream data, maximum 64bits
void set_data(ap_uint<64> data) { ap_uint<OBJ_W>::range(63, 0) = data; }

// field ID: ap_uint<16>, indicates the col field, maximum supporting 256 fields
void set_id(ap_uint<1> data) { ap_uint<OBJ_W>::range(64, 64) = data; }

// valid: ap_uint<4>, 0 for null, non-zero for the number of valid byte from LSB
void set_valid(ap_uint<4> data) { ap_uint<OBJ_W>::range(68, 65) = data; }

// type: ap_uint<4>, 0000-boolean, 0001-int64, 0010-float, 0011-double, 0100-date, 0101-string
// type: 0000-boolean, 0001-int64, 0010-float, 0011-double, 0100-date, 0101-string
// flag: 1101-end of json line, 1110-end of column, 1111-end of file
void set_type(ap_uint<4> data) { ap_uint<OBJ_W>::range(72, 69) = data; }
void set_all(ap_uint<OBJ_W> data) { ap_uint<OBJ_W>::range(72, 0) = data; }

ap_uint<64> get_data() { return ap_uint<OBJ_W>::range(63, 0); }
ap_uint<1> get_id() { return ap_uint<OBJ_W>::range(64, 64); }
ap_uint<4> get_valid() { return ap_uint<OBJ_W>::range(68, 65); }
ap_uint<4> get_type() { return ap_uint<OBJ_W>::range(72, 69); }
ap_uint<OBJ_W> get_all() { return ap_uint<OBJ_W>::range(72, 0); }
void set_type(ap_uint<TYPE_W> data) { this->range(TYPE_W + VALID_W + DATA_W - 1, VALID_W + DATA_W) = data; }

// field ID: indicates the col field, maximum supporting 2^FIELD_W fields
void set_id(ap_uint<FIELD_W> data) {
this->range(FIELD_W + TYPE_W + VALID_W + DATA_W - 1, TYPE_W + VALID_W + DATA_W) = data;
}

// offset of array: indicate the index of the element in each array, -1 stands for non-array value
// or end of the array, maximum supported length of array is 2^ARRAY_W - 1
void set_offset(ap_uint<ARRAY_W> data) {
this->range(ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1, FIELD_W + TYPE_W + VALID_W + DATA_W) = data;
}

void set_file(ap_uint<FILE_W> data) {
this->range(FILE_W + ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1,
ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W) = data;
}

void set_obj(ap_uint<ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W> data) {
this->range(ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1, 0) = data;
}

void set_all(ap_uint<FILE_W + ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W> data) {
this->range(FILE_W + ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1, 0) = data;
}

ap_uint<DATA_W> get_data() { return this->range(DATA_W - 1, 0); }
ap_uint<VALID_W> get_valid() { return this->range(VALID_W + DATA_W - 1, DATA_W); }
ap_uint<TYPE_W> get_type() { return this->range(TYPE_W + VALID_W + DATA_W - 1, VALID_W + DATA_W); }
ap_uint<FIELD_W> get_id() {
return this->range(FIELD_W + TYPE_W + VALID_W + DATA_W - 1, TYPE_W + VALID_W + DATA_W);
}
ap_uint<ARRAY_W> get_offset() {
return this->range(ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1, FIELD_W + TYPE_W + VALID_W + DATA_W);
}
ap_uint<FILE_W> get_file() {
return this->range(FILE_W + ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1,
ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W);
}
ap_uint<ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W> get_all() {
return this->range(ARRAY_W + FIELD_W + TYPE_W + VALID_W + DATA_W - 1, 0);
}
};

//#undef OBJ_W

#define OBJ_WF 77

struct ObjectFile : private ap_uint<OBJ_WF> {
// data: ap_uint<64>, the object stream data, maximum 64bits
void set_data(ap_uint<64> data) { ap_uint<OBJ_WF>::range(63, 0) = data; }

// field ID: ap_uint<16>, indicates the col field, maximum supporting 256 fields
void set_id(ap_uint<1> data) { ap_uint<OBJ_WF>::range(64, 64) = data; }

// valid: ap_uint<4>, 0 for null, non-zero for the number of valid byte from LSB
void set_valid(ap_uint<4> data) { ap_uint<OBJ_WF>::range(68, 65) = data; }

// type: ap_uint<4>, 0000-boolean, 0001-int64, 0010-float, 0011-double, 0100-date, 0101-string
// flag: 1101-end of json line, 1110-end of column, 1111-end of file
void set_type(ap_uint<4> data) { ap_uint<OBJ_WF>::range(72, 69) = data; }
void set_file(ap_uint<4> data) { ap_uint<OBJ_WF>::range(76, 73) = data; }
void set_obj(ap_uint<OBJ_W> data) { ap_uint<OBJ_WF>::range(OBJ_W - 1, 0) = data; }
void set_all(ap_uint<OBJ_WF> data) { ap_uint<OBJ_WF>::range(OBJ_WF - 1, 0) = data; }

ap_uint<64> get_data() { return ap_uint<OBJ_WF>::range(63, 0); }
ap_uint<1> get_id() { return ap_uint<OBJ_WF>::range(64, 64); }
ap_uint<4> get_valid() { return ap_uint<OBJ_WF>::range(68, 65); }
ap_uint<4> get_type() { return ap_uint<OBJ_WF>::range(72, 69); }
ap_uint<4> get_file() { return ap_uint<OBJ_WF>::range(76, 73); }
ap_uint<OBJ_WF> get_all() { return ap_uint<OBJ_WF>::range(OBJ_WF - 1, 0); }
};
using Object = ObjectBase<0, 0, 16, 4, 4, 64>;
using ObjectAlter1 = ObjectBase<0, 0, 1, 4, 4, 64>;
using ObjectFile = ObjectBase<4, 0, 1, 4, 4, 64>;
using ObjectEx = ObjectBase<0, 4, 4, 4, 4, 64>;

} // end of dataframe namespace
} // end of data_analytics namespace
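
The refactor above collapses the three hand-written structs into a single ObjectBase template whose field widths are template parameters; the old layouts are recovered through the Object, ObjectAlter1, and ObjectFile aliases, while the new ObjectEx alias adds an array offset for the array-on-leaf-node support listed in the commit history. The sketch below is an illustrative host-side usage example, not part of the commit: the include path is assumed, the field values are arbitrary, and the type/valid encodings are taken from the comments in the diff.

```cpp
// Illustrative usage sketch (not from the commit): pack one int64 value into the
// refactored Object alias and one array element into ObjectEx, then read the
// fields back. Assumes Vitis HLS ap_int.h; the dataframe header path below is a
// guess at where the ObjectBase definitions from the diff live.
#include <ap_int.h>
#include <iostream>
#include "xf_data_analytics/dataframe/obj_interface.hpp" // assumed include path

using namespace xf::data_analytics::dataframe;

int main() {
    // Object = ObjectBase<0, 0, 16, 4, 4, 64>:
    // bit layout from LSB is data(64) | valid(4) | type(4) | id(16).
    Object o;
    o.set_data(ap_uint<64>(42)); // 8-byte payload
    o.set_valid(8);              // all 8 payload bytes are valid
    o.set_type(1);               // 0001 = int64, per the type encoding in the diff
    o.set_id(3);                 // column/field index 3

    std::cout << "data=" << o.get_data().to_uint64()
              << " valid=" << o.get_valid().to_uint()
              << " type=" << o.get_type().to_uint()
              << " id=" << o.get_id().to_uint() << std::endl;

    // ObjectEx = ObjectBase<0, 4, 4, 4, 4, 64> adds a 4-bit array offset, so a
    // leaf-node array element can carry its index within the array.
    ObjectEx e;
    e.set_data(ap_uint<64>(7));
    e.set_valid(8);
    e.set_type(1);
    e.set_id(2);
    e.set_offset(0); // first element of the array; -1 marks non-array / end of array

    std::cout << "offset=" << e.get_offset().to_uint() << std::endl;
    return 0;
}
```

Note that the accessors are member functions of a class template and are only instantiated when called, so aliases with an ARRAY_W or FILE_W of 0 (such as Object) stay valid as long as set_offset/get_offset or set_file/get_file are never used on them.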
