@@ -33,10 +33,11 @@ class SampleXdistGenerator:
         process_count (num): generate {no} process for execution
     """

-    def __init__(self, addon_path, config_path=None, process_count=4):
+    def __init__(self, addon_path, ingest_with_uuid, config_path=None, process_count=4):
         self.addon_path = addon_path
         self.process_count = process_count
         self.config_path = config_path
+        self.ingest_with_uuid = ingest_with_uuid

     def get_samples(self, store_events):
         """
@@ -67,7 +68,7 @@ def get_samples(self, store_events):
                         store_sample = pickle.load(file_obj)
                 else:
                     sample_generator = SampleGenerator(
-                        self.addon_path, self.config_path
+                        self.addon_path, self.ingest_with_uuid, self.config_path
                     )
                     tokenized_events = list(sample_generator.get_samples())
                     store_sample = {
@@ -79,7 +80,7 @@ def get_samples(self, store_events):
                     with open(file_path, "wb") as file_obj:
                         pickle.dump(store_sample, file_obj)
         else:
-            sample_generator = SampleGenerator(self.addon_path, self.config_path)
+            sample_generator = SampleGenerator(self.addon_path, self.ingest_with_uuid, self.config_path)
             tokenized_events = list(sample_generator.get_samples())
             store_sample = {
                 "conf_name": SampleGenerator.conf_name,
@@ -125,6 +126,7 @@ def store_events(self, tokenized_events):
125126 "sourcetype" : each_event .metadata .get ("sourcetype" ),
126127 "timestamp_type" : each_event .metadata .get ("timestamp_type" ),
127128 "input_type" : each_event .metadata .get ("input_type" ),
129+ "ingest_with_uuid" : self .ingest_with_uuid ,
128130 "expected_event_count" : expected_count ,
129131 "index" : each_event .metadata .get ("index" , "main" ),
130132 },
@@ -137,14 +139,19 @@ def store_events(self, tokenized_events):
                         }
                     ],
                 }
+                if self.ingest_with_uuid == "true":
+                    tokenized_samples_dict[each_event.sample_name]["events"][0]["unique_identifier"] = each_event.unique_identifier
             else:
-                tokenized_samples_dict[each_event.sample_name]["events"].append(
-                    {
+                sample_event = {
                     "event": each_event.event,
                     "key_fields": each_event.key_fields,
                     "time_values": each_event.time_values,
                     "requirement_test_data": each_event.requirement_test_data,
                 }
+                if self.ingest_with_uuid == "true":
+                    sample_event["unique_identifier"] = each_event.unique_identifier
+                tokenized_samples_dict[each_event.sample_name]["events"].append(
+                    sample_event
                 )

             for sample_name, tokenized_sample in tokenized_samples_dict.items():
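
For context, a minimal usage sketch of the changed constructor (not part of the commit; the argument values are placeholders, and the only grounded detail is that the new code compares `ingest_with_uuid` against the string "true"):

    # hypothetical caller; paths and counts are illustrative only
    generator = SampleXdistGenerator(
        addon_path="/path/to/addon",   # existing required argument
        ingest_with_uuid="true",       # new argument added by this commit ("true"/"false" string)
        config_path=None,
        process_count=4,
    )
    # store_events=True takes the branch that pickles and stores the tokenized events
    generator.get_samples(store_events=True)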