@@ -117,7 +117,8 @@ def main():
 
     z = zipfile.ZipFile(io.BytesIO(x.content))
     # z.extractall('/app')
-    log_folder = '/Users/ykoer/Workspace/ykoer/github-actions-example-workflows/.github/actions/log_to_splunk/tmp'
+    #log_folder = '/Users/ykoer/Workspace/ykoer/github-actions-example-workflows/.github/actions/log_to_splunk/tmp'
+    log_folder = '/tmp'
     z.extractall(log_folder)
 
     timestamp = batch = count = 0
@@ -134,19 +135,19 @@ def main():
             count += 1
             if timestamp:
                 t2 = timestamp
-            timestamp = re.search("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z",line.strip())
+            timestamp = re.search(r"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+\.\d+Z", line.strip())
 
             if timestamp:
-                timestamp = re.sub("\dZ","",timestamp.group())
+                timestamp = re.sub(r"\dZ","",timestamp.group())
                 timestamp = datetime.strptime(timestamp,"%Y-%m-%dT%H:%M:%S.%f")
                 timestamp = (timestamp - datetime(1970,1,1)).total_seconds()
             else:
                 timestamp = t2
 
             # find empty lines and skip them
-            x = re.sub("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z","",line.strip())
+            x = re.sub(r"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z","",line.strip())
             x = x.strip()
-            job_name = re.search("\/\d+\_(?P<job>.*)\.txt",name)
+            job_name = re.search(r"\/\d+\_(?P<job>.*)\.txt",name)
             job_name = job_name.group('job')
             fields = {'github_run_id':GITHUB_RUN_ID,'github_workflow_id':GITHUB_WORKFLOWID,'github_job_name':job_name,'line_number':count}
             if x:
@@ -158,18 +159,20 @@ def main():
 
             # push every 1000 log lines to splunk as a batch
             if batch >= 1000:
+                print(f'log_file={name}, batch_number={batch_number}, line_number={count}')
                 batch = 0
 
-                x = requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
-                print(f'log_file={name}, batch_number={batch_number}, line_number={count}, request_status_code:{x.status_code}')
+                # x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
+                # print(f'log_file={name}, batch_number={batch_number}, line_number={count}, request_status_code:{x.status_code}')
                 eventBatch = ""
                 batch_number += 1
                 break
 
         # push the last batch
         if batch > 0:
-            x = requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
-            print(f'log_file={name}, batch_number={batch_number}, line_number={count}, request_status_code:{x.status_code}')
+            print(f'log_file={name}, batch_number={batch_number}, line_number={count}')
+            # x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
+            # print(f'log_file={name}, batch_number={batch_number}, line_number={count}, request_status_code:{x.status_code}')
             eventBatch = ""
             batch_number += 1
 
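For reference, the call this commit comments out follows the usual Splunk HTTP Event Collector batch pattern: concatenated JSON event objects POSTed with an "Authorization: Splunk <token>" header. Below is a minimal standalone sketch of that pattern; the endpoint URL, token, and sample events are placeholders standing in for the script's SPLUNK_HEC_URL, headers, and accumulated eventBatch, not values taken from the workflow.

import json
import requests

# Placeholder values; in the action these would come from configuration,
# not be hard-coded.
SPLUNK_HEC_URL = "https://splunk.example.com:8088/services/collector/event"
SPLUNK_HEC_TOKEN = "00000000-0000-0000-0000-000000000000"

headers = {"Authorization": f"Splunk {SPLUNK_HEC_TOKEN}"}

# HEC accepts a batch as concatenated JSON event objects, which is what the
# script builds up in its eventBatch string (one object per log line, with
# an epoch "time" and extra "fields" metadata).
events = [
    {"time": 1700000000.0, "event": "first log line", "fields": {"line_number": 1}},
    {"time": 1700000001.0, "event": "second log line", "fields": {"line_number": 2}},
]
eventBatch = "".join(json.dumps(e) for e in events)

# Send the batch and report the HTTP status, mirroring the commented-out lines.
x = requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers, timeout=30)
print(f"request_status_code:{x.status_code}")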