import os
import requests
import json
import zipfile
import io
import glob
import re
from datetime import datetime


def main():
    print("######################")
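
    # Runner-provided environment plus this action's inputs
    # (INPUT_* variables come from the step's `with:` block).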
    GITHUB_REF = os.environ["GITHUB_REF"]
    GITHUB_REPOSITORY = os.environ["GITHUB_REPOSITORY"]
    GITHUB_RUN_ID = os.environ["GITHUB_RUN_ID"]
    GITHUB_API_URL = os.environ["GITHUB_API_URL"]
    GITHUB_WORKFLOWID = os.environ["INPUT_WORKFLOW_ID"]
    GITHUB_TOKEN = os.environ.get("INPUT_GITHUB_TOKEN")
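
    # Splunk HTTP Event Collector endpoint and event metadata.
    # Note: the concatenation below assumes INPUT_SPLUNK_URL ends with a trailing slash.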
    SPLUNK_HEC_URL = os.environ["INPUT_SPLUNK_URL"] + "services/collector/event"
    SPLUNK_HEC_TOKEN = os.environ["INPUT_HEC_TOKEN"]
    SPLUNK_SOURCE = os.environ["INPUT_SOURCE"]
    SPLUNK_SOURCETYPE = os.environ["INPUT_SOURCETYPE"]

    batch = count = 0
    eventBatch = ""
    headers = {"Authorization": "Splunk " + SPLUNK_HEC_TOKEN}
    host = os.uname()[1]
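
    # Summary endpoint for the workflow run identified by INPUT_WORKFLOW_ID.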
    summary_url = "{url}/repos/{repo}/actions/runs/{run_id}".format(url=GITHUB_API_URL, repo=GITHUB_REPOSITORY, run_id=GITHUB_WORKFLOWID)

    print(f"GITHUB_REF: {GITHUB_REF}")
    print(f"GITHUB_REPOSITORY: {GITHUB_REPOSITORY}")
    print(f"GITHUB_RUN_ID: {GITHUB_RUN_ID}")
    print(f"GITHUB_API_URL: {GITHUB_API_URL}")
    print(f"GITHUB_WORKFLOWID: {GITHUB_WORKFLOWID}")
    print("GITHUB_TOKEN: ***")  # avoid echoing the token to the job log
    print(f"SPLUNK_HEC_URL: {SPLUNK_HEC_URL}")
    print("SPLUNK_HEC_TOKEN: ***")  # avoid echoing the token to the job log
    print(f"SPLUNK_SOURCE: {SPLUNK_SOURCE}")
    print(f"SPLUNK_SOURCETYPE: {SPLUNK_SOURCETYPE}")
    print(f"host: {host}")
    print(f"summary_url: {summary_url}")
    print("######################")
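
    # Fetch the run summary from the GitHub REST API
    # (GET /repos/{owner}/{repo}/actions/runs/{run_id}).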
    try:
        x = requests.get(summary_url, stream=True, auth=('token', GITHUB_TOKEN))
        x.raise_for_status()
    except requests.exceptions.HTTPError as errh:
        output = "GITHUB API Http Error:" + str(errh)
        print(f"Error: {output}")
        print(f"::set-output name=result::{output}")
        return x.status_code
    except requests.exceptions.ConnectionError as errc:
        output = "GITHUB API Error Connecting:" + str(errc)
        print(f"Error: {output}")
        print(f"::set-output name=result::{output}")
        return 1  # no response object exists here, so don't reference x
    except requests.exceptions.Timeout as errt:
        output = "GITHUB API Timeout Error:" + str(errt)
        print(f"Error: {output}")
        print(f"::set-output name=result::{output}")
        return 1
    except requests.exceptions.RequestException as err:
        output = "GITHUB API uncaught error connecting:" + str(err)
        print(f"Error: {output}")
        print(f"::set-output name=result::{output}")
        return 1
    except Exception as e:
        print("Internal error", e)
        return 1

    summary = x.json()
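
    # Flatten the nested repository objects into simple string fields before indexing.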
    summary.pop('repository')
    summary["repository"] = summary["head_repository"]["name"]
    summary["repository_full"] = summary["head_repository"]["full_name"]
    summary.pop('head_repository')
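
    # Use the run's `updated_at` timestamp (UTC) as the event time, converted to epoch seconds.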
    utc_time = datetime.strptime(summary["updated_at"], "%Y-%m-%dT%H:%M:%SZ")
    epoch_time = (utc_time - datetime(1970, 1, 1)).total_seconds()
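
    # Wrap the summary in the HEC event envelope and send it as a single event.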
    event = {'event': json.dumps(summary), 'sourcetype': SPLUNK_SOURCETYPE, 'source': 'workflow_summary', 'host': host, 'time': epoch_time}
    event = json.dumps(event)

    x = requests.post(SPLUNK_HEC_URL, data=event, headers=headers)

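    # Logs archive endpoint for the same run; the download/forwarding logic below is currently commented out.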
    url = "{url}/repos/{repo}/actions/runs/{run_id}/logs".format(url=GITHUB_API_URL, repo=GITHUB_REPOSITORY, run_id=GITHUB_WORKFLOWID)
    print(url)

    # try:
    #     x = requests.get(url, stream=True, auth=('token',GITHUB_TOKEN))

    # except requests.exceptions.HTTPError as errh:
    #     output = "GITHUB API Http Error:" + str(errh)
    #     print(f"Error: {output}")
    #     print(f"::set-output name=result::{output}")
    #     return
    # except requests.exceptions.ConnectionError as errc:
    #     output = "GITHUB API Error Connecting:" + str(errc)
    #     print(f"Error: {output}")
    #     print(f"::set-output name=result::{output}")
    #     return
    # except requests.exceptions.Timeout as errt:
    #     output = "Timeout Error:" + str(errt)
    #     print(f"Error: {output}")
    #     print(f"::set-output name=result::{output}")
    #     return
    # except requests.exceptions.RequestException as err:
    #     output = "GITHUB API uncaught error connecting:" + str(err)
    #     print(f"Error: {output}")
    #     print(f"::set-output name=result::{output}")
    #     return

    # z = zipfile.ZipFile(io.BytesIO(x.content))
    # z.extractall('/app')

    # timestamp = batch = count = 0

    # for name in glob.glob('/app/*.txt'):
    #     logfile = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), name.replace('./','')),'r')
    #     Lines = logfile.readlines()
    #     for line in Lines:

    #         if line:
    #             count+=1
    #             if timestamp:
    #                 t2=timestamp
    #             timestamp = re.search("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z",line.strip())

    #             if timestamp:
    #                 timestamp = re.sub("\dZ","",timestamp.group())
    #                 timestamp = datetime.strptime(timestamp,"%Y-%m-%dT%H:%M:%S.%f")
    #                 timestamp = (timestamp - datetime(1970,1,1)).total_seconds()
    #             else:
    #                 timestamp=t2

    #             x = re.sub("\d{4}-\d{2}-\d{2}T\d+:\d+:\d+.\d+Z","",line.strip())
    #             x=x.strip()
    #             job_name=re.search("\/\d+\_(?P<job>.*)\.txt",name)
    #             job_name=job_name.group('job')
    #             fields = {'lineNumber':count,'workflowID':GITHUB_WORKFLOWID,'job':job_name}
    #             if x:
    #                 batch+=1
    #                 event={'event':x,'sourcetype':SPLUNK_SOURCETYPE,'source':SPLUNK_SOURCE,'host':host,'time':timestamp,'fields':fields}
    #                 eventBatch=eventBatch+json.dumps(event)
    #             else:
    #                 print("skipped line "+str(count))

    #             if batch>=1000:
    #                 batch=0
    #                 x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)
    #                 eventBatch=""

    # x=requests.post(SPLUNK_HEC_URL, data=eventBatch, headers=headers)

if __name__ == '__main__':
    main()