Skip to content

Commit 5a3e53d

Browse files
committed
wip
1 parent a78d7f4 commit 5a3e53d

File tree

4 files changed

+87
-0
lines changed

4 files changed

+87
-0
lines changed
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
# syntax=docker/dockerfile:1
# Container image that runs the Splunk HEC log-upload action.
# python:3.8-slim-buster is end-of-life (Python 3.8 and Debian buster are
# both EOL), so use a supported slim base instead.
FROM python:3.12-slim-bookworm

# Install the one runtime dependency at build time so the action does not
# need to reach PyPI on every workflow run (entrypoint.sh's own install
# then becomes a fast no-op).
RUN pip install --no-cache-dir requests==2.32.3

# Copy the action entrypoint into the image root and make sure it is
# executable regardless of the checked-in file mode.
COPY --chmod=0755 entrypoint.sh /entrypoint.sh

# Script executed when the container starts up.
ENTRYPOINT ["/entrypoint.sh"]
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# POST GitHub Workflow Logs to Splunk HTTP Event Collector

A Docker-based GitHub Action that reads a workflow job log file and posts each log line as an event to a Splunk HTTP Event Collector (HEC) endpoint.
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# action.yml — metadata for the "Post Logs to Splunk HEC" Docker action.
# The five inputs are forwarded positionally to entrypoint.sh via `args`,
# so their order here must match what the entrypoint expects ($1..$5).
name: 'Post Logs to Splunk HEC'
description: 'Upload GitHub Workflow logs to Splunk HTTP Event Collector (HEC)'
inputs:
  splunk-url:
    description: 'Full URL for the Splunk HEC endpoint'
    required: true
  hec-token:
    description: 'Splunk HEC Token'
    required: true
  sourcetype:
    description: 'Splunk Sourcetype'
    required: false
    default: 'github_workflow_log_job'
  source:
    description: 'GitHub Workflow name'
    required: false
    # Quoted so YAML never tries to interpret the expression syntax.
    default: '${{ github.workflow }}'
  workflowID:
    description: 'The Workflow Run number'
    required: false
    # Was `${{ github.run_id}}` — missing space before the closing braces.
    default: '${{ github.run_id }}'
outputs:
  status:
    description: 'value is success/fail based on app inspect result'
runs:
  using: 'docker'
  image: 'Dockerfile'
  args:
    - ${{ inputs.splunk-url }}
    - ${{ inputs.hec-token }}
    - ${{ inputs.sourcetype }}
    - ${{ inputs.source }}
    - ${{ inputs.workflowID }}
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
#!/bin/sh -l
# entrypoint.sh — POST GitHub workflow logs to a Splunk HTTP Event Collector.
#
# Reads `file.log` from the current working directory (the action workspace)
# and sends each log line as one HEC event, batched 1000 events per request.
#
# Positional arguments (supplied by action.yml `args`, in order):
#   $1  Splunk HEC base URL (with or without a trailing slash)
#   $2  HEC token
#   $3  Splunk sourcetype
#   $4  source (workflow name)
#   $5  workflow run ID
set -e

# No-op if the image already bundles requests; kept for robustness.
python3 -m pip install --quiet requests

# The Python program is fed on stdin through a *quoted* heredoc so the shell
# never interpolates into the code (the original `echo "..." > t.py` spliced
# the raw token and arguments into Python source — a quoting/injection
# hazard — and left t.py behind). Arguments travel safely via sys.argv.
python3 - "$1" "$2" "$3" "$4" "$5" <<'PYEOF'
import json
import os
import re
import sys
from datetime import datetime

import requests

hec_base, token, sourcetype, source, workflow_id = sys.argv[1:6]

# Build the collector URL whether or not the base URL ends with a slash
# (the original blindly concatenated and required a trailing slash).
url = hec_base.rstrip("/") + "/services/collector/event"
headers = {"Authorization": "Splunk " + token}
host = os.environ.get("HOSTNAME", "")

# Leading ISO-8601 UTC timestamp as emitted in GitHub workflow logs.
# Raw strings avoid the invalid "\d" escape sequences of the original.
TS_RE = re.compile(r"\d{4}-\d{2}-\d{2}T\d+:\d+:\d+\.\d+Z")


def to_epoch(stamp):
    """Convert an ISO timestamp like 2021-01-02T03:04:05.1234567Z to epoch
    seconds. The fraction is truncated to 6 digits because %f accepts at
    most microseconds (the original stripped exactly one digit, which only
    worked for 7-digit fractions)."""
    head, frac = stamp[:-1].split(".")  # drop trailing 'Z', split fraction
    parsed = datetime.strptime(head + "." + frac[:6], "%Y-%m-%dT%H:%M:%S.%f")
    return (parsed - datetime(1970, 1, 1)).total_seconds()


def post_batch(payload):
    """Send one batch of concatenated HEC event JSON; fail loudly on error
    (the original ignored the HTTP status entirely)."""
    response = requests.post(url, data=payload, headers=headers)
    response.raise_for_status()


batch_size = 0
event_batch = ""  # Splunk HEC accepts concatenated JSON event objects.

# file.log is expected in the working directory — TODO confirm against the
# workflow step that downloads the job log.
with open(os.path.join(os.getcwd(), "file.log"), "r") as logfile:
    for count, raw_line in enumerate(logfile, start=1):
        line = raw_line.strip()
        match = TS_RE.search(line)
        if match is None:
            # The original crashed with AttributeError here; skip instead.
            print("skipped line " + str(count))
            continue
        epoch = to_epoch(match.group())
        message = TS_RE.sub("", line).strip()
        if not message:
            print("skipped line " + str(count))
            continue
        event = {
            "event": message,
            "sourcetype": sourcetype,
            "source": source,
            "host": host,
            "time": epoch,
            "fields": {"lineNumber": count, "workflowID": workflow_id},
        }
        event_batch += json.dumps(event)
        batch_size += 1
        if batch_size >= 1000:
            post_batch(event_batch)
            batch_size = 0
            event_batch = ""

# Flush the remainder — but only if there is one (the original always
# posted, even an empty body).
if event_batch:
    post_batch(event_batch)
PYEOF

0 commit comments

Comments
 (0)