Commit: add groups

hunkom committed Dec 8, 2023
1 parent 0e3bcb0 commit 330937b
Showing 4 changed files with 161 additions and 31 deletions.
5 changes: 3 additions & 2 deletions Dockerfile
@@ -28,7 +28,7 @@ RUN add-apt-repository ppa:deadsnakes/ppa && apt-get update && \
    python -m pip install --upgrade pip && \
    apt-get clean && \
    python -m pip install setuptools==40.6.2 && \
    python -m pip install 'common==0.1.2' 'configobj==5.0.6' 'redis==3.2.0' 'argparse==1.4.0' && \
    python -m pip install 'common==0.1.2' 'configobj==5.0.6' 'redis==3.2.0' 'argparse==1.4.0' 'watchdog' && \
    rm -rf /tmp/*

RUN pip install git+https://github.com/carrier-io/perfreporter.git
@@ -71,7 +71,7 @@ ENV M2_HOME='/opt/apache-maven-3.6.3'
ENV PATH="$M2_HOME/bin:$PATH"

RUN mvn -version

ENV PYTHONUNBUFFERED=1
RUN mkdir -p /opt/gatling/bin
RUN mkdir -p /opt/gatling/conf
RUN mkdir -p /opt/gatling/lib
@@ -81,6 +81,7 @@ COPY executor.sh /opt
RUN sudo chmod +x /opt/executor.sh
COPY post_processing/post_processor.py /opt/gatling/bin
COPY post_processing/downsampling.py /opt/gatling/bin
COPY post_processing/simulation_log_parser.py /opt/gatling/bin
COPY pre_processing/minio_reader.py /opt/gatling/bin
COPY pre_processing/minio_poster.py /opt/gatling/bin
COPY pre_processing/minio_args_poster.py /opt/gatling/bin
1 change: 1 addition & 0 deletions executor.sh
@@ -102,6 +102,7 @@ export tests_path=/opt/gatling
python /opt/gatling/bin/minio_reader.py
python /opt/gatling/bin/minio_additional_files_reader.py
python /opt/gatling/bin/downsampling.py -t $test_type -s $simulation_name -b ${build_id} -l ${lg_id} ${_influx_host} -p ${influx_port} -idb ${gatling_db} -en ${env} ${_influx_user} ${_influx_password} &
python /opt/gatling/bin/simulation_log_parser.py -t $test_type -s $simulation_name -b ${build_id} -l ${lg_id} ${_influx_host} -p ${influx_port} -idb ${gatling_db} -en ${env} ${_influx_user} ${_influx_password} &

cp /opt/gatling/conf/logback.xml /opt/gatling/src/test/resources/logback.xml

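For reference, the simulation_log_parser.py launched in the background above (and added later in this diff) keys on the tab-separated GROUP records that Gatling appends to simulation.log. A minimal sketch of the record layout it assumes, annotated with the field indices the parser reads (sample values are made up, and the exact layout can vary across Gatling versions):

    # Sketch: the GROUP record layout the new parser assumes. Field order is an
    # assumption based on Gatling's simulation.log format; verify for your Gatling version.
    sample = "GROUP\tcheckout_flow\t1701964800000\t1701964801250\t1250\tOK"

    fields = sample.split("\t")
    record = {
        "request_name": fields[1],                         # group name      -> _tmp[1]
        "time": int(fields[2]),                            # start, epoch ms -> _tmp[2]
        "response_time": int(fields[3]) - int(fields[2]),  # end - start     -> _tmp[3] - _tmp[2]
        "status": fields[5],                               # "OK" / "KO"     -> _tmp[5]
    }
    print(record)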
40 changes: 11 additions & 29 deletions post_processing/post_processor.py
@@ -3,8 +3,6 @@
import requests
from os import environ
import shutil
from perfreporter.post_processor import PostProcessor
from perfreporter.error_parser import ErrorLogParser
from os import environ


@@ -54,14 +52,13 @@ def update_test_status():
args = get_args()
if environ.get("report_id"):
    args["report_id"] = environ.get("report_id")
logParser = ErrorLogParser(args)
try:
    aggregated_errors = logParser.parse_errors()
except Exception as e:
    aggregated_errors = {}
prefix = environ.get('DISTRIBUTED_MODE_PREFIX')
save_reports = True if environ.get('save_reports') == "True" else False
token = environ.get('token')
integrations = json.loads(environ.get("integrations", '{}'))
s3_config = integrations.get('system', {}).get('s3_integration', {})
print("s3_config")
print(s3_config)
if prefix:
    PROJECT_ID = environ.get('project_id')
    URL = environ.get('galloper_url')
@@ -70,29 +67,14 @@ def update_test_status():
        exit(0)

    # Make archive with gatling reports
    # path_to_reports = "/tmp/reports_" + prefix + "_" + str(args['lg_id'])
    # shutil.make_archive(path_to_reports, 'zip', RESULTS_FOLDER)

    # # Make archive with data for post processing
    # with open(DATA_FOR_POST_PROCESSING_FOLDER + "args.json", 'w') as f:
    #     f.write(json.dumps(args))
    # with open(DATA_FOR_POST_PROCESSING_FOLDER + "aggregated_errors.json", 'w') as f:
    #     f.write(json.dumps(aggregated_errors))
    # path_to_test_results = "/tmp/" + prefix + "_" + str(args['lg_id'])
    # shutil.make_archive(path_to_test_results, 'zip', DATA_FOR_POST_PROCESSING_FOLDER)
    path_to_reports = "/tmp/reports_test_results_" + environ.get("build_id") + "_" + str(args['lg_id'])
    shutil.make_archive(path_to_reports, 'zip', "/opt/gatling/target/gatling")

    # Send data to minio
    headers = {'Authorization': f'bearer {token}'} if token else {}
    upload_url = f'{URL}/api/v1/artifacts/artifacts/{PROJECT_ID}/{BUCKET}'
    # requests.post(f'{URL}/api/v1/artifacts/buckets/{PROJECT_ID}', data={"name": BUCKET}, allow_redirects=True,
    #               headers={**headers, 'Content-type': 'application/json'})
    # files = {'file': open(path_to_test_results + ".zip", 'rb')}
    #
    # requests.post(upload_url, allow_redirects=True, files=files, headers=headers)
    # if save_reports:
    #     files = {'file': open(path_to_reports + ".zip", 'rb')}
    #     requests.post(upload_url, allow_redirects=True, files=files, headers=headers)

else:
    post_processor = PostProcessor()
    post_processor.post_processing(args, aggregated_errors)
    requests.post(f'{URL}/api/v1/artifacts/buckets/{PROJECT_ID}', data={"name": BUCKET},
                  params=s3_config, allow_redirects=True,
                  headers={**headers, 'Content-type': 'application/json'})
    files = {'file': open(path_to_reports + ".zip", 'rb')}
    requests.post(upload_url, params=s3_config, allow_redirects=True, files=files, headers=headers)
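One note on the new upload flow: both POSTs now pass the s3_integration settings as query params, and the report archive is opened without an explicit close. A minimal sketch of the same two calls with the file handle closed deterministically (upload_report is a hypothetical helper; the endpoints and params are taken from the diff above):

    # Sketch: same bucket-create/upload flow as above, with the archive handle
    # closed via a context manager. upload_report is a hypothetical helper.
    import requests

    def upload_report(url, project_id, bucket, archive_path, token, s3_config):
        headers = {'Authorization': f'bearer {token}'} if token else {}
        requests.post(f'{url}/api/v1/artifacts/buckets/{project_id}', data={"name": bucket},
                      params=s3_config, allow_redirects=True,
                      headers={**headers, 'Content-type': 'application/json'})
        upload_url = f'{url}/api/v1/artifacts/artifacts/{project_id}/{bucket}'
        with open(archive_path, 'rb') as f:  # closed even if the POST raises
            requests.post(upload_url, params=s3_config, allow_redirects=True,
                          files={'file': f}, headers=headers)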
146 changes: 146 additions & 0 deletions post_processing/simulation_log_parser.py
@@ -0,0 +1,146 @@
import time
import argparse
import os, shutil
from influxdb import InfluxDBClient


class CSVHandler:
    def __init__(self, csv_path, external_client, internal_client, args):
        self.args = args
        self.csv_path = csv_path
        self.external_client = external_client
        self.internal_client = internal_client
        self.last_position = 0  # Initial position in the file

    def check_new_records(self):
        # Read CSV file from the last known position
        with open(self.csv_path, 'r') as file:
            file.seek(self.last_position)
            new_data = file.readlines()
            records = []
            results = []
            if new_data:
                for each in new_data:
                    if each.startswith("GROUP"):
                        records.append(each.replace("\n", ""))
                for each in records:
                    try:
                        _tmp = each.split("\t")
                        if _tmp[0] == "GROUP":
                            _res = {
                                "time": int(_tmp[2]),
                                "simulation": self.args["simulation"],
                                "request_name": _tmp[1],
                                "response_time": int(_tmp[3]) - int(_tmp[2]),
                                "method": "TRANSACTION",
                                "status": _tmp[5],
                                "status_code": 200,
                                "user_id": "1",  # TODO check user_id
                                "env": self.args["env"],
                                "test_type": self.args["type"],
                                "build_id": self.args["build_id"],
                                "lg_id": self.args["lg_id"]
                            }
                            results.append(_res)
                    except Exception as e:
                        print(e)

            print(f"Results count: {len(results)}")
            # Update the last known position
            self.last_position = file.tell()

        internal_points, external_points = [], []
        for req in results:
            internal_influx_record = {
                "measurement": self.args["simulation"],
                "tags": {
                    "request_name": req['request_name'],
                    "user_id": req['user_id']
                },
                "time": int(req["time"]) * 1000000,
                "fields": {
                    "simulation": req['simulation'],
                    "method": req['method'],
                    "response_time": int(req["response_time"]),
                    "status": req["status"],
                    "status_code": req["status_code"]
                }
            }
            internal_points.append(internal_influx_record)
            external_influx_record = {
                "measurement": self.args["simulation"],
                "tags": {
                    "env": req['env'],
                    "test_type": req['test_type'],
                    "build_id": req['build_id'],
                    "request_name": req['request_name'],
                    "method": req['method'],
                    "lg_id": req['lg_id'],
                    "user_id": req['user_id']
                },
                "time": int(req["time"]) * 1000000,
                "fields": {
                    "simulation": req['simulation'],
                    "response_time": int(req["response_time"]),
                    "status": req["status"],
                    "status_code": req["status_code"]
                }
            }
            external_points.append(external_influx_record)

        # Write data to internal InfluxDB
        self.internal_client.write_points(internal_points)

        # Write data to external InfluxDB
        self.external_client.write_points(external_points)


def get_args():
    parser = argparse.ArgumentParser(description='Simlog parser.')
    parser.add_argument("-t", "--type", help="Test type.")
    parser.add_argument("-s", "--simulation", help='Test simulation', default=None)
    parser.add_argument("-b", "--build_id", help="build ID", default=None)
    parser.add_argument("-en", "--env", help="Test environment.", default=None)
    parser.add_argument("-i", "--influx_host", help='InfluxDB host or IP', default=None)
    parser.add_argument("-p", "--influx_port", help='InfluxDB port', default=8086)
    parser.add_argument("-iu", "--influx_user", help='InfluxDB user', default="")
    parser.add_argument("-ip", "--influx_password", help='InfluxDB password', default="")
    parser.add_argument("-idb", "--influx_db", help='Test results InfluxDB', default="gatling")
    parser.add_argument("-l", "--lg_id", help='Load generator ID', default=None)
    return vars(parser.parse_args())

if __name__ == '__main__':
    # Clean the Gatling results folder before the run
    folder = "/opt/gatling/target/gatling"
    for filename in os.listdir(folder):
        file_path = os.path.join(folder, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as e:
            print('Failed to delete %s. Reason: %s' % (file_path, e))
    args = get_args()
    target_file = 'simulation.log'
    csv_file_path = ""
    # Wait until Gatling creates simulation.log somewhere under the results folder
    while True:
        for folder_path, _, files in os.walk(folder):
            if target_file in files:
                csv_file_path = os.path.join(folder_path, target_file)
                print(f"The file at '{csv_file_path}' exists. Continue with your script.")
        if csv_file_path:
            break
        print(f"The file '{target_file}' does not exist. Waiting for it to appear...")
        time.sleep(5)

    # Connect to InfluxDB
    external_client = InfluxDBClient(args["influx_host"], args["influx_port"], args["influx_user"],
                                     args["influx_password"], args["influx_db"])

    internal_client = InfluxDBClient("localhost", "8086", "", "", "local")

    handler = CSVHandler(csv_file_path, external_client, internal_client, args)
    # Poll the log for new GROUP records every 10 seconds
    while True:
        time.sleep(10)
        handler.check_new_records()
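To sanity-check the output, the transaction points can be read back from the internal InfluxDB the parser writes to. A minimal sketch, assuming a simulation named MySimulation (placeholder; the measurement is named after the -s/--simulation value):

    # Sketch: inspect the points the parser wrote to the internal InfluxDB
    # (host/port/db are the hard-coded values from the script above).
    from influxdb import InfluxDBClient

    client = InfluxDBClient("localhost", 8086, "", "", "local")

    # "MySimulation" is a placeholder measurement name.
    result = client.query('SELECT "request_name", "response_time", "status" '
                          'FROM "MySimulation" ORDER BY time DESC LIMIT 5')
    for point in result.get_points():
        print(point)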
