From 13f92ba92c3af8df363edef10c18b0700f341f13 Mon Sep 17 00:00:00 2001
From: Dustin Kaiser
Date: Wed, 18 Dec 2024 09:25:24 +0100
Subject: [PATCH] Remove old scripts, makefile targets

---
 services/monitoring/Makefile                  |  10 +-
 services/monitoring/docker-compose.yml.j2     |   2 -
 .../monitoring/grafana/scripts/.gitignore     |   2 -
 services/monitoring/grafana/scripts/README.MD |  26 -
 services/monitoring/grafana/scripts/export.py | 150 -----
 services/monitoring/grafana/scripts/import.py | 521 ------------------
 .../grafana/scripts/requirements.txt          |   4 -
 .../grafana/template-config.monitoring        |   1 -
 8 files changed, 1 insertion(+), 715 deletions(-)
 delete mode 100644 services/monitoring/grafana/scripts/.gitignore
 delete mode 100644 services/monitoring/grafana/scripts/README.MD
 delete mode 100644 services/monitoring/grafana/scripts/export.py
 delete mode 100644 services/monitoring/grafana/scripts/import.py
 delete mode 100644 services/monitoring/grafana/scripts/requirements.txt

diff --git a/services/monitoring/Makefile b/services/monitoring/Makefile
index 556b8ed8..8796554e 100644
--- a/services/monitoring/Makefile
+++ b/services/monitoring/Makefile
@@ -84,16 +84,8 @@ update.grafana.pwd: .env ## Change grafana pwd
 	grafanacontainerid=$$(docker ps | grep grafana | awk '{print $$1;}');\
 	docker exec -ti $$grafanacontainerid grafana-cli admin reset-admin-password $$TRAEFIK_PASSWORD
 
-
-.PHONY: grafana-export
-grafana-export: .venv## Export the remote grafana dashboards and datasources TO YOUR LOCAL MACHINE
-	@cd grafana/scripts;\
-	source ${REPO_BASE_DIR}/.venv/bin/activate;\
-	pip install -r requirements.txt > /dev/null 2>&1;\
-	python3 export.py;
-
 .PHONY: grafana-import
-grafana-import: grafana/assets ## Imports AND OVERWRITES the remote grafana dashboards and datasources FROM YOUR LOCAL MACHINE
+grafana-import: grafana/assets ## Imports the remote grafana dashboards and datasources FROM YOUR LOCAL MACHINE
 	@pushd ${REPO_BASE_DIR}/services/monitoring/grafana && \
 	$(MAKE) terraform-plan && \
 	$(MAKE) terraform-apply; \
diff --git a/services/monitoring/docker-compose.yml.j2 b/services/monitoring/docker-compose.yml.j2
index 680f0f09..9f0e0d67 100644
--- a/services/monitoring/docker-compose.yml.j2
+++ b/services/monitoring/docker-compose.yml.j2
@@ -25,8 +25,6 @@ configs:
     file: ./prometheus/prometheus-federation.yml
   prometheus_rules:
     file: ./prometheus/prometheus.rules.yml
-  grafana_image_renderer_config:
-    file: ./grafana-image-renderer/config.json
   pgsql_query_exporter_config:
     file: ./pgsql_query_exporter_config.yaml
   smokeping_prober_config:
diff --git a/services/monitoring/grafana/scripts/.gitignore b/services/monitoring/grafana/scripts/.gitignore
deleted file mode 100644
index 99fa7e12..00000000
--- a/services/monitoring/grafana/scripts/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-venv
-venv/*
diff --git a/services/monitoring/grafana/scripts/README.MD b/services/monitoring/grafana/scripts/README.MD
deleted file mode 100644
index 7383674b..00000000
--- a/services/monitoring/grafana/scripts/README.MD
+++ /dev/null
@@ -1,26 +0,0 @@
-# Purpose
-
-This python scripts export and import all the folders/datasources/dashboards of a grafana instance and is used to handle the different configuration of all Osparc Grafana instances.
-At each update on grafana dashboards and/or datasources, the export script has to be run, and the changes pushed to the git repo.
-
-# Installation
-
-* Create a virtualenvironment with python 3.X and activate it
-```console
-python3 -m venv venv
-source venv/bin/activate
-```
-* Install the dependancies
-```console
-pip install -r requirements.txt
-```
-
-* To export everything (has to be run each time something is updated on Grafana)
-```console
-python export.py
-```
-
-* To import everything
-```console
-python import.py
-```
diff --git a/services/monitoring/grafana/scripts/export.py b/services/monitoring/grafana/scripts/export.py
deleted file mode 100644
index c403607e..00000000
--- a/services/monitoring/grafana/scripts/export.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# pylint: disable=pointless-string-statement,too-many-statements
-import json
-import os
-import shutil
-import sys
-import warnings
-from pathlib import Path
-
-import requests
-import typer
-from environs import Env
-
-repo_config_location = os.getenv("REPO_CONFIG_LOCATION")
-if not repo_config_location:
-    print("ERROR: Env-Var REPO_CONFIG_LOCATION not set.")
-    sys.exit(1)
-if "\n" in repo_config_location:
-    repo_config_location = repo_config_location.split("\n")[0]
-
-env = Env()
-env.read_env(repo_config_location, recurse=False)
-
-warnings.filterwarnings(
-    "ignore",
-    ".*Adding certificate verification is strongly advised.*",
-)
-
-
-def main(foldername: str = ""):
-    # We delete the previous files
-    if foldername == "":
-        directory = "./../provisioning/exported/" + env.str("MACHINE_FQDN")
-
-        export_dir = Path.cwd() / ".." / "provisioning/exported"
-        export_dir.mkdir(parents=True, exist_ok=True)
-    else:
-        directory = foldername
-    if os.path.exists(directory):
-        shutil.rmtree(directory)
-
-    os.mkdir(directory)
-
-    # We export the Datasources
-    print("**************** Export datasources *******************")
-    os.mkdir(directory + "/datasources")
-    url = "https://monitoring." + env.str("MACHINE_FQDN") + "/grafana/api/"
-    session = requests.Session()
-    session.auth = (env.str("SERVICES_USER"), env.str("SERVICES_PASSWORD"))
-    hed = {"Content-Type": "application/json"}
-
-    r = session.get(url + "datasources", headers=hed, verify=False)
-    for datasource in r.json():
-        r_datasource = session.get(
-            url + "datasources/" + str(datasource["id"]), headers=hed, verify=False
-        )
-        with open(
-            directory + "/datasources/" + str(datasource["id"]) + ".json", "w"
-        ) as outfile:
-            # If the datasource is Prometheus, we remove the login/password credentials
-            json_data = r_datasource.json()
-            if json_data["type"] == "prometheus":
-                json_data["basicAuthUser"] = ""
-                json_data["basicAuthPassword"] = ""
-            json.dump(json_data, outfile, sort_keys=True, indent=2)
-            print("Export datasource " + json_data["name"])
-
-    # We export the dashboards
-    print("**************** Export dashboards *******************")
-    os.mkdir(directory + "/dashboards")
-    r = session.get(url + "search?query=%", headers=hed, verify=False)
-    for dashboard in r.json():
-        r_dashboard = session.get(
-            url + "dashboards/uid/" + str(dashboard["uid"]), headers=hed, verify=False
-        )
-        if r_dashboard.json()["meta"]["isFolder"] is not True:
-            if (
-                os.path.exists(
-                    directory
-                    + "/dashboards/"
-                    + r_dashboard.json()["meta"]["folderTitle"]
-                )
-                == False
-            ):
-                os.mkdir(
-                    directory
-                    + "/dashboards/"
-                    + r_dashboard.json()["meta"]["folderTitle"]
-                )
-
-            with open(
-                directory
-                + "/dashboards/"
-                + r_dashboard.json()["meta"]["folderTitle"]
-                + "/"
-                + str(r_dashboard.json()["dashboard"]["title"])
-                + ".json",
-                "w",
-            ) as outfile:
-                print("Export Dashboard " + r_dashboard.json()["dashboard"]["title"])
-                exported_dashboard = r_dashboard.json()
-                exported_dashboard["meta"].pop("updated", None)
-                exported_dashboard["meta"].pop("created", None)
-                exported_dashboard["meta"].pop("folderId", None)
-                exported_dashboard["meta"].pop("folderUid", None)
-                exported_dashboard["meta"].pop("folderUrl", None)
-                exported_dashboard["meta"].pop("version", None)
-                exported_dashboard.pop("id", None)
-                exported_dashboard["dashboard"].pop("id", None)
-                exported_dashboard.pop("iteration", None)
-                json.dump(exported_dashboard, outfile, sort_keys=True, indent=2)
-
-    # Export Alerts
-    print("**************** Export alerts *******************")
-    if not os.path.exists(directory + "/alerts/"):
-        os.mkdir(directory + "/alerts/")
-    r = session.get(url + "ruler/grafana/api/v1/rules", headers=hed, verify=False)
-    for alert in r.json()["ops"]:
-        with open(directory + "/alerts/" + alert["name"] + ".json", "w") as outfile:
-            print("Export Alert " + alert["name"])
-            # Remove UID if present
-            for rule_iter in range(len(alert["rules"])):
-                alert["rules"][rule_iter]["grafana_alert"].pop("uid", None)
-                # Remove orgId
-                alert["rules"][rule_iter]["grafana_alert"].pop("orgId", None)
-                # Remove id
-                alert["rules"][rule_iter]["grafana_alert"].pop("id", None)
-                # Remove id
-                alert["rules"][rule_iter]["grafana_alert"].pop("namespace_id", None)
-                # Remove id
-                alert["rules"][rule_iter]["grafana_alert"].pop("namespace_uid", None)
-                if (
-                    str(env.str("MACHINE_FQDN") + " - ")
-                    in alert["rules"][rule_iter]["grafana_alert"]["title"]
-                ):
-                    alert["rules"][rule_iter]["grafana_alert"]["title"] = alert[
-                        "rules"
-                    ][rule_iter]["grafana_alert"]["title"].replace(
-                        str(env.str("MACHINE_FQDN") + " - "), ""
-                    )
-            json.dump(alert, outfile, sort_keys=True, indent=2)
-
-
-if __name__ == "__main__":
-    """
-    Imports grafana dashboard from dumped json files via the Grafana API
-
-    If --foldername is used, the data is taken from this location.
-    Otherwise, the default ops-repo folder is assumed.
-    """
-    typer.run(main)
diff --git a/services/monitoring/grafana/scripts/import.py b/services/monitoring/grafana/scripts/import.py
deleted file mode 100644
index 796f848b..00000000
--- a/services/monitoring/grafana/scripts/import.py
+++ /dev/null
@@ -1,521 +0,0 @@
-# pylint: disable=invalid-name,pointless-string-statement,too-many-statements,too-many-branches
-import glob
-import json
-import os
-import sys
-import time
-import warnings
-from pathlib import Path
-
-import requests
-import typer
-import yaml
-from environs import Env
-
-warnings.filterwarnings(
-    "ignore",
-    ".*Adding certificate verification is strongly advised.*",
-)
-
-repo_config_location = os.getenv("REPO_CONFIG_LOCATION")
-assert repo_config_location is not None
-if "\n" in repo_config_location:
-    repo_config_location = repo_config_location.split("\n")[0]
-
-env = Env()
-env.read_env(repo_config_location, recurse=False)
-
-
-def dictionary_traversal_datasource_uid_replacement(
-    _input, datasourceType, replacementID
-):
-    """
-    Traverses Dictionary, potentially with lists of dicts, recursively.
-    If a "datasource" dict tis found, the uid value is replaced.
-    """
-    for key, value in _input.items():
-        # If we found the target dict "datasource"
-        if (
-            key == "datasource"
-            and "uid" in value
-            and "type" in value
-            and isinstance(value, dict)
-        ):
-            if value["type"] == datasourceType:
-                value["uid"] = replacementID
-        # Recursively step down if value is a dict
-        elif isinstance(value, dict):
-            # if "datasource" in value:
-            #     print("v: ",value)
-            dictionary_traversal_datasource_uid_replacement(
-                value, datasourceType, replacementID
-            )
-        # Iterate list of dict
-        elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], dict):
-            for i in value:
-                dictionary_traversal_datasource_uid_replacement(
-                    i, datasourceType, replacementID
-                )
-        # Ignore endpoints of recursive traversal
-        else:
-            time.sleep(0)  # Do nothing
-
-
-def subsituteDatasources(
-    directoriesDatasources,
-    configFilePath,
-    dashboardTitle,
-    jsonObject,
-):
-    if configFilePath.is_file():
-        with open(str(configFilePath)) as cfile:
-            configYaml = yaml.load(cfile, Loader=yaml.FullLoader)
-    else:
-        print("ERROR: Config file missing at: " + str(configFilePath))
-        sys.exit(1)
-    ######
-    ######
-    listOfDatasources = []
-    for file in directoriesDatasources:
-        with open(file) as jsonFile:
-            jsonObjectDatasource = json.load(jsonFile)
-            objectToKeepTrack = {
-                "name": jsonObjectDatasource["name"],
-                "uid": jsonObjectDatasource["uid"],
-                "type": jsonObjectDatasource["type"],
-            }
-            listOfDatasources.append(objectToKeepTrack)
-
-    listOfDatasourcesWhichAreUnique = [
-        i
-        for i in listOfDatasources
-        if [j["type"] for j in listOfDatasources].count(i["type"]) == 1
-    ]
-    listOfDatasourcesWhichAreNotUnique = [
-        i
-        for i in listOfDatasources
-        if [j["type"] for j in listOfDatasources].count(i["type"]) > 1
-    ]
-    #
-    #######
-    #
-    for presentDatasource in listOfDatasourcesWhichAreUnique:
-        # print("DEBUG: Subsituting unqiue type ",presentDatasource["type"])
-        dictionary_traversal_datasource_uid_replacement(
-            jsonObject, presentDatasource["type"], presentDatasource["uid"]
-        )
-    for nonUniqueDatasource in listOfDatasourcesWhichAreNotUnique:
-        assert nonUniqueDatasource["type"] in {
-            i["type"] for i in configYaml["defaults"]
-        }
-        defaultNameForCurrent = [
-            i["datasource_name"]
-            for i in configYaml["defaults"]
-            if i["type"] == nonUniqueDatasource["type"]
-        ][0]
-        if nonUniqueDatasource["name"] == defaultNameForCurrent:
-            # print("DEBUG: Subsituting non-unqiue type ",nonUniqueDatasource["type"], " as given in defaults.")
-            dictionary_traversal_datasource_uid_replacement(
-                jsonObject, nonUniqueDatasource["type"], nonUniqueDatasource["uid"]
-            )
-    # Subsitute custom dashboard mappings now
-    if "datasources2dashboards" in configYaml:
-        if len(configYaml["datasources2dashboards"]) > 0:
-            if dashboardTitle in [
-                i["dashboard_name"] for i in configYaml["datasources2dashboards"]
-            ]:
-                currentConfigMapping = [
-                    i
-                    for i in configYaml["datasources2dashboards"]
-                    if i["dashboard_name"] == dashboardTitle
-                ][0]["mapping"]
-                for j in currentConfigMapping.copy():
-                    j["uid"] = [
-                        i["uid"]
-                        for i in listOfDatasources
-                        if i["name"] == j["datasource_name"]
-                    ][0]
-                    # print("DEBUG: Subsituting custom type ",j["type"], " as given in config.")
-                    dictionary_traversal_datasource_uid_replacement(
-                        jsonObject, j["type"], j["uid"]
-                    )
-
-
-def main(foldername: str = "", overwrite: bool = True):
-    # Get mail adress for alerts:
-    grafanaAlertingMailTarget = env.str("GRAFANA_ALERTS_MAIL", default=None)
-    grafanaAlertingSlackTarget = env.str("GRAFANA_ALERTS_SLACK", default=None)
-
-    # We first import the datasources
-    url = "https://monitoring." + env.str("MACHINE_FQDN") + "/grafana/api/"
-    #
-    #
-    print("**************** GRAFANA PROVISIONING *******************")
-    print("Assuming deployment", env.str("MACHINE_FQDN"), "at", url)
-    if grafanaAlertingMailTarget:
-        print("Assuming alerting mail address", grafanaAlertingMailTarget)
-    if grafanaAlertingSlackTarget:
-        print("Assuming alerting slack webhook", grafanaAlertingSlackTarget)
-    #
-    #
-    session = requests.Session()
-    session.auth = (env.str("SERVICES_USER"), env.str("SERVICES_PASSWORD"))
-    hed = {"Content-Type": "application/json"}
-
-    if foldername == "":
-        directoriesDatasources = glob.glob("./../assets/datasources/*")
-        directoriesDatasources += glob.glob("./../assets/shared" + "/datasources/*")
-    else:
-        directoriesDatasources = glob.glob(foldername + "/datasources/*")
-    #
-    print("**************** Add datasources *******************")
-    if overwrite:
-        # Get all datasources
-        # print("Deleting datasource " + str(i["uid"]) + " - " + str(i["name"]))
-        r = session.get(url + "datasources", headers=hed, verify=False)
-        if r.status_code > 300:
-            print("Recieved non-200 status code upon import: ", str(r.status_code))
-            print("ABORTING!")
-            print(r.json())
-            sys.exit(1)
-        for i in r.json():
-            print("Response: ", r.status_code)
-            r = session.delete(
-                url + "datasources/uid/" + str(i["uid"]), headers=hed, verify=False
-            )
-    listOfDatasources = []
-    for file in directoriesDatasources:
-        with open(file) as jsonFile:
-            jsonObjectDatasource = json.load(jsonFile)
-            jsonFile.close()
-
-        # We add the credentials for the PGSQL Databases with the secureJsonData field
-        # DK Mar2023 : THIS IS CURRENTLY NOT USED
-        if jsonObjectDatasource["type"].lower() == "postgres":
-            print("postgres datasource is currently not supported (deprecated)")
-            sys.exit(1)
-        elif jsonObjectDatasource["type"] == "Prometheus":
-            jsonObjectDatasource["basicAuthUser"] = env.str("SERVICES_USER")
-            jsonObjectDatasource["basicAuthPassword"] = env.str("SERVICES_PASSWORD")
-            jsonObjectDatasource["url"] = "http://prometheus:" + env.str(
-                "MONITORING_PROMETHEUS_PORT"
-            )
-        r = session.post(
-            url + "datasources", json=jsonObjectDatasource, headers=hed, verify=False
-        )
-        objectToKeepTrack = {
-            "name": jsonObjectDatasource["name"],
-            "uid": jsonObjectDatasource["uid"],
-            "type": jsonObjectDatasource["type"],
-        }
-        listOfDatasources.append(objectToKeepTrack)
-        # print(r.json())
-        print("Import of datasource " + jsonObjectDatasource["name"])
-        if r.status_code != 200:
-            print("Received non-200 status code upon import: ", str(r.status_code))
-            print("JSON file failed uploading.")
-    #
-    # Second, we import the folders structure
-    directoriesData = []
-    if foldername == "":
-        directoriesDashboards = glob.glob("./../assets/dashboards/*")
-        directoriesDashboards = [
-            *directoriesDashboards,
-            *list(glob.glob("./../assets/shared" + "/dashboards/*")),
-        ]
-    else:
-        directoriesDashboards = glob.glob(foldername + "/dashboards/*")
-    for directory in directoriesDashboards:
-        if ".json" in directory:
-            print(
-                "Error: Looking for folders but got json file. Is your folder structure organized properly?\nABORTING"
-            )
-            sys.exit(1)
-        for file in glob.glob(directory + "/*"):
-            with open(file) as jsonFile:
-                jsonObject = json.load(
-                    jsonFile
-                )  # Assert the file is valid json, otherwise will give an error
-            break
-        directoriesData.append(os.path.basename(os.path.normpath(directory)))
-    directoriesData = list(set(directoriesData))
-
-    print("Deleting alerts")
-    r = session.get(url + "v1/provisioning/alert-rules", headers=hed, verify=False)
-    # Status code is 404 if no alerts are present. Handling it:
-    if r.status_code != 404:
-        for alert in r.json():
-            deleteResponse = session.delete(
-                url + f"v1/provisioning/alert-rules/{alert['uid']}",
-                headers=hed,
-                verify=False,
-            )
-            if deleteResponse.status_code < 200 or deleteResponse.status_code > 204:
-                print(
-                    "Received status code not 200-204 upon delete: ",
-                    str(deleteResponse.status_code),
-                )
-                print("ABORTING!")
-                sys.exit(1)
-
-    # We add them in grafana
-    print("**************** Add folders *******************")
-    if overwrite:
-        print("Deleting all folders and dashboards")
-        # Get all datasources
-        r = session.get(url + "folders", headers=hed, verify=False)
-        for i in r.json():
-            r = session.delete(
-                url + "folders/" + str(i["uid"]), headers=hed, verify=False
-            )
-    print("Adding folders")
-    for directoryData in directoriesData:
-        r = session.post(
-            url + "folders", json={"title": directoryData}, headers=hed, verify=False
-        )
-        if r.status_code != 200:
-            print("Received non-200 status code upon import: ", str(r.status_code))
-            print("JSON file failed uploading:")
-            print(json.dumps(directoryData, sort_keys=True, indent=2))
-    print("**************** Add dashboards *******************")
-    #
-    #
-    configFilePath = Path("./../assets/datasources2dashboards.yaml")
-
-    # Finally we import the dashboards
-    for directory in directoriesDashboards:
-        for file in glob.glob(directory + "/*.json"):
-            with open(file) as jsonFile:
-                jsonObject = json.load(jsonFile)
-            # We set the folder ID
-            r = session.get(url + "folders", headers=hed, verify=False)
-            folderID = None
-            for i in r.json():
-                if i["title"] == file.split("/")[-2]:
-                    folderID = i["id"]
-                    break
-            assert folderID
-            print("Add dashboard " + jsonObject["dashboard"]["title"])
-            # Subsitute datasource UID
-            # pylint: disable=too-many-function-args
-            subsituteDatasources(
-                directoriesDatasources,
-                configFilePath,
-                jsonObject["dashboard"]["title"],
-                jsonObject,
-            )
-            dashboard = {"Dashboard": jsonObject["dashboard"]}
-            # DEBUGPRINT
-            # with open(".out.temp","w") as ofile:
-            #     ofile.write(json.dumps(jsonObject,indent=2))

-            dashboard["Dashboard"]["id"] = "null"
-            dashboard["overwrite"] = True
-            dashboard["folderId"] = folderID
-            r = session.post(
-                url + "dashboards/db", json=dashboard, headers=hed, verify=False
-            )
-
-            if r.status_code != 200:
-                print(
-                    "Received non-200 status code upon import: ", str(r.status_code)
-                )
-                # print(r.json())
-                print("JSON file failed uploading.")
-                sys.exit()
-
-    # IMPORT ALERTS
-    # 1. Provision Alerting User
-    print("**************** Add Target Mail Bucket / Slack Webhook *******************")
-    if grafanaAlertingMailTarget:
-        mailAddressProvisioningJSON = (
-            '''{
-            "template_files": {},
-            "alertmanager_config": {
-                "route": {
-                    "receiver": "'''
-            + grafanaAlertingMailTarget.split("@")[0]
-            + '''",
-                    "continue": false,
-                    "group_by": [],
-                    "routes": []
-                },
-                "templates": null,
-                "receivers": [{
-                    "name": "'''
-            + grafanaAlertingMailTarget.split("@")[0]
-            + '''",
-                    "grafana_managed_receiver_configs": [{
-                        "name": "'''
-            + grafanaAlertingMailTarget.split("@")[0]
-            + '''",
-                        "type": "email",
-                        "disableResolveMessage": false,
-                        "settings": {
-                            "addresses": "'''
-            + grafanaAlertingMailTarget
-            + """"
-                        },
-                        "secureFields": {}
-                    }]
-                }]
-            }
-        }"""
-        )
-    else:
-        slackWebhookProvisioningJSON = (
-            '''{
-            "template_files": {},
-            "alertmanager_config": {
-                "route": {
-                    "receiver": "'''
-            + "slackwebhook"
-            + '''",
-                    "continue": false,
-                    "group_by": [],
-                    "routes": []
-                },
-                "templates": null,
-                "receivers": [{
-                    "name": "'''
-            + "slackwebhook"
-            + '''",
-                    "grafana_managed_receiver_configs": [{
-                        "name": "'''
-            + "slackwebhook"
-            + '''",
-                        "type": "slack",
-                        "disableResolveMessage": false,
-                        "settings": {
-                            "username": "'''
-            + "grafana-alert"
-            + '''"
-                        },
-                        "secureSettings":
-                        {
-                            "url": "'''
-            + str(grafanaAlertingSlackTarget)
-            + """",
-                            "token": ""
-                        }
-                    }]
-                }]
-            }
-        }"""
-        )
-    r = session.post(
-        url + "alertmanager/grafana/config/api/v1/alerts",
-        json=json.loads(
-            mailAddressProvisioningJSON
-            if grafanaAlertingMailTarget
-            else slackWebhookProvisioningJSON
-        ),
-        verify=False,
-        headers=hed,
-    )
-    if r.status_code != 202:
-        print(
-            "Received non-202 status code upon mail address provisioning: ",
-            str(r.status_code),
-        )
-        print(
-            "POST to URL", url + "alertmanager/grafana/config/api/v1/alerts", "failed"
-        )
-        print("JSON file failed uploading:")
-        print(
-            mailAddressProvisioningJSON
-            if grafanaAlertingMailTarget
-            else slackWebhookProvisioningJSON
-        )
-        print("Response Error:")
-        print(r.json())
-        sys.exit(1)
-    # 2. Import alerts
-    print("**************** Add alerts *******************")
-    # Finally we import the dashboards
-    if foldername == "":
-        directoriesAlerts = glob.glob("./../assets/alerts/*")
-        directoriesAlerts += glob.glob("./../assets/shared" + "/alerts/*")
-    else:
-        directoriesAlerts = glob.glob(foldername + "/alerts/*")
-    #
-    print("***************** Add folders ******************")
-    r = session.get(
-        url + "folders",
-        headers=hed,
-        verify=False,
-    )
-    ops_uid = (
-        r.json()[
-            next((i for i, item in enumerate(r.json()) if item["title"] == "ops"), None)
-        ]["uid"]
-        if next((i for i, item in enumerate(r.json()) if item["title"] == "ops"), None)
-        else None
-    )
-    if not ops_uid:
-        print("Could not find required grafana folder named `ops`. Aborting.")
-        sys.exit(1)
-    print(f"Info: Adding alerts always to folder `ops`, determined with uid {ops_uid}.")
-    if r.status_code != 200:
-        print(
-            "Received non-200 status code upon alerts folder creation: ",
-            str(r.status_code),
-        )
-        sys.exit(1)
-    #
-    for file in directoriesAlerts:
-        with open(file) as jsonFile:
-            jsonObject = json.load(jsonFile)
-        # pylint: disable=too-many-nested-blocks
-        for rule in jsonObject["rules"]:
-            # Add deployment name to alert name
-            rule["grafana_alert"]["title"] = (
-                env.str("MACHINE_FQDN") + " - " + rule["grafana_alert"]["title"]
-            )
-            # Subsitute UIDs of datasources
-            # pylint: disable=too-many-function-args
-            subsituteDatasources(
-                directoriesDatasources,
-                configFilePath,
-                jsonObject["name"],
-                rule,
-            )
-            # Propagate subsituted UIDs to other fields
-            for i in rule["grafana_alert"]["data"]:
-                if "datasourceUid" in i:
-                    if "model" in i:
-                        if "datasource" in i["model"]:
-                            if "type" in i["model"]["datasource"]:
-                                if (
-                                    i["model"]["datasource"]["type"]
-                                    != "grafana-expression"
-                                ):
-                                    i["datasourceUid"] = i["model"]["datasource"][
-                                        "uid"
-                                    ]
-            # Remove UID if present
-            rule["grafana_alert"].pop("uid", None)
-
-        print("Add alerts " + jsonObject["name"])
-
-        r = session.post(
-            url + f"ruler/grafana/api/v1/rules/{ops_uid}",
-            json=jsonObject,
-            headers=hed,
-            verify=False,
-        )
-        if r.status_code != 202:
-            print("Received non-202 status code upon import: ", str(r.status_code))
-            print(r.json())
-            print("JSON file failed uploading.")
-            sys.exit()
-
-
-if __name__ == "__main__":
-    """
-    Imports grafana dashboard from dumped json files via the Grafana API
-
-    If --foldername is used, the data is taken from this location.
-    Otherwise, the default ops-repo folder is assumed.
-    """
-    typer.run(main)
diff --git a/services/monitoring/grafana/scripts/requirements.txt b/services/monitoring/grafana/scripts/requirements.txt
deleted file mode 100644
index a94c2e09..00000000
--- a/services/monitoring/grafana/scripts/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-environs
-requests
-typer
-pyyaml
diff --git a/services/monitoring/grafana/template-config.monitoring b/services/monitoring/grafana/template-config.monitoring
index dd8d02d3..d435ab73 100644
--- a/services/monitoring/grafana/template-config.monitoring
+++ b/services/monitoring/grafana/template-config.monitoring
@@ -4,7 +4,6 @@ GF_USERS_ALLOW_SIGN_UP=false
 GF_SERVER_ROOT_URL=https://${MONITORING_DOMAIN}/grafana
 GF_INSTALL_PLUGINS=grafana-piechart-panel, grafana-worldmap-panel, grafana-polystat-panel
 # NOTE: grafana image rendering plugin: https://grafana.com/blog/2020/05/07/grafana-7.0-preview-new-image-renderer-plugin-to-replace-phantomjs/
-GF_RENDERING_SERVER_URL=http://grafana-image-renderer:8081/render
 GF_RENDERING_CALLBACK_URL=http://grafana:3000/
 GF_LOG_FILTERS=rendering:debug
 GF_SERVER_DOMAIN=${MONITORING_DOMAIN}