Merge branch 'naemono-timeouts-pep-threading-optimizations'
alex-leonhardt committed Jan 27, 2018
2 parents 14a290a + e47b4da commit c7f7e99
Showing 11 changed files with 257 additions and 69 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -3,7 +3,7 @@
 .vagrant/
 .idea/
 .vscode/
-
+.noseids
 *.pyc
 
 ### JetBrains template
13 changes: 8 additions & 5 deletions README.md
@@ -19,12 +19,12 @@ Overview (DCs)
 
 # faq
 
-#### how can I filter by more than 1 value? 
+#### how can I filter by more than 1 value?
 
-Amend the URL and add all the filters together as a comma-separated list, e.g.: 
+Amend the URL and add all the filters together as a comma-separated list, e.g.:
 http://localhost:5000/filtered/aaa,bbb,ccc,ddd
 
-#### what do the filters filter by ? 
+#### what do the filters filter by ?
 
 They filter based on the hosts' subscriptions, except in the Events view where they filter on all properties of the check and the host.
 
@@ -85,7 +85,7 @@ Add via pip install or via your package management
 ```
 useradd -r sensu-grid
 ```
 
 ## run as a service
@@ -121,10 +121,13 @@ dcs:
     port: 4567
     user: apiuser
     password: apipassword
 app:
   refresh: 60
   bg_color: #333333
+  # This is a python requests layer timeout, as by default, it does not timeout
+  requests_timeout: 10
+  logging_level: info
 ```
 
 ## run locally / manually
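The new `requests_timeout` key is what ultimately becomes the `timeout=` argument on the `requests` calls in `griddata.py` (see the diff below). A minimal sketch of that plumbing, assuming a `config.yaml` shaped like the example above; the `load_config()` and `fetch_results()` helpers here are illustrative, not code from this repository:

```python
# Minimal sketch (not the app's actual wiring): read a YAML config shaped
# like the README example and pass app.requests_timeout to requests.get().
import requests
import yaml


def load_config(path='config.yaml'):
    with open(path) as fh:
        return yaml.safe_load(fh)


def fetch_results(dc, timeout):
    # Without timeout=, requests has no default and can block indefinitely
    # on a hung Sensu API, which is what the README comment warns about.
    url = 'http://{0}:{1}/results'.format(dc['url'], dc['port'])
    return requests.get(url, timeout=timeout).json()


if __name__ == '__main__':
    config = load_config()
    timeout = config['app'].get('requests_timeout', 10)
    for dc in config['dcs']:
        print(fetch_results(dc, timeout))
```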
5 changes: 3 additions & 2 deletions gridcheck.py
@@ -1,13 +1,14 @@
 import requests
+import re
 
 
 def check_connection(dc):
     url = 'http://{0}:{1}/info'.format(dc['url'], dc['port'])
     try:
         if 'user' and 'password' in dc:
-            r = requests.get(url, auth=(dc['user'], dc['password']))
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=30)
         else:
-            r = requests.get(url)
+            r = requests.get(url, timeout=30)
         if r:
             return True
         else:
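For reference, the hard-coded `timeout=30` above means a hung `/info` endpoint now surfaces as `requests.exceptions.Timeout` instead of blocking the check forever, and the surrounding error handling in `check_connection()` (truncated in this hunk) can then treat the datacenter as unreachable. A small illustration, with a placeholder hostname:

```python
# Illustration only: how a requests timeout surfaces. The hostname is a
# placeholder, not one used by sensu-grid.
import requests

try:
    requests.get('http://sensu.example.com:4567/info', timeout=30)
except requests.exceptions.Timeout:
    print('Sensu API did not answer within 30 seconds')
except requests.exceptions.ConnectionError as exc:
    print('Could not connect: {0}'.format(exc))
```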
2 changes: 1 addition & 1 deletion gridconfig.py
@@ -1,6 +1,7 @@
 import os
 import yaml
 
+
 class Config(object):
     DEBUG = False
     TESTING = False
@@ -25,4 +26,3 @@ class ProdConfig(Config):
 class TestingConfig(Config):
     TESTING = True
     DEBUG = True
-
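As an aside on how config classes like these are usually consumed: Flask apps typically load them with `app.config.from_object()`, which copies the uppercase attributes (DEBUG, TESTING, ...) into `app.config`. A minimal sketch; the `create_app()` factory is illustrative and not necessarily how sensu-grid wires it up:

```python
# Illustrative only: the common Flask pattern for config classes such as
# Config / ProdConfig / TestingConfig defined in gridconfig.py.
from flask import Flask

from gridconfig import ProdConfig


def create_app(config_object=ProdConfig):
    app = Flask(__name__)
    # from_object() copies the class's UPPERCASE attributes into app.config.
    app.config.from_object(config_object)
    return app
```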
151 changes: 107 additions & 44 deletions griddata.py
@@ -1,96 +1,144 @@
-from gridcheck import *
+import logging
 import requests
+import six
 
+from functools import partial
+from multiprocessing.dummy import Pool as ThreadPool
 
-def get_filter_data(dcs):
-    filter_data = []
-    data = None
+from gridcheck import check_stash
+
+
+LOGGER = logging.getLogger(__name__)
+
+
+def _filter_data(timeout, dc):
+    filter_data = list()
+    r = None
+    data = None
+    LOGGER.debug("Retrieving filters for datacenter: {0}".format(dc['name']))
+    url = 'http://{0}:{1}/clients'.format(dc['url'], dc['port'])
+    try:
+        if 'user' and 'password' in dc:
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=timeout)
+        else:
+            r = requests.get(url, timeout=timeout)
+        r.raise_for_status()
+    except Exception as ex:
+        LOGGER.error("Got exception while filtering on clients: {0}".format(str(ex)))
+        pass
+    finally:
+        if r:
+            data = r.json()
+            r.close()
+        else:
+            LOGGER.error("no reponse")
 
-    for dc in dcs:
-        url = 'http://{0}:{1}/clients'.format(dc['url'], dc['port'])
-        try:
-            if 'user' and 'password' in dc:
-                r = requests.get(url, auth=(dc['user'], dc['password']))
-            else:
-                r = requests.get(url)
-        except Exception:
-            pass
-        finally:
-            if r:
-                data = r.json()
-                r.close()
+    if data:
+        for i in data:
+            for s in i['subscriptions']:
+                if s not in filter_data:
+                    filter_data.append(s)
+    else:
+        LOGGER.error("No response data")
+    LOGGER.debug("Filter Retrieval for datacenter {0} complete".format(dc['name']))
+    return filter_data
 
-    if data:
-        for i in data:
-            for s in i['subscriptions']:
-                if s not in filter_data:
-                    filter_data.append(s)
 
-    if filter_data:
-        assert type(filter_data) == list
-        return filter_data
+def get_filter_data(dcs, timeout):
+    aggregated = list()
+    final_aggregated_filter_data = []
+    pool = ThreadPool(len(dcs))
+    func = partial(_filter_data, timeout)
+    try:
+        aggregated = pool.map(func, dcs)
+        assert type(aggregated) == list
+        for filterdata in aggregated:
+            if filterdata not in final_aggregated_filter_data:
+                final_aggregated_filter_data.append(filterdata)
+
+    except Exception as e:
+        LOGGER.error("unable to get filter data, ex: {0}".format(e))
+    finally:
+        pool.close()
 
-    return []
+    return final_aggregated_filter_data[0]
 
 
-def get_data(dc):
+def get_data(dc, timeout):
+    LOGGER.debug("Retrieving data for datacenter: {0}".format(dc['name']))
     url = 'http://{0}:{1}/results'.format(dc['url'], dc['port'])
     data = None
     r = None
     try:
         if 'user' and 'password' in dc:
-            r = requests.get(url, auth=(dc['user'], dc['password']))
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=timeout)
         else:
-            r = requests.get(url)
-
-    except Exception:
+            r = requests.get(url, timeout=timeout)
+        r.raise_for_status()
+    except Exception as ex:
+        LOGGER.error("Got exception while retrieving data for dc: {0} ex: {1}".format(dc, str(ex)))
         pass
     finally:
         if r:
             data = r.json()
             r.close()
+        else:
+            LOGGER.error("no reponse")
 
+    LOGGER.debug("Data Retrieval for datacenter {0} complete".format(dc['name']))
     return data
 
 
-def get_clients(dc):
+def get_clients(dc, timeout):
+    LOGGER.debug("Retrieving clients for datacenter: {0}".format(dc['name']))
     url = 'http://{0}:{1}/clients'.format(dc['url'], dc['port'])
     data = None
     r = None
 
     try:
         if 'user' and 'password' in dc:
-            r = requests.get(url, auth=(dc['user'], dc['password']))
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=timeout)
+            r.raise_for_status()
             data = r.json()
         else:
-            r = requests.get(url)
+            r = requests.get(url, timeout=timeout)
             data = r.json()
-    except Exception:
+    except Exception as ex:
+        LOGGER.error("Got exception while retrieving clients for dc: {0} ex: {1}".format(dc, str(ex)))
         pass
     finally:
         if r:
             r.close()
+        else:
+            LOGGER.error("no reponse")
 
+    LOGGER.debug("Client Retrieval for datacenter {0} complete".format(dc['name']))
     return data
 
 
-def get_stashes(dc):
+def get_stashes(dc, timeout):
+    LOGGER.debug("Retrieving stashes for datacenter: {0}".format(dc['name']))
     url = 'http://{0}:{1}/silenced'.format(dc['url'], dc['port'])
     data = None
     r = None
     try:
         if 'user' and 'password' in dc:
-            r = requests.get(url, auth=(dc['user'], dc['password']))
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=timeout)
+            r.raise_for_status()
             data = r.json()
         else:
-            r = requests.get(url)
+            r = requests.get(url, timeout=timeout)
             data = r.json()
-    except Exception:
+    except Exception as ex:
+        LOGGER.error("Got exception while retrieving stashes for dc: {0} ex: {1}".format(dc, str(ex)))
         pass
     finally:
         if r:
             r.close()
+        else:
+            LOGGER.error("no reponse")
 
+    LOGGER.debug("Stash Retrieval for datacenter {0} complete".format(dc['name']))
     return data
 
 
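The core of the threading change above is the fan-out in `get_filter_data()`: `multiprocessing.dummy.Pool` is a thread pool behind the multiprocessing API, and `functools.partial` pins the shared `timeout` so `pool.map()` only has to vary the per-datacenter dict. A stand-alone sketch of the same pattern; `fetch_subscriptions()` and `fetch_all()` are illustrative helpers, not code from the repo:

```python
# Sketch of the thread-pool fan-out pattern, under the same assumptions as
# _filter_data(): one Sensu API per datacenter dict with 'url' and 'port'.
from functools import partial
from multiprocessing.dummy import Pool as ThreadPool

import requests


def fetch_subscriptions(timeout, dc):
    url = 'http://{0}:{1}/clients'.format(dc['url'], dc['port'])
    r = requests.get(url, timeout=timeout)
    r.raise_for_status()
    return [s for client in r.json() for s in client.get('subscriptions', [])]


def fetch_all(dcs, timeout):
    pool = ThreadPool(len(dcs))
    try:
        # One worker per DC: a slow datacenter no longer serializes the whole
        # refresh, and the per-request timeout bounds how long a worker hangs.
        return pool.map(partial(fetch_subscriptions, timeout), dcs)
    finally:
        pool.close()
        pool.join()
```

`pool.map()` returns one result per datacenter, in input order, which is why `get_filter_data()` above post-processes the aggregated list before returning.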
@@ -104,7 +152,15 @@ def filter_object(obj, search):
             if filter_object(value, search):
                 return True
     else:
-        return unicode(search) in unicode(obj)
+        LOGGER.debug("search type {0} // obj type {1}".format(type(search), type(obj)))
+        try:
+            return six.u(search) in six.b(obj)
+        except TypeError as e:
+            LOGGER.warn("filter_object exception (PY2 vs PY3 unicode/str): {0}".format(e))
+            try:
+                return unicode(search) in unicode(obj)
+            except Exception as e:
+                LOGGER.error("filter_object exception: {0}".format(e))
 
     return False
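The six-based fallback above is papering over str/unicode (Python 2) versus str/bytes (Python 3) mismatches when doing substring matching. A simplified illustration of that idea; `text_contains()` is not code from the repo, and it deliberately coerces both sides to text rather than reproducing the exact fallback chain:

```python
# Simplified py2/py3-safe substring check: decode bytes, then compare as
# text. six.text_type is unicode on Python 2 and str on Python 3.
import six


def text_contains(needle, haystack):
    if isinstance(haystack, bytes):
        haystack = haystack.decode('utf-8', 'replace')
    if isinstance(needle, bytes):
        needle = needle.decode('utf-8', 'replace')
    return six.text_type(needle) in six.text_type(haystack)
```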

@@ -119,23 +175,29 @@ def filter_event(event):
     return filter_event
 
 
-def get_events(dc, filters=[]):
+def get_events(dc, timeout, filters=[]):
+    LOGGER.debug("Retrieving events for datacenter: {0}".format(dc['name']))
     url = 'http://{0}:{1}/events'.format(dc['url'], dc['port'])
 
     data = []
     r = None
 
     try:
         if 'user' and 'password' in dc:
-            r = requests.get(url, auth=(dc['user'], dc['password']))
+            r = requests.get(url, auth=(dc['user'], dc['password']), timeout=timeout)
+            r.raise_for_status()
             data = r.json()
         else:
-            r = requests.get(url)
+            r = requests.get(url, timeout=timeout)
             data = r.json()
+    except Exception as ex:
+        LOGGER.error("Got exception while retrieving events for dc: {0} ex: {1}".format(dc, str(ex)))
+        pass
     finally:
         if r:
             r.close()
 
+    LOGGER.debug("Events Retrieval for datacenter {0} complete".format(dc['name']))
     if len(filters) > 0:
         return filter(filter_events(filters), data)
     else:
@@ -186,7 +248,8 @@ def agg_data(dc, data, stashes, client_data=None, filters=None):
 
         if i['check']['name'] == "keepalive" and i['check']['status'] == 2:
             if not check_stash(stashes, i['client'], i['check']['name']):
-                # we cannot currently apply filters as keepalive checks do not have subscribers/subscriptions
+                # we cannot currently apply filters as keepalive checks do
+                # not have subscribers/subscriptions
                 down += 1
             else:
                 ack += 1
9 changes: 5 additions & 4 deletions requirements.txt
@@ -1,5 +1,6 @@
-Flask
-PyYAML
+Flask==0.12.2
+PyYAML==3.12
 argparse
-requests
-gunicorn
+requests==2.18.4
+gunicorn==19.7.1
+six==1.11.0