Skip to content

Commit

Permalink
Merge pull request #347 from hongwei1/develop
Browse files Browse the repository at this point in the history
refactor: removed the duplicated REST calls
  • Loading branch information
simonredfern authored Dec 6, 2023
2 parents 7e7a3db + 22e1378 commit 7030e30
Showing 1 changed file with 5 additions and 27 deletions.
32 changes: 5 additions & 27 deletions apimanager/metrics/views.py
Original file line number Diff line number Diff line change
def get_active_apps(self, from_date, to_date):
    """Fetch the active (top) consumers for the given date window.

    Calls the OBP ``top-consumers`` management endpoint once, excluding
    partial functions and the configured URL patterns, and returns the
    ``top_consumers`` entries as a list.  On a 403 response or any error
    the user is notified via ``error_once_only`` and an empty list is
    returned.

    :param from_date: start of the reporting window (API date string)
    :param to_date:   end of the reporting window (API date string)
    :return: list of top-consumer dicts (possibly empty)
    """
    active_apps_list = []
    # NOTE(review): get_form() result was unused in the original; kept in
    # case form construction has side effects — confirm and drop if not.
    form = self.get_form()
    urlpath = '/management/metrics/top-consumers?from_date={}&to_date={}&exclude_implemented_by_partial_functions={}&exclude_url_pattern={}'.format(
        from_date, to_date, ",".join(EXCLUDE_FUNCTIONS), ",".join(EXCLUDE_URL_PATTERN))
    api = API(self.request.session.get('obp'))
    try:
        apps = api.get(urlpath)
        # The API signals "forbidden" in-band as {'code': 403, 'message': ...}
        # rather than raising, so check for it explicitly.
        if apps is not None and 'code' in apps and apps['code'] == 403:
            error_once_only(self.request, apps['message'])
        else:
            active_apps_list = list(apps['top_consumers'])
    except APIError as err:
        error_once_only(self.request, err)
    except Exception as err:
        # Best-effort: surface the error to the user, return an empty list.
        error_once_only(self.request, err)
    return active_apps_list


Expand Down Expand Up @@ -718,12 +707,7 @@ def _api_data(self, urlpath, data_key):

def get_top_apis(self, cleaned_data, from_date, to_date):
    """Fetch the top 10 APIs for the given date window.

    Queries the OBP ``top-apis`` management endpoint (limit=10), excluding
    partial functions and the configured URL patterns, via ``_api_data``.

    :param cleaned_data: validated form data (currently unused here)
    :param from_date: start of the reporting window (API date string)
    :param to_date:   end of the reporting window (API date string)
    :return: the ``top_apis`` payload extracted by ``_api_data``
    """
    urlpath = '/management/metrics/top-apis?limit=10&from_date={}&to_date={}&exclude_implemented_by_partial_functions={}&exclude_url_pattern={}'.format(
        from_date, to_date, ",".join(EXCLUDE_FUNCTIONS), ",".join(EXCLUDE_URL_PATTERN))
    top_apis = self._api_data(urlpath, 'top_apis')

Expand All @@ -739,20 +723,14 @@ def get_top_apis(self, cleaned_data, from_date, to_date):

def get_top_consumers(self, cleaned_data, from_date, to_date):
    """Fetch the top 10 consumers for the given date window.

    Queries the OBP ``top-consumers`` management endpoint (limit=10),
    excluding partial functions and the configured URL patterns, drops
    consumers with an empty ``app_name``, and returns the remaining
    entries in reverse order (for ascending chart display).

    :param cleaned_data: validated form data (currently unused here)
    :param from_date: start of the reporting window (API date string)
    :param to_date:   end of the reporting window (API date string)
    :return: list of consumer dicts, at most 10, in reversed order
    """
    urlpath = '/management/metrics/top-consumers?limit=10&from_date={}&to_date={}&exclude_implemented_by_partial_functions={}&exclude_url_pattern={}'.format(
        from_date, to_date, ",".join(EXCLUDE_FUNCTIONS), ",".join(EXCLUDE_URL_PATTERN))
    top_consumers = self._api_data(urlpath, 'top_consumers')
    # Build a new list instead of remove()-ing while iterating: the old
    # pattern skipped the element following each removal, so consecutive
    # empty-named consumers survived the filter.
    top_consumers = [c for c in top_consumers if c['app_name'] != ""]
    # Defensive cap at 10 in case the backend ignores the limit parameter.
    top_consumers = top_consumers[:10]
    # Return a concrete list (backward-compatible with the old reversed()
    # iterator — callers only iterate the result).
    return list(reversed(top_consumers))
Expand Down

0 comments on commit 7030e30

Please sign in to comment.