From 3c7d252a6750cb15c1e1aafdb7c794c659e0f33e Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 16 Feb 2021 20:23:10 -0500 Subject: [PATCH 01/46] Updated to support Python3 using 2to3 --- gitinspector/blame.py | 10 +++---- gitinspector/changes.py | 12 ++++---- gitinspector/clone.py | 4 +-- gitinspector/comment.py | 2 +- gitinspector/config.py | 6 ++-- gitinspector/extensions.py | 2 +- gitinspector/filtering.py | 4 +-- gitinspector/format.py | 4 +-- gitinspector/gitinspector.py | 4 +-- gitinspector/gravatar.py | 4 +-- gitinspector/help.py | 4 +-- gitinspector/interval.py | 2 +- gitinspector/localization.py | 4 +-- gitinspector/metrics.py | 2 +- gitinspector/optval.py | 2 +- gitinspector/output/blameoutput.py | 4 +-- gitinspector/output/changesoutput.py | 4 +-- gitinspector/output/extensionsoutput.py | 4 +-- gitinspector/output/filteringoutput.py | 4 +-- gitinspector/output/metricsoutput.py | 28 +++++++++---------- gitinspector/output/outputable.py | 4 +-- gitinspector/output/responsibilitiesoutput.py | 4 +-- gitinspector/output/timelineoutput.py | 4 +-- gitinspector/responsibilities.py | 6 ++-- gitinspector/terminal.py | 2 +- gitinspector/timeline.py | 4 +-- gitinspector/version.py | 4 +-- 27 files changed, 69 insertions(+), 69 deletions(-) diff --git a/gitinspector/blame.py b/gitinspector/blame.py index 317d3f9d..f1a9c66f 100644 --- a/gitinspector/blame.py +++ b/gitinspector/blame.py @@ -17,8 +17,8 @@ # You should have received a copy of the GNU General Public License # along with gitinspector. If not, see . -from __future__ import print_function -from __future__ import unicode_literals + + import datetime import multiprocessing import re @@ -141,9 +141,9 @@ def __init__(self, repo, hard, useweeks, changes): if FileDiff.get_extension(row) in extensions.get_located() and \ FileDiff.is_valid_extension(row) and not filtering.set_filtered(FileDiff.get_filename(row)): - blame_command = filter(None, ["git", "blame", "--line-porcelain", "-w"] + \ + blame_command = [_f for _f in ["git", "blame", "--line-porcelain", "-w"] + \ (["-C", "-C", "-M"] if hard else []) + - [interval.get_since(), interval.get_ref(), "--", row]) + [interval.get_since(), interval.get_ref(), "--", row] if _f] thread = BlameThread(useweeks, changes, blame_command, FileDiff.get_extension(row), self.blames, row.strip()) thread.daemon = True @@ -190,7 +190,7 @@ def get_time(string): def get_summed_blames(self): summed_blames = {} - for i in self.blames.items(): + for i in list(self.blames.items()): if summed_blames.get(i[0][0], None) == None: summed_blames[i[0][0]] = BlameEntry() diff --git a/gitinspector/changes.py b/gitinspector/changes.py index f1b39ff8..9758a9aa 100644 --- a/gitinspector/changes.py +++ b/gitinspector/changes.py @@ -17,8 +17,8 @@ # You should have received a copy of the GNU General Public License # along with gitinspector. If not, see . 
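# --- Illustrative sketch, not part of the patch -------------------------------------------------
# The blame.py hunk above (and the changes.py hunks that follow) replace filter(None, [...]) with
# the eager comprehension [_f for _f in [...] if _f] because Python 3's filter() returns a lazy
# iterator rather than a list; this is the standard 2to3 "filter" fixer.  Both spellings drop
# falsy entries, such as the empty strings interval.get_since() and interval.get_until() return
# when no interval is configured.  A quick check with a hypothetical command line:
parts = ["git", "blame", "--line-porcelain", "-w", "", "--", "file.py"]   # "" mimics an unset option
assert [p for p in parts if p] == list(filter(None, parts))
assert [p for p in parts if p] == ["git", "blame", "--line-porcelain", "-w", "--", "file.py"]
# --- end of sketch -------------------------------------------------------------------------------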
-from __future__ import division -from __future__ import unicode_literals + + import bisect import datetime import multiprocessing @@ -122,10 +122,10 @@ def create(hard, changes, first_hash, second_hash, offset): thread.start() def run(self): - git_log_r = subprocess.Popen(filter(None, ["git", "log", "--reverse", "--pretty=%ct|%cd|%H|%aN|%aE", + git_log_r = subprocess.Popen([_f for _f in ["git", "log", "--reverse", "--pretty=%ct|%cd|%H|%aN|%aE", "--stat=100000,8192", "--no-merges", "-w", interval.get_since(), interval.get_until(), "--date=short"] + (["-C", "-C", "-M"] if self.hard else []) + - [self.first_hash + self.second_hash]), bufsize=1, stdout=subprocess.PIPE).stdout + [self.first_hash + self.second_hash] if _f], bufsize=1, stdout=subprocess.PIPE).stdout lines = git_log_r.readlines() git_log_r.close() @@ -185,8 +185,8 @@ class Changes(object): def __init__(self, repo, hard): self.commits = [] interval.set_ref("HEAD"); - git_rev_list_p = subprocess.Popen(filter(None, ["git", "rev-list", "--reverse", "--no-merges", - interval.get_since(), interval.get_until(), "HEAD"]), bufsize=1, + git_rev_list_p = subprocess.Popen([_f for _f in ["git", "rev-list", "--reverse", "--no-merges", + interval.get_since(), interval.get_until(), "HEAD"] if _f], bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) lines = git_rev_list_p.communicate()[0].splitlines() git_rev_list_p.stdout.close() diff --git a/gitinspector/clone.py b/gitinspector/clone.py index 4fe858d4..18fe8c64 100644 --- a/gitinspector/clone.py +++ b/gitinspector/clone.py @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with gitinspector. If not, see . -from __future__ import unicode_literals + import os import shutil import subprocess @@ -27,7 +27,7 @@ try: from urllib.parse import urlparse except: - from urlparse import urlparse + from urllib.parse import urlparse __cloned_paths__ = [] diff --git a/gitinspector/comment.py b/gitinspector/comment.py index c80c9e32..0671064e 100644 --- a/gitinspector/comment.py +++ b/gitinspector/comment.py @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with gitinspector. If not, see . 
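# --- Illustrative sketch, not part of the patch -------------------------------------------------
# In the clone.py hunk above, 2to3 has rewritten the Python 2 fallback "from urlparse import
# urlparse" into the same import as the try branch, leaving a try/except whose two branches are
# identical (the gravatar.py hunk later in this patch does the same for urlencode).  On a
# Python-3-only codebase the guard no longer protects anything, so the equivalent plain form is:
from urllib.parse import urlparse

print(urlparse("https://example.com/repo.git").netloc)   # prints "example.com"
# --- end of sketch -------------------------------------------------------------------------------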
-from __future__ import unicode_literals + __comment_begining__ = {"java": "/*", "c": "/*", "cc": "/*", "cpp": "/*", "cs": "/*", "h": "/*", "hh": "/*", "hpp": "/*", "hs": "{-", "html": "", + "php": "*/", + "py": '"""', + "glsl": "*/", + "rb": "=end", + "js": "*/", + "jspx": "-->", + "scala": "*/", + "sql": "*/", + "tex": "\\end{comment}", + "xhtml": "-->", + "xml": "-->", + "ml": "*)", + "mli": "*)", + "go": "*/", + "ly": "%}", + "ily": "%}", +} -__comment_end__ = {"java": "*/", "c": "*/", "cc": "*/", "cpp": "*/", "cs": "*/", "h": "*/", "hh": "*/", "hpp": "*/", "hs": "-}", - "html": "-->", "php": "*/", "py": "\"\"\"", "glsl": "*/", "rb": "=end", "js": "*/", "jspx": "-->", - "scala": "*/", "sql": "*/", "tex": "\\end{comment}", "xhtml": "-->", "xml": "-->", "ml": "*)", "mli": "*)", - "go": "*/", "ly": "%}", "ily": "%}"} - -__comment__ = {"java": "//", "c": "//", "cc": "//", "cpp": "//", "cs": "//", "h": "//", "hh": "//", "hpp": "//", "hs": "--", - "pl": "#", "php": "//", "py": "#", "glsl": "//", "rb": "#", "robot": "#", "rs": "//", "rlib": "//", "js": "//", - "scala": "//", "sql": "--", "tex": "%", "ada": "--", "ads": "--", "adb": "--", "pot": "#", "po": "#", "go": "//", - "ly": "%", "ily": "%"} +__comment__ = { + "java": "//", + "c": "//", + "cc": "//", + "cpp": "//", + "cs": "//", + "h": "//", + "hh": "//", + "hpp": "//", + "hs": "--", + "pl": "#", + "php": "//", + "py": "#", + "glsl": "//", + "rb": "#", + "robot": "#", + "rs": "//", + "rlib": "//", + "js": "//", + "scala": "//", + "sql": "--", + "tex": "%", + "ada": "--", + "ads": "--", + "adb": "--", + "pot": "#", + "po": "#", + "go": "//", + "ly": "%", + "ily": "%", +} __comment_markers_must_be_at_begining__ = {"tex": True} + def __has_comment_begining__(extension, string): - if __comment_markers_must_be_at_begining__.get(extension, None) == True: - return string.find(__comment_begining__[extension]) == 0 - elif __comment_begining__.get(extension, None) != None and string.find(__comment_end__[extension], 2) == -1: - return string.find(__comment_begining__[extension]) != -1 + if __comment_markers_must_be_at_begining__.get(extension, None): + return string.find(__comment_begining__[extension]) == 0 + elif __comment_begining__.get(extension, None) is not None and string.find(__comment_end__[extension], 2) == -1: + return string.find(__comment_begining__[extension]) != -1 + + return False - return False def __has_comment_end__(extension, string): - if __comment_markers_must_be_at_begining__.get(extension, None) == True: - return string.find(__comment_end__[extension]) == 0 - elif __comment_end__.get(extension, None) != None: - return string.find(__comment_end__[extension]) != -1 + if __comment_markers_must_be_at_begining__.get(extension, None): + return string.find(__comment_end__[extension]) == 0 + elif __comment_end__.get(extension, None) is not None: + return string.find(__comment_end__[extension]) != -1 + + return False - return False def is_comment(extension, string): - if __comment_begining__.get(extension, None) != None and string.strip().startswith(__comment_begining__[extension]): - return True - if __comment_end__.get(extension, None) != None and string.strip().endswith(__comment_end__[extension]): - return True - if __comment__.get(extension, None) != None and string.strip().startswith(__comment__[extension]): - return True + if __comment_begining__.get(extension, None) is not None and string.strip().startswith(__comment_begining__[extension]): + return True + if __comment_end__.get(extension, None) is not None and 
string.strip().endswith(__comment_end__[extension]): + return True + if __comment__.get(extension, None) is not None and string.strip().startswith(__comment__[extension]): + return True + + return False - return False def handle_comment_block(is_inside_comment, extension, content): - comments = 0 - - if is_comment(extension, content): - comments += 1 - if is_inside_comment: - if __has_comment_end__(extension, content): - is_inside_comment = False - else: - comments += 1 - elif __has_comment_begining__(extension, content) and not __has_comment_end__(extension, content): - is_inside_comment = True - - return (comments, is_inside_comment) + comments = 0 + + if is_comment(extension, content): + comments += 1 + if is_inside_comment: + if __has_comment_end__(extension, content): + is_inside_comment = False + else: + comments += 1 + elif __has_comment_begining__(extension, content) and not __has_comment_end__(extension, content): + is_inside_comment = True + + return (comments, is_inside_comment) diff --git a/gitinspector/config.py b/gitinspector/config.py index ea23489b..824161a7 100644 --- a/gitinspector/config.py +++ b/gitinspector/config.py @@ -22,72 +22,75 @@ import subprocess from . import extensions, filtering, format, interval, optval + class GitConfig(object): - def __init__(self, run, repo, global_only=False): - self.run = run - self.repo = repo - self.global_only = global_only - - def __read_git_config__(self, variable): - previous_directory = os.getcwd() - os.chdir(self.repo) - setting = subprocess.Popen([_f for _f in ["git", "config", "--global" if self.global_only else "", - "inspector." + variable] if _f], stdout=subprocess.PIPE).stdout - os.chdir(previous_directory) - - try: - setting = setting.readlines()[0] - setting = setting.decode("utf-8", "replace").strip() - except IndexError: - setting = "" - - return setting - - def __read_git_config_bool__(self, variable): - try: - variable = self.__read_git_config__(variable) - return optval.get_boolean_argument(False if variable == "" else variable) - except optval.InvalidOptionArgument: - return False - - def __read_git_config_string__(self, variable): - string = self.__read_git_config__(variable) - return (True, string) if len(string) > 0 else (False, None) - - def read(self): - var = self.__read_git_config_string__("file-types") - if var[0]: - extensions.define(var[1]) - - var = self.__read_git_config_string__("exclude") - if var[0]: - filtering.add(var[1]) - - var = self.__read_git_config_string__("format") - if var[0] and not format.select(var[1]): - raise format.InvalidFormatError(_("specified output format not supported.")) - - self.run.hard = self.__read_git_config_bool__("hard") - self.run.list_file_types = self.__read_git_config_bool__("list-file-types") - self.run.localize_output = self.__read_git_config_bool__("localize-output") - self.run.metrics = self.__read_git_config_bool__("metrics") - self.run.responsibilities = self.__read_git_config_bool__("responsibilities") - self.run.useweeks = self.__read_git_config_bool__("weeks") - - var = self.__read_git_config_string__("since") - if var[0]: - interval.set_since(var[1]) - - var = self.__read_git_config_string__("until") - if var[0]: - interval.set_until(var[1]) - - self.run.timeline = self.__read_git_config_bool__("timeline") - - if self.__read_git_config_bool__("grading"): - self.run.hard = True - self.run.list_file_types = True - self.run.metrics = True - self.run.responsibilities = True - self.run.timeline = True - self.run.useweeks = True + def __init__(self, run, repo, 
global_only=False): + self.run = run + self.repo = repo + self.global_only = global_only + + def __read_git_config__(self, variable): + previous_directory = os.getcwd() + os.chdir(self.repo) + setting = subprocess.Popen( + [_f for _f in ["git", "config", "--global" if self.global_only else "", "inspector." + variable] if _f], + stdout=subprocess.PIPE, + ).stdout + os.chdir(previous_directory) + + try: + setting = setting.readlines()[0] + setting = setting.decode("utf-8", "replace").strip() + except IndexError: + setting = "" + + return setting + + def __read_git_config_bool__(self, variable): + try: + variable = self.__read_git_config__(variable) + return optval.get_boolean_argument(False if variable == "" else variable) + except optval.InvalidOptionArgument: + return False + + def __read_git_config_string__(self, variable): + string = self.__read_git_config__(variable) + return (True, string) if len(string) > 0 else (False, None) + + def read(self): + var = self.__read_git_config_string__("file-types") + if var[0]: + extensions.define(var[1]) + + var = self.__read_git_config_string__("exclude") + if var[0]: + filtering.add(var[1]) + + var = self.__read_git_config_string__("format") + if var[0] and not format.select(var[1]): + raise format.InvalidFormatError(_("specified output format not supported.")) + + self.run.hard = self.__read_git_config_bool__("hard") + self.run.list_file_types = self.__read_git_config_bool__("list-file-types") + self.run.localize_output = self.__read_git_config_bool__("localize-output") + self.run.metrics = self.__read_git_config_bool__("metrics") + self.run.responsibilities = self.__read_git_config_bool__("responsibilities") + self.run.useweeks = self.__read_git_config_bool__("weeks") + + var = self.__read_git_config_string__("since") + if var[0]: + interval.set_since(var[1]) + + var = self.__read_git_config_string__("until") + if var[0]: + interval.set_until(var[1]) + + self.run.timeline = self.__read_git_config_bool__("timeline") + + if self.__read_git_config_bool__("grading"): + self.run.hard = True + self.run.list_file_types = True + self.run.metrics = True + self.run.responsibilities = True + self.run.timeline = True + self.run.useweeks = True diff --git a/gitinspector/extensions.py b/gitinspector/extensions.py index 56f45d53..4d1f53b9 100644 --- a/gitinspector/extensions.py +++ b/gitinspector/extensions.py @@ -18,24 +18,27 @@ # along with gitinspector. If not, see . 
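# --- Illustrative sketch, not part of the patch -------------------------------------------------
# GitConfig.__read_git_config__ in the config.py hunk above shells out to
# `git config [--global] inspector.<variable>` and treats a missing key as the empty string.
# A minimal stand-alone version of the same idea (the key "inspector.metrics" is only an example);
# passing cwd= to subprocess.run avoids the os.chdir round trip used above:
import subprocess

def read_inspector_setting(repo_path, variable, global_only=False):
    command = ["git", "config"] + (["--global"] if global_only else []) + ["inspector." + variable]
    result = subprocess.run(command, cwd=repo_path, capture_output=True, text=True)
    return result.stdout.strip()   # "" when the key is unset (git config exits with a non-zero status)

# e.g. after `git config inspector.metrics true`, read_inspector_setting(".", "metrics") == "true"
# --- end of sketch -------------------------------------------------------------------------------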
- DEFAULT_EXTENSIONS = ["java", "c", "cc", "cpp", "h", "hh", "hpp", "py", "glsl", "rb", "js", "sql"] __extensions__ = DEFAULT_EXTENSIONS __located_extensions__ = set() + def get(): - return __extensions__ + return __extensions__ + def define(string): - global __extensions__ - __extensions__ = string.split(",") + global __extensions__ + __extensions__ = string.split(",") + def add_located(string): - if len(string) == 0: - __located_extensions__.add("*") - else: - __located_extensions__.add(string) + if len(string) == 0: + __located_extensions__.add("*") + else: + __located_extensions__.add(string) + def get_located(): - return __located_extensions__ + return __located_extensions__ diff --git a/gitinspector/filtering.py b/gitinspector/filtering.py index 5fc65ed7..ee8d825c 100644 --- a/gitinspector/filtering.py +++ b/gitinspector/filtering.py @@ -21,69 +21,84 @@ import re import subprocess -__filters__ = {"file": [set(), set()], "author": [set(), set()], "email": [set(), set()], "revision": [set(), set()], - "message" : [set(), None]} +__filters__ = { + "file": [set(), set()], + "author": [set(), set()], + "email": [set(), set()], + "revision": [set(), set()], + "message": [set(), None], +} + class InvalidRegExpError(ValueError): - def __init__(self, msg): - super(InvalidRegExpError, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidRegExpError, self).__init__(msg) + self.msg = msg + def get(): - return __filters__ + return __filters__ + def __add_one__(string): - for i in __filters__: - if (i + ":").lower() == string[0:len(i) + 1].lower(): - __filters__[i][0].add(string[len(i) + 1:]) - return - __filters__["file"][0].add(string) + for i in __filters__: + if (i + ":").lower() == string[0:len(i) + 1].lower(): + __filters__[i][0].add(string[len(i) + 1:]) + return + __filters__["file"][0].add(string) + def add(string): - rules = string.split(",") - for rule in rules: - __add_one__(rule) + rules = string.split(",") + for rule in rules: + __add_one__(rule) + def clear(): - for i in __filters__: - __filters__[i][0] = set() + for i in __filters__: + __filters__[i][0] = set() + def get_filered(filter_type="file"): - return __filters__[filter_type][1] + return __filters__[filter_type][1] + def has_filtered(): - for i in __filters__: - if __filters__[i][1]: - return True - return False + for i in __filters__: + if __filters__[i][1]: + return True + return False + def __find_commit_message__(sha): - git_show_r = subprocess.Popen([_f for _f in ["git", "show", "-s", "--pretty=%B", "-w", sha] if _f], - stdout=subprocess.PIPE).stdout + git_show_r = subprocess.Popen( + [_f for _f in ["git", "show", "-s", "--pretty=%B", "-w", sha] if _f], stdout=subprocess.PIPE + ).stdout + + commit_message = git_show_r.read() + git_show_r.close() - commit_message = git_show_r.read() - git_show_r.close() + commit_message = commit_message.strip().decode("unicode_escape", "ignore") + commit_message = commit_message.encode("latin-1", "replace") + return commit_message.decode("utf-8", "replace") - commit_message = commit_message.strip().decode("unicode_escape", "ignore") - commit_message = commit_message.encode("latin-1", "replace") - return commit_message.decode("utf-8", "replace") def set_filtered(string, filter_type="file"): - string = string.strip() - - if len(string) > 0: - for i in __filters__[filter_type][0]: - search_for = string - - if filter_type == "message": - search_for = __find_commit_message__(string) - try: - if re.search(i, search_for) != None: - if filter_type == "message": - 
__add_one__("revision:" + string) - else: - __filters__[filter_type][1].add(string) - return True - except: - raise InvalidRegExpError(_("invalid regular expression specified")) - return False + string = string.strip() + + if len(string) > 0: + for i in __filters__[filter_type][0]: + search_for = string + + if filter_type == "message": + search_for = __find_commit_message__(string) + try: + if re.search(i, search_for) is not None: + if filter_type == "message": + __add_one__("revision:" + string) + else: + __filters__[filter_type][1].add(string) + return True + except: + raise InvalidRegExpError(_("invalid regular expression specified")) + return False diff --git a/gitinspector/format.py b/gitinspector/format.py index c3d9c054..20448710 100644 --- a/gitinspector/format.py +++ b/gitinspector/format.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import base64 import os import textwrap @@ -33,122 +32,142 @@ __selected_format__ = DEFAULT_FORMAT + class InvalidFormatError(Exception): - def __init__(self, msg): - super(InvalidFormatError, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidFormatError, self).__init__(msg) + self.msg = msg + def select(format): - global __selected_format__ - __selected_format__ = format + global __selected_format__ + __selected_format__ = format + + return format in __available_formats__ - return format in __available_formats__ def get_selected(): - return __selected_format__ + return __selected_format__ + def is_interactive_format(): - return __selected_format__ == "text" + return __selected_format__ == "text" + def __output_html_template__(name): - template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), name) - file_r = open(template_path, "rb") - template = file_r.read().decode("utf-8", "replace") + template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), name) + file_r = open(template_path, "rb") + template = file_r.read().decode("utf-8", "replace") + + file_r.close() + return template - file_r.close() - return template def __get_zip_file_content__(name, file_name="/html/flot.zip"): - zip_file = zipfile.ZipFile(basedir.get_basedir() + file_name, "r") - content = zip_file.read(name) + zip_file = zipfile.ZipFile(basedir.get_basedir() + file_name, "r") + content = zip_file.read(name) + + zip_file.close() + return content.decode("utf-8", "replace") - zip_file.close() - return content.decode("utf-8", "replace") INFO_ONE_REPOSITORY = N_("Statistical information for the repository '{0}' was gathered on {1}.") INFO_MANY_REPOSITORIES = N_("Statistical information for the repositories '{0}' was gathered on {1}.") + def output_header(repos): - repos_string = ", ".join([repo.name for repo in repos]) - - if __selected_format__ == "html" or __selected_format__ == "htmlembedded": - base = basedir.get_basedir() - html_header = __output_html_template__(base + "/html/html.header") - tablesorter_js = __get_zip_file_content__("jquery.tablesorter.min.js", - "/html/jquery.tablesorter.min.js.zip").encode("latin-1", "replace") - tablesorter_js = tablesorter_js.decode("utf-8", "ignore") - flot_js = __get_zip_file_content__("jquery.flot.js") - pie_js = __get_zip_file_content__("jquery.flot.pie.js") - resize_js = __get_zip_file_content__("jquery.flot.resize.js") - - logo_file = open(base + "/html/gitinspector_piclet.png", "rb") - logo = logo_file.read() - logo_file.close() - logo = base64.b64encode(logo) - - if __selected_format__ == "htmlembedded": - jquery_js = ">" + __get_zip_file_content__("jquery.js") - 
else: - jquery_js = " src=\"https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js\">" - - print(html_header.format(title=_("Repository statistics for '{0}'").format(repos_string), - jquery=jquery_js, - jquery_tablesorter=tablesorter_js, - jquery_flot=flot_js, - jquery_flot_pie=pie_js, - jquery_flot_resize=resize_js, - logo=logo.decode("utf-8", "replace"), - logo_text=_("The output has been generated by {0} {1}. The statistical analysis tool" - " for git repositories.").format( - "gitinspector", - version.__version__), - repo_text=_(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( - repos_string, localization.get_date()), - show_minor_authors=_("Show minor authors"), - hide_minor_authors=_("Hide minor authors"), - show_minor_rows=_("Show rows with minor work"), - hide_minor_rows=_("Hide rows with minor work"))) - elif __selected_format__ == "json": - print("{\n\t\"gitinspector\": {") - print("\t\t\"version\": \"" + version.__version__ + "\",") - - if len(repos) <= 1: - print("\t\t\"repository\": \"" + repos_string + "\",") - else: - repos_json = "\t\t\"repositories\": [ " - - for repo in repos: - repos_json += "\"" + repo.name + "\", " - - print(repos_json[:-2] + " ],") - - print("\t\t\"report_date\": \"" + time.strftime("%Y/%m/%d") + "\",") - - elif __selected_format__ == "xml": - print("") - print("\t" + version.__version__ + "") - - if len(repos) <= 1: - print("\t" + repos_string + "") - else: - print("\t") - - for repo in repos: - print("\t\t" + repo.name + "") - - print("\t") - - print("\t" + time.strftime("%Y/%m/%d") + "") - else: - print(textwrap.fill(_(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( - repos_string, localization.get_date()), width=terminal.get_size()[0])) + repos_string = ", ".join([repo.name for repo in repos]) + + if __selected_format__ == "html" or __selected_format__ == "htmlembedded": + base = basedir.get_basedir() + html_header = __output_html_template__(base + "/html/html.header") + tablesorter_js = __get_zip_file_content__("jquery.tablesorter.min.js", "/html/jquery.tablesorter.min.js.zip").encode( + "latin-1", "replace" + ) + tablesorter_js = tablesorter_js.decode("utf-8", "ignore") + flot_js = __get_zip_file_content__("jquery.flot.js") + pie_js = __get_zip_file_content__("jquery.flot.pie.js") + resize_js = __get_zip_file_content__("jquery.flot.resize.js") + + logo_file = open(base + "/html/gitinspector_piclet.png", "rb") + logo = logo_file.read() + logo_file.close() + logo = base64.b64encode(logo) + + if __selected_format__ == "htmlembedded": + jquery_js = ">" + __get_zip_file_content__("jquery.js") + else: + jquery_js = ' src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js">' + + print( + html_header.format( + title=_("Repository statistics for '{0}'").format(repos_string), + jquery=jquery_js, + jquery_tablesorter=tablesorter_js, + jquery_flot=flot_js, + jquery_flot_pie=pie_js, + jquery_flot_resize=resize_js, + logo=logo.decode("utf-8", "replace"), + logo_text=_( + "The output has been generated by {0} {1}. The statistical analysis tool" " for git repositories." 
+ ).format('gitinspector', version.__version__), + repo_text=_(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( + repos_string, localization.get_date() + ), + show_minor_authors=_("Show minor authors"), + hide_minor_authors=_("Hide minor authors"), + show_minor_rows=_("Show rows with minor work"), + hide_minor_rows=_("Hide rows with minor work"), + ) + ) + elif __selected_format__ == "json": + print('{\n\t"gitinspector": {') + print('\t\t"version": "' + version.__version__ + '",') + + if len(repos) <= 1: + print('\t\t"repository": "' + repos_string + '",') + else: + repos_json = '\t\t"repositories": [ ' + + for repo in repos: + repos_json += '"' + repo.name + '", ' + + print(repos_json[:-2] + " ],") + + print('\t\t"report_date": "' + time.strftime("%Y/%m/%d") + '",') + + elif __selected_format__ == "xml": + print("") + print("\t" + version.__version__ + "") + + if len(repos) <= 1: + print("\t" + repos_string + "") + else: + print("\t") + + for repo in repos: + print("\t\t" + repo.name + "") + + print("\t") + + print("\t" + time.strftime("%Y/%m/%d") + "") + else: + print( + textwrap.fill( + _(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( + repos_string, localization.get_date() + ), + width=terminal.get_size()[0], + ) + ) + def output_footer(): - if __selected_format__ == "html" or __selected_format__ == "htmlembedded": - base = basedir.get_basedir() - html_footer = __output_html_template__(base + "/html/html.footer") - print(html_footer) - elif __selected_format__ == "json": - print("\n\t}\n}") - elif __selected_format__ == "xml": - print("") + if __selected_format__ == "html" or __selected_format__ == "htmlembedded": + base = basedir.get_basedir() + html_footer = __output_html_template__(base + "/html/html.footer") + print(html_footer) + elif __selected_format__ == "json": + print("\n\t}\n}") + elif __selected_format__ == "xml": + print("") diff --git a/gitinspector/gitinspector.py b/gitinspector/gitinspector.py index e21fe9ac..2f8ca3a0 100644 --- a/gitinspector/gitinspector.py +++ b/gitinspector/gitinspector.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import atexit import getopt import os @@ -27,8 +26,7 @@ from .changes import Changes from .config import GitConfig from .metrics import MetricsLogic -from . import (basedir, clone, extensions, filtering, format, help, interval, - localization, optval, terminal, version) +from . 
import basedir, clone, extensions, filtering, format, help, interval, localization, optval, terminal, version from .output import outputable from .output.blameoutput import BlameOutput from .output.changesoutput import ChangesOutput @@ -40,179 +38,202 @@ localization.init() + class Runner(object): - def __init__(self): - self.hard = False - self.include_metrics = False - self.list_file_types = False - self.localize_output = False - self.responsibilities = False - self.grading = False - self.timeline = False - self.useweeks = False + def __init__(self): + self.hard = False + self.include_metrics = False + self.list_file_types = False + self.localize_output = False + self.responsibilities = False + self.grading = False + self.timeline = False + self.useweeks = False - def process(self, repos): - localization.check_compatibility(version.__version__) + def process(self, repos): + localization.check_compatibility(version.__version__) - if not self.localize_output: - localization.disable() + if not self.localize_output: + localization.disable() - terminal.skip_escapes(not sys.stdout.isatty()) - terminal.set_stdout_encoding() - previous_directory = os.getcwd() - summed_blames = Blame.__new__(Blame) - summed_changes = Changes.__new__(Changes) - summed_metrics = MetricsLogic.__new__(MetricsLogic) + terminal.skip_escapes(not sys.stdout.isatty()) + terminal.set_stdout_encoding() + previous_directory = os.getcwd() + summed_blames = Blame.__new__(Blame) + summed_changes = Changes.__new__(Changes) + summed_metrics = MetricsLogic.__new__(MetricsLogic) - for repo in repos: - os.chdir(repo.location) - repo = repo if len(repos) > 1 else None - changes = Changes(repo, self.hard) - summed_blames += Blame(repo, self.hard, self.useweeks, changes) - summed_changes += changes + for repo in repos: + os.chdir(repo.location) + repo = repo if len(repos) > 1 else None + changes = Changes(repo, self.hard) + summed_blames += Blame(repo, self.hard, self.useweeks, changes) + summed_changes += changes - if self.include_metrics: - summed_metrics += MetricsLogic() + if self.include_metrics: + summed_metrics += MetricsLogic() - if sys.stdout.isatty() and format.is_interactive_format(): - terminal.clear_row() - else: - os.chdir(previous_directory) + if sys.stdout.isatty() and format.is_interactive_format(): + terminal.clear_row() + else: + os.chdir(previous_directory) - format.output_header(repos) - outputable.output(ChangesOutput(summed_changes)) + format.output_header(repos) + outputable.output(ChangesOutput(summed_changes)) - if summed_changes.get_commits(): - outputable.output(BlameOutput(summed_changes, summed_blames)) + if summed_changes.get_commits(): + outputable.output(BlameOutput(summed_changes, summed_blames)) - if self.timeline: - outputable.output(TimelineOutput(summed_changes, self.useweeks)) + if self.timeline: + outputable.output(TimelineOutput(summed_changes, self.useweeks)) - if self.include_metrics: - outputable.output(MetricsOutput(summed_metrics)) + if self.include_metrics: + outputable.output(MetricsOutput(summed_metrics)) - if self.responsibilities: - outputable.output(ResponsibilitiesOutput(summed_changes, summed_blames)) + if self.responsibilities: + outputable.output(ResponsibilitiesOutput(summed_changes, summed_blames)) - outputable.output(FilteringOutput()) + outputable.output(FilteringOutput()) - if self.list_file_types: - outputable.output(ExtensionsOutput()) + if self.list_file_types: + outputable.output(ExtensionsOutput()) + + format.output_footer() + os.chdir(previous_directory) - 
format.output_footer() - os.chdir(previous_directory) def __check_python_version__(): - if sys.version_info < (2, 6): - python_version = str(sys.version_info[0]) + "." + str(sys.version_info[1]) - sys.exit(_("gitinspector requires at least Python 2.6 to run (version {0} was found).").format(python_version)) + if sys.version_info < (2, 6): + python_version = str(sys.version_info[0]) + "." + str(sys.version_info[1]) + sys.exit(_("gitinspector requires at least Python 2.6 to run (version {0} was found).").format(python_version)) + def __get_validated_git_repos__(repos_relative): - if not repos_relative: - repos_relative = "." + if not repos_relative: + repos_relative = "." - repos = [] + repos = [] - #Try to clone the repos or return the same directory and bail out. - for repo in repos_relative: - cloned_repo = clone.create(repo) + # Try to clone the repos or return the same directory and bail out. + for repo in repos_relative: + cloned_repo = clone.create(repo) - if cloned_repo.name == None: - cloned_repo.location = basedir.get_basedir_git(cloned_repo.location) - cloned_repo.name = os.path.basename(cloned_repo.location) + if cloned_repo.name is None: + cloned_repo.location = basedir.get_basedir_git(cloned_repo.location) + cloned_repo.name = os.path.basename(cloned_repo.location) - repos.append(cloned_repo) + repos.append(cloned_repo) + + return repos - return repos def main(): - terminal.check_terminal_encoding() - terminal.set_stdin_encoding() - argv = terminal.convert_command_line_to_utf8() - run = Runner() - repos = [] - - try: - opts, args = optval.gnu_getopt(argv[1:], "f:F:hHlLmrTwx:", ["exclude=", "file-types=", "format=", - "hard:true", "help", "list-file-types:true", "localize-output:true", - "metrics:true", "responsibilities:true", "since=", "grading:true", - "timeline:true", "until=", "version", "weeks:true"]) - repos = __get_validated_git_repos__(set(args)) - - #We need the repos above to be set before we read the git config. 
- GitConfig(run, repos[-1].location).read() - clear_x_on_next_pass = True - - for o, a in opts: - if o in("-h", "--help"): - help.output() - sys.exit(0) - elif o in("-f", "--file-types"): - extensions.define(a) - elif o in("-F", "--format"): - if not format.select(a): - raise format.InvalidFormatError(_("specified output format not supported.")) - elif o == "-H": - run.hard = True - elif o == "--hard": - run.hard = optval.get_boolean_argument(a) - elif o == "-l": - run.list_file_types = True - elif o == "--list-file-types": - run.list_file_types = optval.get_boolean_argument(a) - elif o == "-L": - run.localize_output = True - elif o == "--localize-output": - run.localize_output = optval.get_boolean_argument(a) - elif o == "-m": - run.include_metrics = True - elif o == "--metrics": - run.include_metrics = optval.get_boolean_argument(a) - elif o == "-r": - run.responsibilities = True - elif o == "--responsibilities": - run.responsibilities = optval.get_boolean_argument(a) - elif o == "--since": - interval.set_since(a) - elif o == "--version": - version.output() - sys.exit(0) - elif o == "--grading": - grading = optval.get_boolean_argument(a) - run.include_metrics = grading - run.list_file_types = grading - run.responsibilities = grading - run.grading = grading - run.hard = grading - run.timeline = grading - run.useweeks = grading - elif o == "-T": - run.timeline = True - elif o == "--timeline": - run.timeline = optval.get_boolean_argument(a) - elif o == "--until": - interval.set_until(a) - elif o == "-w": - run.useweeks = True - elif o == "--weeks": - run.useweeks = optval.get_boolean_argument(a) - elif o in("-x", "--exclude"): - if clear_x_on_next_pass: - clear_x_on_next_pass = False - filtering.clear() - filtering.add(a) - - __check_python_version__() - run.process(repos) - - except (filtering.InvalidRegExpError, format.InvalidFormatError, optval.InvalidOptionArgument, getopt.error) as exception: - print(sys.argv[0], "\b:", exception.msg, file=sys.stderr) - print(_("Try `{0} --help' for more information.").format(sys.argv[0]), file=sys.stderr) - sys.exit(2) + terminal.check_terminal_encoding() + terminal.set_stdin_encoding() + argv = terminal.convert_command_line_to_utf8() + run = Runner() + repos = [] + + try: + opts, args = optval.gnu_getopt( + argv[1:], + "f:F:hHlLmrTwx:", + [ + "exclude=", + "file-types=", + "format=", + "hard:true", + "help", + "list-file-types:true", + "localize-output:true", + "metrics:true", + "responsibilities:true", + "since=", + "grading:true", + "timeline:true", + "until=", + "version", + "weeks:true", + ], + ) + repos = __get_validated_git_repos__(set(args)) + + # We need the repos above to be set before we read the git config. 
+ GitConfig(run, repos[-1].location).read() + clear_x_on_next_pass = True + + for o, a in opts: + if o in ("-h", "--help"): + help.output() + sys.exit(0) + elif o in ("-f", "--file-types"): + extensions.define(a) + elif o in ("-F", "--format"): + if not format.select(a): + raise format.InvalidFormatError(_("specified output format not supported.")) + elif o == "-H": + run.hard = True + elif o == "--hard": + run.hard = optval.get_boolean_argument(a) + elif o == "-l": + run.list_file_types = True + elif o == "--list-file-types": + run.list_file_types = optval.get_boolean_argument(a) + elif o == "-L": + run.localize_output = True + elif o == "--localize-output": + run.localize_output = optval.get_boolean_argument(a) + elif o == "-m": + run.include_metrics = True + elif o == "--metrics": + run.include_metrics = optval.get_boolean_argument(a) + elif o == "-r": + run.responsibilities = True + elif o == "--responsibilities": + run.responsibilities = optval.get_boolean_argument(a) + elif o == "--since": + interval.set_since(a) + elif o == "--version": + version.output() + sys.exit(0) + elif o == "--grading": + grading = optval.get_boolean_argument(a) + run.include_metrics = grading + run.list_file_types = grading + run.responsibilities = grading + run.grading = grading + run.hard = grading + run.timeline = grading + run.useweeks = grading + elif o == "-T": + run.timeline = True + elif o == "--timeline": + run.timeline = optval.get_boolean_argument(a) + elif o == "--until": + interval.set_until(a) + elif o == "-w": + run.useweeks = True + elif o == "--weeks": + run.useweeks = optval.get_boolean_argument(a) + elif o in ("-x", "--exclude"): + if clear_x_on_next_pass: + clear_x_on_next_pass = False + filtering.clear() + filtering.add(a) + + __check_python_version__() + run.process(repos) + + except (filtering.InvalidRegExpError, format.InvalidFormatError, optval.InvalidOptionArgument, getopt.error) as exception: + print(sys.argv[0], "\b:", exception.msg, file=sys.stderr) + print(_("Try `{0} --help' for more information.").format(sys.argv[0]), file=sys.stderr) + sys.exit(2) + @atexit.register def cleanup(): - clone.delete() + clone.delete() + if __name__ == "__main__": - main() + main() diff --git a/gitinspector/gravatar.py b/gitinspector/gravatar.py index 634ba44d..2b56f6f8 100644 --- a/gitinspector/gravatar.py +++ b/gitinspector/gravatar.py @@ -21,20 +21,21 @@ import hashlib try: - from urllib.parse import urlencode + from urllib.parse import urlencode except: - from urllib.parse import urlencode + from urllib.parse import urlencode from . import format + def get_url(email, size=20): - md5hash = hashlib.md5(email.encode("utf-8").lower().strip()).hexdigest() - base_url = "https://www.gravatar.com/avatar/" + md5hash - params = None + md5hash = hashlib.md5(email.encode("utf-8").lower().strip()).hexdigest() + base_url = "https://www.gravatar.com/avatar/" + md5hash + params = None - if format.get_selected() == "html": - params = {"default": "identicon", "size": size} - elif format.get_selected() == "xml" or format.get_selected() == "json": - params = {"default": "identicon"} + if format.get_selected() == "html": + params = {"default": "identicon", "size": size} + elif format.get_selected() == "xml" or format.get_selected() == "json": + params = {"default": "identicon"} - return base_url + "?" + urlencode(params) + return base_url + "?" 
+ urlencode(params) diff --git a/gitinspector/help.py b/gitinspector/help.py index 483984aa..c7178ecc 100644 --- a/gitinspector/help.py +++ b/gitinspector/help.py @@ -18,13 +18,13 @@ # along with gitinspector. If not, see . - import sys from .extensions import DEFAULT_EXTENSIONS from .format import __available_formats__ -__doc__ = _("""Usage: {0} [OPTION]... [REPOSITORY]... +__doc__ = _( + """Usage: {0} [OPTION]... [REPOSITORY]... List information about the repository in REPOSITORY. If no repository is specified, the current directory is used. If multiple repositories are given, information will be merged into a unified statistical report. @@ -76,7 +76,9 @@ more information. gitinspector requires that the git executable is available in your PATH. -Report gitinspector bugs to gitinspector@ejwa.se.""") +Report gitinspector bugs to gitinspector@ejwa.se.""" +) + def output(): - print(__doc__.format(sys.argv[0], ",".join(DEFAULT_EXTENSIONS), ",".join(__available_formats__))) + print(__doc__.format(sys.argv[0], ",".join(DEFAULT_EXTENSIONS), ",".join(__available_formats__))) diff --git a/gitinspector/interval.py b/gitinspector/interval.py index c9cbeed7..5f458556 100644 --- a/gitinspector/interval.py +++ b/gitinspector/interval.py @@ -18,11 +18,10 @@ # along with gitinspector. If not, see . - try: - from shlex import quote + from shlex import quote except ImportError: - from pipes import quote + from pipes import quote __since__ = "" @@ -30,26 +29,33 @@ __ref__ = "HEAD" + def has_interval(): - return __since__ + __until__ != "" + return __since__ + __until__ != "" + def get_since(): - return __since__ + return __since__ + def set_since(since): - global __since__ - __since__ = "--since=" + quote(since) + global __since__ + __since__ = "--since=" + quote(since) + def get_until(): - return __until__ + return __until__ + def set_until(until): - global __until__ - __until__ = "--until=" + quote(until) + global __until__ + __until__ = "--until=" + quote(until) + def get_ref(): - return __ref__ + return __ref__ + def set_ref(ref): - global __ref__ - __ref__ = ref + global __ref__ + __ref__ = ref diff --git a/gitinspector/localization.py b/gitinspector/localization.py index 33d256c0..a282d536 100644 --- a/gitinspector/localization.py +++ b/gitinspector/localization.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import gettext import locale import os @@ -31,76 +30,84 @@ __installed__ = False __translation__ = None -#Dummy function used to handle string constants + +# Dummy function used to handle string constants def N_(message): - return message + return message + def init(): - global __enabled__ - global __installed__ - global __translation__ - - if not __installed__: - try: - locale.setlocale(locale.LC_ALL, "") - except locale.Error: - __translation__ = gettext.NullTranslations() - else: - lang = locale.getlocale() - - #Fix for non-POSIX-compliant systems (Windows et al.). 
- if os.getenv('LANG') is None: - lang = locale.getdefaultlocale() - - if lang[0]: - os.environ['LANG'] = lang[0] - - if lang[0] is not None: - filename = basedir.get_basedir() + "/translations/messages_%s.mo" % lang[0][0:2] - - try: - __translation__ = gettext.GNUTranslations(open(filename, "rb")) - except IOError: - __translation__ = gettext.NullTranslations() - else: - print("WARNING: Localization disabled because the system language could not be determined.", file=sys.stderr) - __translation__ = gettext.NullTranslations() - - __enabled__ = True - __installed__ = True - __translation__.install() + global __enabled__ + global __installed__ + global __translation__ + + if not __installed__: + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error: + __translation__ = gettext.NullTranslations() + else: + lang = locale.getlocale() + + # Fix for non-POSIX-compliant systems (Windows et al.). + if os.getenv("LANG") is None: + lang = locale.getdefaultlocale() + + if lang[0]: + os.environ["LANG"] = lang[0] + + if lang[0] is not None: + filename = basedir.get_basedir() + "/translations/messages_%s.mo" % lang[0][0:2] + + try: + __translation__ = gettext.GNUTranslations(open(filename, "rb")) + except IOError: + __translation__ = gettext.NullTranslations() + else: + print("WARNING: Localization disabled because the system language could not be determined.", file=sys.stderr) + __translation__ = gettext.NullTranslations() + + __enabled__ = True + __installed__ = True + __translation__.install() + def check_compatibility(version): - if isinstance(__translation__, gettext.GNUTranslations): - header_pattern = re.compile("^([^:\n]+): *(.*?) *$", re.MULTILINE) - header_entries = dict(header_pattern.findall(_(""))) + if isinstance(__translation__, gettext.GNUTranslations): + header_pattern = re.compile("^([^:\n]+): *(.*?) *$", re.MULTILINE) + header_entries = dict(header_pattern.findall(_(""))) + + if header_entries["Project-Id-Version"] != "gitinspector {0}".format(version): + print( + "WARNING: The translation for your system locale is not up to date with the current gitinspector " + "version. The current maintainer of this locale is {0}.".format(header_entries["Last-Translator"]), + file=sys.stderr, + ) - if header_entries["Project-Id-Version"] != "gitinspector {0}".format(version): - print("WARNING: The translation for your system locale is not up to date with the current gitinspector " - "version. 
The current maintainer of this locale is {0}.".format(header_entries["Last-Translator"]), - file=sys.stderr) def get_date(): - if __enabled__ and isinstance(__translation__, gettext.GNUTranslations): - date = time.strftime("%x") + if __enabled__ and isinstance(__translation__, gettext.GNUTranslations): + date = time.strftime("%x") + + if hasattr(date, "decode"): + date = date.decode("utf-8", "replace") - if hasattr(date, 'decode'): - date = date.decode("utf-8", "replace") + return date + else: + return time.strftime("%Y/%m/%d") - return date - else: - return time.strftime("%Y/%m/%d") def enable(): - if isinstance(__translation__, gettext.GNUTranslations): - __translation__.install(True) + if isinstance(__translation__, gettext.GNUTranslations): + __translation__.install(True) + + global __enabled__ + __enabled__ = True - global __enabled__ - __enabled__ = True def disable(): - global __enabled__ - __enabled__ = False + global __enabled__ + __enabled__ = False - if __installed__: - gettext.NullTranslations().install() + if __installed__: + gettext.NullTranslations().install() diff --git a/gitinspector/metrics.py b/gitinspector/metrics.py index dd460234..ee969bd5 100644 --- a/gitinspector/metrics.py +++ b/gitinspector/metrics.py @@ -23,103 +23,137 @@ from .changes import FileDiff from . import comment, filtering, interval -__metric_eloc__ = {"java": 500, "c": 500, "cpp": 500, "cs": 500, "h": 300, "hpp": 300, "php": 500, "py": 500, "glsl": 1000, - "rb": 500, "js": 500, "sql": 1000, "xml": 1000} - -__metric_cc_tokens__ = [[["java", "js", "c", "cc", "cpp"], ["else", r"for\s+\(.*\)", r"if\s+\(.*\)", r"case\s+\w+:", - "default:", r"while\s+\(.*\)"], - ["assert", "break", "continue", "return"]], - [["cs"], ["else", r"for\s+\(.*\)", r"foreach\s+\(.*\)", r"goto\s+\w+:", r"if\s+\(.*\)", r"case\s+\w+:", - "default:", r"while\s+\(.*\)"], - ["assert", "break", "continue", "return"]], - [["py"], [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"], - [r"^\s+assert", "break", "continue", "return"]]] +__metric_eloc__ = { + "java": 500, + "c": 500, + "cpp": 500, + "cs": 500, + "h": 300, + "hpp": 300, + "php": 500, + "py": 500, + "glsl": 1000, + "rb": 500, + "js": 500, + "sql": 1000, + "xml": 1000, +} + +__metric_cc_tokens__ = [ + [ + ["java", "js", "c", "cc", "cpp"], + ["else", r"for\s+\(.*\)", r"if\s+\(.*\)", r"case\s+\w+:", "default:", r"while\s+\(.*\)"], + ["assert", "break", "continue", "return"], + ], + [ + ["cs"], + [ + "else", + r"for\s+\(.*\)", + r"foreach\s+\(.*\)", + r"goto\s+\w+:", + r"if\s+\(.*\)", + r"case\s+\w+:", + "default:", + r"while\s+\(.*\)", + ], + ["assert", "break", "continue", "return"], + ], + [ + ["py"], + [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"], + [r"^\s+assert", "break", "continue", "return"], + ], +] METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD = 50 METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD = 0.75 + class MetricsLogic(object): - def __init__(self): - self.eloc = {} - self.cyclomatic_complexity = {} - self.cyclomatic_complexity_density = {} - - ls_tree_p = subprocess.Popen(["git", "ls-tree", "--name-only", "-r", interval.get_ref()], - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - lines = ls_tree_p.communicate()[0].splitlines() - ls_tree_p.stdout.close() - - if ls_tree_p.returncode == 0: - for i in lines: - i = i.strip().decode("unicode_escape", "ignore") - i = i.encode("latin-1", "replace") - i = i.decode("utf-8", "replace").strip("\"").strip("'").strip() - - if FileDiff.is_valid_extension(i) and 
not filtering.set_filtered(FileDiff.get_filename(i)): - file_r = subprocess.Popen(["git", "show", interval.get_ref() + ":{0}".format(i.strip())], - stdout=subprocess.PIPE).stdout.readlines() - - extension = FileDiff.get_extension(i) - lines = MetricsLogic.get_eloc(file_r, extension) - cycc = MetricsLogic.get_cyclomatic_complexity(file_r, extension) - - if __metric_eloc__.get(extension, None) != None and __metric_eloc__[extension] < lines: - self.eloc[i.strip()] = lines - - if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cycc: - self.cyclomatic_complexity[i.strip()] = cycc - - if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cycc / float(lines): - self.cyclomatic_complexity_density[i.strip()] = cycc / float(lines) - - def __iadd__(self, other): - try: - self.eloc.update(other.eloc) - self.cyclomatic_complexity.update(other.cyclomatic_complexity) - self.cyclomatic_complexity_density.update(other.cyclomatic_complexity_density) - return self - except AttributeError: - return other; - - @staticmethod - def get_cyclomatic_complexity(file_r, extension): - is_inside_comment = False - cc_counter = 0 - - entry_tokens = None - exit_tokens = None - - for i in __metric_cc_tokens__: - if extension in i[0]: - entry_tokens = i[1] - exit_tokens = i[2] - - if entry_tokens or exit_tokens: - for i in file_r: - i = i.decode("utf-8", "replace") - (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) - - if not is_inside_comment and not comment.is_comment(extension, i): - for j in entry_tokens: - if re.search(j, i, re.DOTALL): - cc_counter += 2 - for j in exit_tokens: - if re.search(j, i, re.DOTALL): - cc_counter += 1 - return cc_counter - - return -1 - - @staticmethod - def get_eloc(file_r, extension): - is_inside_comment = False - eloc_counter = 0 - - for i in file_r: - i = i.decode("utf-8", "replace") - (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) - - if not is_inside_comment and not comment.is_comment(extension, i): - eloc_counter += 1 - - return eloc_counter + def __init__(self): + self.eloc = {} + self.cyclomatic_complexity = {} + self.cyclomatic_complexity_density = {} + + ls_tree_p = subprocess.Popen( + ["git", "ls-tree", "--name-only", "-r", interval.get_ref()], stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + lines = ls_tree_p.communicate()[0].splitlines() + ls_tree_p.stdout.close() + + if ls_tree_p.returncode == 0: + for i in lines: + i = i.strip().decode("unicode_escape", "ignore") + i = i.encode("latin-1", "replace") + i = i.decode("utf-8", "replace").strip('"').strip("'").strip() + + if FileDiff.is_valid_extension(i) and not filtering.set_filtered(FileDiff.get_filename(i)): + file_r = subprocess.Popen( + ["git", "show", interval.get_ref() + ":{0}".format(i.strip())], stdout=subprocess.PIPE + ).stdout.readlines() + + extension = FileDiff.get_extension(i) + lines = MetricsLogic.get_eloc(file_r, extension) + cycc = MetricsLogic.get_cyclomatic_complexity(file_r, extension) + + if __metric_eloc__.get(extension, None) is not None and __metric_eloc__[extension] < lines: + self.eloc[i.strip()] = lines + + if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cycc: + self.cyclomatic_complexity[i.strip()] = cycc + + if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cycc / float(lines): + self.cyclomatic_complexity_density[i.strip()] = cycc / float(lines) + + def __iadd__(self, other): + try: + self.eloc.update(other.eloc) + self.cyclomatic_complexity.update(other.cyclomatic_complexity) + 
self.cyclomatic_complexity_density.update(other.cyclomatic_complexity_density) + return self + except AttributeError: + return other + + @staticmethod + def get_cyclomatic_complexity(file_r, extension): + is_inside_comment = False + cc_counter = 0 + + entry_tokens = None + exit_tokens = None + + for i in __metric_cc_tokens__: + if extension in i[0]: + entry_tokens = i[1] + exit_tokens = i[2] + + if entry_tokens or exit_tokens: + for i in file_r: + i = i.decode("utf-8", "replace") + (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) + + if not is_inside_comment and not comment.is_comment(extension, i): + for j in entry_tokens: + if re.search(j, i, re.DOTALL): + cc_counter += 2 + for j in exit_tokens: + if re.search(j, i, re.DOTALL): + cc_counter += 1 + return cc_counter + + return -1 + + @staticmethod + def get_eloc(file_r, extension): + is_inside_comment = False + eloc_counter = 0 + + for i in file_r: + i = i.decode("utf-8", "replace") + (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) + + if not is_inside_comment and not comment.is_comment(extension, i): + eloc_counter += 1 + + return eloc_counter diff --git a/gitinspector/optval.py b/gitinspector/optval.py index e6b57aac..558e3a2f 100644 --- a/gitinspector/optval.py +++ b/gitinspector/optval.py @@ -20,47 +20,53 @@ import getopt + class InvalidOptionArgument(Exception): - def __init__(self, msg): - super(InvalidOptionArgument, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidOptionArgument, self).__init__(msg) + self.msg = msg + def __find_arg_in_options__(arg, options): - for opt in options: - if opt[0].find(arg) == 0: - return opt + for opt in options: + if opt[0].find(arg) == 0: + return opt + + return None - return None def __find_options_to_extend__(long_options): - options_to_extend = [] + options_to_extend = [] + + for num, arg in enumerate(long_options): + arg = arg.split(":") + if len(arg) == 2: + long_options[num] = arg[0] + "=" + options_to_extend.append(("--" + arg[0], arg[1])) - for num, arg in enumerate(long_options): - arg = arg.split(":") - if len(arg) == 2: - long_options[num] = arg[0] + "=" - options_to_extend.append(("--" + arg[0], arg[1])) + return options_to_extend - return options_to_extend # This is a duplicate of gnu_getopt, but with support for optional arguments in long options, in the form; "arg:default_value". 
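# --- Illustrative sketch, not part of the patch -------------------------------------------------
# With the "arg:default_value" form handled by the helpers above, an entry such as "hard:true" is
# registered with getopt as "hard=", and a bare "--hard" on the command line is expanded to
# "--hard=true" before getopt.gnu_getopt runs.  A hypothetical call:
#
#     opts, args = gnu_getopt(["--hard", "my-repo"], "", ["hard:true", "since="])
#     # opts == [("--hard", "true")]   and   args == ["my-repo"]
#
# An explicit value such as "--hard=false" is left untouched and parsed as usual.
# --- end of sketch -------------------------------------------------------------------------------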
+ def gnu_getopt(args, options, long_options): - options_to_extend = __find_options_to_extend__(long_options) + options_to_extend = __find_options_to_extend__(long_options) + + for num, arg in enumerate(args): + opt = __find_arg_in_options__(arg, options_to_extend) + if opt: + args[num] = arg + "=" + opt[1] - for num, arg in enumerate(args): - opt = __find_arg_in_options__(arg, options_to_extend) - if opt: - args[num] = arg + "=" + opt[1] + return getopt.gnu_getopt(args, options, long_options) - return getopt.gnu_getopt(args, options, long_options) def get_boolean_argument(arg): - if isinstance(arg, bool): - return arg - elif arg == None or arg.lower() == "false" or arg.lower() == "f" or arg == "0": - return False - elif arg.lower() == "true" or arg.lower() == "t" or arg == "1": - return True - - raise InvalidOptionArgument(_("The given option argument is not a valid boolean.")) + if isinstance(arg, bool): + return arg + elif arg is None or arg.lower() == "false" or arg.lower() == "f" or arg == "0": + return False + elif arg.lower() == "true" or arg.lower() == "t" or arg == "1": + return True + + raise InvalidOptionArgument(_("The given option argument is not a valid boolean.")) diff --git a/gitinspector/output/blameoutput.py b/gitinspector/output/blameoutput.py index d802627d..ee35947f 100644 --- a/gitinspector/output/blameoutput.py +++ b/gitinspector/output/blameoutput.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import json import sys import textwrap @@ -27,128 +26,160 @@ from ..blame import Blame from .outputable import Outputable -BLAME_INFO_TEXT = N_("Below are the number of rows from each author that have survived and are still " - "intact in the current revision") +BLAME_INFO_TEXT = N_( + "Below are the number of rows from each author that have survived and are still " "intact in the current revision" +) + class BlameOutput(Outputable): - def __init__(self, changes, blame): - if format.is_interactive_format(): - print("") - - self.changes = changes - self.blame = blame - Outputable.__init__(self) - - def output_html(self): - blame_xml = "
" - blame_xml += "

" + _(BLAME_INFO_TEXT) + ".

" - blame_xml += "".format( - _("Author"), _("Rows"), _("Stability"), _("Age"), _("% in comments")) - blame_xml += "" - chart_data = "" - blames = sorted(self.blame.get_summed_blames().items()) - total_blames = 0 - - for i in blames: - total_blames += i[1].rows - - for i, entry in enumerate(blames): - work_percentage = str("{0:.2f}".format(100.0 * entry[1].rows / total_blames)) - blame_xml += "" if i % 2 == 1 else ">") - - if format.get_selected() == "html": - author_email = self.changes.get_latest_email_by_author(entry[0]) - blame_xml += "".format(gravatar.get_url(author_email), entry[0]) - else: - blame_xml += "" - - blame_xml += "" - blame_xml += "") - blame_xml += "" - blame_xml += "" - blame_xml += "" - blame_xml += "" - chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry[0]), work_percentage) - - if blames[-1] != entry: - chart_data += ", " - - blame_xml += "
{0} {1} {2} {3} {4}
{1}" + entry[0] + "" + str(entry[1].rows) + "" + ("{0:.1f}".format(Blame.get_stability(entry[0], entry[1].rows, self.changes)) + "" + "{0:.1f}".format(float(entry[1].skew) / entry[1].rows) + "" + "{0:.2f}".format(100.0 * entry[1].comments / entry[1].rows) + "" + work_percentage + "
 
" - blame_xml += "
" - blame_xml += "
" - - print(blame_xml) - - def output_json(self): - message_json = "\t\t\t\"message\": \"" + _(BLAME_INFO_TEXT) + "\",\n" - blame_json = "" - - for i in sorted(self.blame.get_summed_blames().items()): - author_email = self.changes.get_latest_email_by_author(i[0]) - - name_json = "\t\t\t\t\"name\": \"" + i[0] + "\",\n" - email_json = "\t\t\t\t\"email\": \"" + author_email + "\",\n" - gravatar_json = "\t\t\t\t\"gravatar\": \"" + gravatar.get_url(author_email) + "\",\n" - rows_json = "\t\t\t\t\"rows\": " + str(i[1].rows) + ",\n" - stability_json = ("\t\t\t\t\"stability\": " + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, - self.changes)) + ",\n") - age_json = ("\t\t\t\t\"age\": " + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + ",\n") - percentage_in_comments_json = ("\t\t\t\t\"percentage_in_comments\": " + - "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + "\n") - blame_json += ("{\n" + name_json + email_json + gravatar_json + rows_json + stability_json + age_json + - percentage_in_comments_json + "\t\t\t},") - else: - blame_json = blame_json[:-1] - - print(",\n\t\t\"blame\": {\n" + message_json + "\t\t\t\"authors\": [\n\t\t\t" + blame_json + "]\n\t\t}", end="") - - def output_text(self): - if sys.stdout.isatty() and format.is_interactive_format(): - terminal.clear_row() - - print(textwrap.fill(_(BLAME_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") - terminal.printb(terminal.ljust(_("Author"), 21) + terminal.rjust(_("Rows"), 10) + terminal.rjust(_("Stability"), 15) + - terminal.rjust(_("Age"), 13) + terminal.rjust(_("% in comments"), 20)) - - for i in sorted(self.blame.get_summed_blames().items()): - print(terminal.ljust(i[0], 20)[0:20 - terminal.get_excess_column_count(i[0])], end=" ") - print(str(i[1].rows).rjust(10), end=" ") - print("{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)).rjust(14), end=" ") - print("{0:.1f}".format(float(i[1].skew) / i[1].rows).rjust(12), end=" ") - print("{0:.2f}".format(100.0 * i[1].comments / i[1].rows).rjust(19)) - - def output_xml(self): - message_xml = "\t\t" + _(BLAME_INFO_TEXT) + "\n" - blame_xml = "" - - for i in sorted(self.blame.get_summed_blames().items()): - author_email = self.changes.get_latest_email_by_author(i[0]) - - name_xml = "\t\t\t\t" + i[0] + "\n" - email_xml = "\t\t\t\t" + author_email + "\n" - gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - rows_xml = "\t\t\t\t" + str(i[1].rows) + "\n" - stability_xml = ("\t\t\t\t" + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, - self.changes)) + "\n") - age_xml = ("\t\t\t\t" + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + "\n") - percentage_in_comments_xml = ("\t\t\t\t" + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + - "\n") - blame_xml += ("\t\t\t\n" + name_xml + email_xml + gravatar_xml + rows_xml + stability_xml + - age_xml + percentage_in_comments_xml + "\t\t\t\n") - - print("\t\n" + message_xml + "\t\t\n" + blame_xml + "\t\t\n\t") + def __init__(self, changes, blame): + if format.is_interactive_format(): + print("") + + self.changes = changes + self.blame = blame + Outputable.__init__(self) + + def output_html(self): + blame_xml = '
' + blame_xml += "

" + _(BLAME_INFO_TEXT) + '.

' + blame_xml += "".format( + _("Author"), _("Rows"), _("Stability"), _("Age"), _("% in comments") + ) + blame_xml += "" + chart_data = "" + blames = sorted(self.blame.get_summed_blames().items()) + total_blames = 0 + + for i in blames: + total_blames += i[1].rows + + for i, entry in enumerate(blames): + work_percentage = str("{0:.2f}".format(100.0 * entry[1].rows / total_blames)) + blame_xml += "' if i % 2 == 1 else ">") + + if format.get_selected() == "html": + author_email = self.changes.get_latest_email_by_author(entry[0]) + blame_xml += ''.format(gravatar.get_url(author_email), entry[0]) + else: + blame_xml += "" + + blame_xml += "" + blame_xml += "") + blame_xml += "" + blame_xml += "" + blame_xml += '" + blame_xml += "" + chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry[0]), work_percentage) + + if blames[-1] != entry: + chart_data += ", " + + blame_xml += '
{0} {1} {2} {3} {4}
{1}" + entry[0] + "" + str(entry[1].rows) + "" + ("{0:.1f}".format(Blame.get_stability(entry[0], entry[1].rows, self.changes)) + "" + "{0:.1f}".format(float(entry[1].skew) / entry[1].rows) + "" + "{0:.2f}".format(100.0 * entry[1].comments / entry[1].rows) + "' + work_percentage + "
 
' + blame_xml += '
' + blame_xml += '
" + + print(blame_xml) + + def output_json(self): + message_json = '\t\t\t"message": "' + _(BLAME_INFO_TEXT) + '",\n' + blame_json = "" + + for i in sorted(self.blame.get_summed_blames().items()): + author_email = self.changes.get_latest_email_by_author(i[0]) + + name_json = '\t\t\t\t"name": "' + i[0] + '",\n' + email_json = '\t\t\t\t"email": "' + author_email + '",\n' + gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + rows_json = '\t\t\t\t"rows": ' + str(i[1].rows) + ",\n" + stability_json = ( + '\t\t\t\t"stability": ' + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + ",\n" + ) + age_json = '\t\t\t\t"age": ' + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + ",\n" + percentage_in_comments_json = ( + '\t\t\t\t"percentage_in_comments": ' + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + "\n" + ) + blame_json += ( + "{\n" + + name_json + + email_json + + gravatar_json + + rows_json + + stability_json + + age_json + + percentage_in_comments_json + + "\t\t\t}," + ) + else: + blame_json = blame_json[:-1] + + print(',\n\t\t"blame": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + blame_json + "]\n\t\t}", end="") + + def output_text(self): + if sys.stdout.isatty() and format.is_interactive_format(): + terminal.clear_row() + + print(textwrap.fill(_(BLAME_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") + terminal.printb( + terminal.ljust(_("Author"), 21) + + terminal.rjust(_("Rows"), 10) + + terminal.rjust(_("Stability"), 15) + + terminal.rjust(_("Age"), 13) + + terminal.rjust(_("% in comments"), 20) + ) + + for i in sorted(self.blame.get_summed_blames().items()): + print(terminal.ljust(i[0], 20)[0:20 - terminal.get_excess_column_count(i[0])], end=" ") + print(str(i[1].rows).rjust(10), end=" ") + print("{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)).rjust(14), end=" ") + print("{0:.1f}".format(float(i[1].skew) / i[1].rows).rjust(12), end=" ") + print("{0:.2f}".format(100.0 * i[1].comments / i[1].rows).rjust(19)) + + def output_xml(self): + message_xml = "\t\t" + _(BLAME_INFO_TEXT) + "\n" + blame_xml = "" + + for i in sorted(self.blame.get_summed_blames().items()): + author_email = self.changes.get_latest_email_by_author(i[0]) + + name_xml = "\t\t\t\t" + i[0] + "\n" + email_xml = "\t\t\t\t" + author_email + "\n" + gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + rows_xml = "\t\t\t\t" + str(i[1].rows) + "\n" + stability_xml = ( + "\t\t\t\t" + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + "\n" + ) + age_xml = "\t\t\t\t" + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + "\n" + percentage_in_comments_xml = ( + "\t\t\t\t" + + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + + "\n" + ) + blame_xml += ( + "\t\t\t\n" + + name_xml + + email_xml + + gravatar_xml + + rows_xml + + stability_xml + + age_xml + + percentage_in_comments_xml + + "\t\t\t\n" + ) + + print("\t\n" + message_xml + "\t\t\n" + blame_xml + "\t\t\n\t") diff --git a/gitinspector/output/changesoutput.py b/gitinspector/output/changesoutput.py index 945f4ac7..a7175d9d 100644 --- a/gitinspector/output/changesoutput.py +++ b/gitinspector/output/changesoutput.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . 
- import json import textwrap from ..localization import N_ @@ -28,162 +27,189 @@ HISTORICAL_INFO_TEXT = N_("The following historical commit information, by author, was found") NO_COMMITED_FILES_TEXT = N_("No commited files with the specified extensions were found") + class ChangesOutput(Outputable): - def __init__(self, changes): - self.changes = changes - Outputable.__init__(self) - - def output_html(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - changes_xml = "
" - chart_data = "" - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - changes_xml += "

" + _(HISTORICAL_INFO_TEXT) + ".

" - changes_xml += "".format( - _("Author"), _("Commits"), _("Insertions"), _("Deletions"), _("% of changes")) - changes_xml += "" - - for i, entry in enumerate(sorted(authorinfo_list)): - authorinfo = authorinfo_list.get(entry) - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - - changes_xml += "" if i % 2 == 1 else ">") - - if format.get_selected() == "html": - changes_xml += "".format( - gravatar.get_url(self.changes.get_latest_email_by_author(entry)), entry) - else: - changes_xml += "" - - changes_xml += "" - changes_xml += "" - changes_xml += "" - changes_xml += "" - changes_xml += "" - chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry), "{0:.2f}".format(percentage)) - - if sorted(authorinfo_list)[-1] != entry: - chart_data += ", " - - changes_xml += ("
{0} {1} {2} {3} {4}
{1}" + entry + "" + str(authorinfo.commits) + "" + str(authorinfo.insertions) + "" + str(authorinfo.deletions) + "" + "{0:.2f}".format(percentage) + "
 
") - changes_xml += "
" - changes_xml += "" - else: - changes_xml += "

" + _(NO_COMMITED_FILES_TEXT) + ".

" - - changes_xml += "
" - print(changes_xml) - - def output_json(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - message_json = "\t\t\t\"message\": \"" + _(HISTORICAL_INFO_TEXT) + "\",\n" - changes_json = "" - - for i in sorted(authorinfo_list): - author_email = self.changes.get_latest_email_by_author(i) - authorinfo = authorinfo_list.get(i) - - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - name_json = "\t\t\t\t\"name\": \"" + i + "\",\n" - email_json = "\t\t\t\t\"email\": \"" + author_email + "\",\n" - gravatar_json = "\t\t\t\t\"gravatar\": \"" + gravatar.get_url(author_email) + "\",\n" - commits_json = "\t\t\t\t\"commits\": " + str(authorinfo.commits) + ",\n" - insertions_json = "\t\t\t\t\"insertions\": " + str(authorinfo.insertions) + ",\n" - deletions_json = "\t\t\t\t\"deletions\": " + str(authorinfo.deletions) + ",\n" - percentage_json = "\t\t\t\t\"percentage_of_changes\": " + "{0:.2f}".format(percentage) + "\n" - - changes_json += ("{\n" + name_json + email_json + gravatar_json + commits_json + - insertions_json + deletions_json + percentage_json + "\t\t\t}") - changes_json += "," - else: - changes_json = changes_json[:-1] - - print("\t\t\"changes\": {\n" + message_json + "\t\t\t\"authors\": [\n\t\t\t" + changes_json + "]\n\t\t}", end="") - else: - print("\t\t\"exception\": \"" + _(NO_COMMITED_FILES_TEXT) + "\"") - - def output_text(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - print(textwrap.fill(_(HISTORICAL_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") - terminal.printb(terminal.ljust(_("Author"), 21) + terminal.rjust(_("Commits"), 13) + - terminal.rjust(_("Insertions"), 14) + terminal.rjust(_("Deletions"), 15) + - terminal.rjust(_("% of changes"), 16)) - - for i in sorted(authorinfo_list): - authorinfo = authorinfo_list.get(i) - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - - print(terminal.ljust(i, 20)[0:20 - terminal.get_excess_column_count(i)], end=" ") - print(str(authorinfo.commits).rjust(13), end=" ") - print(str(authorinfo.insertions).rjust(13), end=" ") - print(str(authorinfo.deletions).rjust(14), end=" ") - print("{0:.2f}".format(percentage).rjust(15)) - else: - print(_(NO_COMMITED_FILES_TEXT) + ".") - - def output_xml(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - message_xml = "\t\t" + _(HISTORICAL_INFO_TEXT) + "\n" - changes_xml = "" - - for i in sorted(authorinfo_list): - author_email = self.changes.get_latest_email_by_author(i) - authorinfo = authorinfo_list.get(i) - - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - name_xml = "\t\t\t\t" + i + "\n" - email_xml = "\t\t\t\t" + author_email + "\n" - gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - commits_xml = "\t\t\t\t" + str(authorinfo.commits) + "\n" - insertions_xml = "\t\t\t\t" + str(authorinfo.insertions) + "\n" - deletions_xml 
= "\t\t\t\t" + str(authorinfo.deletions) + "\n" - percentage_xml = "\t\t\t\t" + "{0:.2f}".format(percentage) + "\n" - - changes_xml += ("\t\t\t\n" + name_xml + email_xml + gravatar_xml + commits_xml + - insertions_xml + deletions_xml + percentage_xml + "\t\t\t\n") - - print("\t\n" + message_xml + "\t\t\n" + changes_xml + "\t\t\n\t") - else: - print("\t\n\t\t" + _(NO_COMMITED_FILES_TEXT) + "\n\t") + def __init__(self, changes): + self.changes = changes + Outputable.__init__(self) + + def output_html(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + changes_xml = '
' + chart_data = "" + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + changes_xml += "

" + _(HISTORICAL_INFO_TEXT) + '.

' + changes_xml += "".format( + _("Author"), _("Commits"), _("Insertions"), _("Deletions"), _("% of changes") + ) + changes_xml += "" + + for i, entry in enumerate(sorted(authorinfo_list)): + authorinfo = authorinfo_list.get(entry) + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + + changes_xml += "' if i % 2 == 1 else ">") + + if format.get_selected() == "html": + changes_xml += ''.format( + gravatar.get_url(self.changes.get_latest_email_by_author(entry)), entry + ) + else: + changes_xml += "" + + changes_xml += "" + changes_xml += "" + changes_xml += "" + changes_xml += "" + changes_xml += "" + chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry), "{0:.2f}".format(percentage)) + + if sorted(authorinfo_list)[-1] != entry: + chart_data += ", " + + changes_xml += '
{0} {1} {2} {3} {4}
{1}" + entry + "" + str(authorinfo.commits) + "" + str(authorinfo.insertions) + "" + str(authorinfo.deletions) + "" + "{0:.2f}".format(percentage) + "
 
' + changes_xml += '
' + changes_xml += '" + else: + changes_xml += "

" + _(NO_COMMITED_FILES_TEXT) + ".

" + + changes_xml += "
" + print(changes_xml) + + def output_json(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + message_json = '\t\t\t"message": "' + _(HISTORICAL_INFO_TEXT) + '",\n' + changes_json = "" + + for i in sorted(authorinfo_list): + author_email = self.changes.get_latest_email_by_author(i) + authorinfo = authorinfo_list.get(i) + + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + name_json = '\t\t\t\t"name": "' + i + '",\n' + email_json = '\t\t\t\t"email": "' + author_email + '",\n' + gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + commits_json = '\t\t\t\t"commits": ' + str(authorinfo.commits) + ",\n" + insertions_json = '\t\t\t\t"insertions": ' + str(authorinfo.insertions) + ",\n" + deletions_json = '\t\t\t\t"deletions": ' + str(authorinfo.deletions) + ",\n" + percentage_json = '\t\t\t\t"percentage_of_changes": ' + "{0:.2f}".format(percentage) + "\n" + + changes_json += ( + "{\n" + + name_json + + email_json + + gravatar_json + + commits_json + + insertions_json + + deletions_json + + percentage_json + + "\t\t\t}" + ) + changes_json += "," + else: + changes_json = changes_json[:-1] + + print('\t\t"changes": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + changes_json + "]\n\t\t}", end="") + else: + print('\t\t"exception": "' + _(NO_COMMITED_FILES_TEXT) + '"') + + def output_text(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + print(textwrap.fill(_(HISTORICAL_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") + terminal.printb( + terminal.ljust(_("Author"), 21) + + terminal.rjust(_("Commits"), 13) + + terminal.rjust(_("Insertions"), 14) + + terminal.rjust(_("Deletions"), 15) + + terminal.rjust(_("% of changes"), 16) + ) + + for i in sorted(authorinfo_list): + authorinfo = authorinfo_list.get(i) + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + + print(terminal.ljust(i, 20)[0:20 - terminal.get_excess_column_count(i)], end=" ") + print(str(authorinfo.commits).rjust(13), end=" ") + print(str(authorinfo.insertions).rjust(13), end=" ") + print(str(authorinfo.deletions).rjust(14), end=" ") + print("{0:.2f}".format(percentage).rjust(15)) + else: + print(_(NO_COMMITED_FILES_TEXT) + ".") + + def output_xml(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + message_xml = "\t\t" + _(HISTORICAL_INFO_TEXT) + "\n" + changes_xml = "" + + for i in sorted(authorinfo_list): + author_email = self.changes.get_latest_email_by_author(i) + authorinfo = authorinfo_list.get(i) + + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + name_xml = "\t\t\t\t" + i + "\n" + email_xml = "\t\t\t\t" + author_email + "\n" + gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + commits_xml = "\t\t\t\t" + str(authorinfo.commits) + "\n" + insertions_xml = "\t\t\t\t" + str(authorinfo.insertions) + "\n" + deletions_xml = 
"\t\t\t\t" + str(authorinfo.deletions) + "\n" + percentage_xml = ( + "\t\t\t\t" + "{0:.2f}".format(percentage) + "\n" + ) + + changes_xml += ( + "\t\t\t\n" + + name_xml + + email_xml + + gravatar_xml + + commits_xml + + insertions_xml + + deletions_xml + + percentage_xml + + "\t\t\t\n" + ) + + print("\t\n" + message_xml + "\t\t\n" + changes_xml + "\t\t\n\t") + else: + print("\t\n\t\t" + _(NO_COMMITED_FILES_TEXT) + "\n\t") diff --git a/gitinspector/output/extensionsoutput.py b/gitinspector/output/extensionsoutput.py index dceb372d..f1ae6124 100644 --- a/gitinspector/output/extensionsoutput.py +++ b/gitinspector/output/extensionsoutput.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import textwrap from ..localization import N_ from .. import extensions, terminal @@ -28,70 +27,93 @@ EXTENSIONS_INFO_TEXT = N_("The extensions below were found in the repository history") EXTENSIONS_MARKED_TEXT = N_("(extensions used during statistical analysis are marked)") + class ExtensionsOutput(Outputable): - @staticmethod - def is_marked(extension): - if extension in extensions.__extensions__ or "**" in extensions.__extensions__: - return True - - return False - - def output_html(self): - if extensions.__located_extensions__: - extensions_xml = "
" - extensions_xml += "

{0} {1}.

".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)) - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - extensions_xml += "" + i + "" - else: - extensions_xml += i - extensions_xml += " " - - extensions_xml += "

" - print(extensions_xml) - - def output_json(self): - if extensions.__located_extensions__: - message_json = "\t\t\t\"message\": \"" + _(EXTENSIONS_INFO_TEXT) + "\",\n" - used_extensions_json = "" - unused_extensions_json = "" - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - used_extensions_json += "\"" + i + "\", " - else: - unused_extensions_json += "\"" + i + "\", " - - used_extensions_json = used_extensions_json[:-2] - unused_extensions_json = unused_extensions_json[:-2] - - print(",\n\t\t\"extensions\": {\n" + message_json + "\t\t\t\"used\": [ " + used_extensions_json + - " ],\n\t\t\t\"unused\": [ " + unused_extensions_json + " ]\n" + "\t\t}", end="") - - def output_text(self): - if extensions.__located_extensions__: - print("\n" + textwrap.fill("{0} {1}:".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)), - width=terminal.get_size()[0])) - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - print("[" + terminal.__bold__ + i + terminal.__normal__ + "]", end=" ") - else: - print (i, end=" ") - print("") - - def output_xml(self): - if extensions.__located_extensions__: - message_xml = "\t\t" + _(EXTENSIONS_INFO_TEXT) + "\n" - used_extensions_xml = "" - unused_extensions_xml = "" - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - used_extensions_xml += "\t\t\t" + i + "\n" - else: - unused_extensions_xml += "\t\t\t" + i + "\n" - - print("\t\n" + message_xml + "\t\t\n" + used_extensions_xml + "\t\t\n" + - "\t\t\n" + unused_extensions_xml + "\t\t\n" + "\t") + @staticmethod + def is_marked(extension): + if extension in extensions.__extensions__ or "**" in extensions.__extensions__: + return True + + return False + + def output_html(self): + if extensions.__located_extensions__: + extensions_xml = '
' + extensions_xml += "

{0} {1}.

".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)) + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + extensions_xml += "" + i + "" + else: + extensions_xml += i + extensions_xml += " " + + extensions_xml += "

" + print(extensions_xml) + + def output_json(self): + if extensions.__located_extensions__: + message_json = '\t\t\t"message": "' + _(EXTENSIONS_INFO_TEXT) + '",\n' + used_extensions_json = "" + unused_extensions_json = "" + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + used_extensions_json += '"' + i + '", ' + else: + unused_extensions_json += '"' + i + '", ' + + used_extensions_json = used_extensions_json[:-2] + unused_extensions_json = unused_extensions_json[:-2] + + print( + ',\n\t\t"extensions": {\n' + + message_json + + '\t\t\t"used": [ ' + + used_extensions_json + + ' ],\n\t\t\t"unused": [ ' + + unused_extensions_json + + " ]\n" + + "\t\t}", + end="", + ) + + def output_text(self): + if extensions.__located_extensions__: + print( + "\n" + + textwrap.fill( + "{0} {1}:".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)), width=terminal.get_size()[0] + ) + ) + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + print("[" + terminal.__bold__ + i + terminal.__normal__ + "]", end=" ") + else: + print(i, end=" ") + print("") + + def output_xml(self): + if extensions.__located_extensions__: + message_xml = "\t\t" + _(EXTENSIONS_INFO_TEXT) + "\n" + used_extensions_xml = "" + unused_extensions_xml = "" + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + used_extensions_xml += "\t\t\t" + i + "\n" + else: + unused_extensions_xml += "\t\t\t" + i + "\n" + + print( + "\t\n" + + message_xml + + "\t\t\n" + + used_extensions_xml + + "\t\t\n" + + "\t\t\n" + + unused_extensions_xml + + "\t\t\n" + + "\t" + ) diff --git a/gitinspector/output/filteringoutput.py b/gitinspector/output/filteringoutput.py index b122ec9f..dcefeb55 100644 --- a/gitinspector/output/filteringoutput.py +++ b/gitinspector/output/filteringoutput.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import textwrap from ..localization import N_ from ..filtering import __filters__, has_filtered @@ -26,96 +25,110 @@ from .outputable import Outputable FILTERING_INFO_TEXT = N_("The following files were excluded from the statistics due to the specified exclusion patterns") -FILTERING_AUTHOR_INFO_TEXT = N_("The following authors were excluded from the statistics due to the specified exclusion patterns") -FILTERING_EMAIL_INFO_TEXT = N_("The authors with the following emails were excluded from the statistics due to the specified " \ - "exclusion patterns") -FILTERING_COMMIT_INFO_TEXT = N_("The following commit revisions were excluded from the statistics due to the specified " \ - "exclusion patterns") +FILTERING_AUTHOR_INFO_TEXT = N_( + "The following authors were excluded from the statistics due to the specified exclusion patterns" +) +FILTERING_EMAIL_INFO_TEXT = N_( + "The authors with the following emails were excluded from the statistics due to the specified " "exclusion patterns" +) +FILTERING_COMMIT_INFO_TEXT = N_( + "The following commit revisions were excluded from the statistics due to the specified " "exclusion patterns" +) + class FilteringOutput(Outputable): - @staticmethod - def __output_html_section__(info_string, filtered): - filtering_xml = "" - - if filtered: - filtering_xml += "

" + info_string + "."+ "

" - - for i in filtered: - filtering_xml += "

" + i + "

" - - return filtering_xml - - def output_html(self): - if has_filtered(): - filtering_xml = "
" - FilteringOutput.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) - filtering_xml += "
" - - print(filtering_xml) - - @staticmethod - def __output_json_section__(info_string, filtered, container_tagname): - if filtered: - message_json = "\t\t\t\t\"message\": \"" + info_string + "\",\n" - filtering_json = "" - - for i in filtered: - filtering_json += "\t\t\t\t\t\"" + i + "\",\n" - else: - filtering_json = filtering_json[:-3] - - return "\n\t\t\t\"{0}\": {{\n".format(container_tagname) + message_json + \ - "\t\t\t\t\"entries\": [\n" + filtering_json + "\"\n\t\t\t\t]\n\t\t\t}," - - return "" - - def output_json(self): - if has_filtered(): - output = ",\n\t\t\"filtering\": {" - output += FilteringOutput.__output_json_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") - output += FilteringOutput.__output_json_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") - output += FilteringOutput.__output_json_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") - output += FilteringOutput.__output_json_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") - output = output[:-1] - output += "\n\t\t}" - print(output, end="") - - @staticmethod - def __output_text_section__(info_string, filtered): - if filtered: - print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0])) - - for i in filtered: - (width, _unused) = terminal.get_size() - print("...%s" % i[-width+3:] if len(i) > width else i) - - def output_text(self): - FilteringOutput.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) - - @staticmethod - def __output_xml_section__(info_string, filtered, container_tagname): - if filtered: - message_xml = "\t\t\t" + info_string + "\n" - filtering_xml = "" - - for i in filtered: - filtering_xml += "\t\t\t\t" + i + "\n" - - print("\t\t<{0}>".format(container_tagname)) - print(message_xml + "\t\t\t\n" + filtering_xml + "\t\t\t\n") - print("\t\t".format(container_tagname)) - - def output_xml(self): - if has_filtered(): - print("\t") - FilteringOutput.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") - FilteringOutput.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") - FilteringOutput.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") - FilteringOutput.__output_xml_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") - print("\t") + @staticmethod + def __output_html_section__(info_string, filtered): + filtering_xml = "" + + if filtered: + filtering_xml += "

" + info_string + "." + "

" + + for i in filtered: + filtering_xml += "

" + i + "

" + + return filtering_xml + + def output_html(self): + if has_filtered(): + filtering_xml = '
' + FilteringOutput.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) + filtering_xml += "
" + + print(filtering_xml) + + @staticmethod + def __output_json_section__(info_string, filtered, container_tagname): + if filtered: + message_json = '\t\t\t\t"message": "' + info_string + '",\n' + filtering_json = "" + + for i in filtered: + filtering_json += '\t\t\t\t\t"' + i + '",\n' + else: + filtering_json = filtering_json[:-3] + + return ( + '\n\t\t\t"{0}": {{\n'.format(container_tagname) + + message_json + + '\t\t\t\t"entries": [\n' + + filtering_json + + '"\n\t\t\t\t]\n\t\t\t},' + ) + + return "" + + def output_json(self): + if has_filtered(): + output = ',\n\t\t"filtering": {' + output += FilteringOutput.__output_json_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") + output += FilteringOutput.__output_json_section__( + _(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors" + ) + output += FilteringOutput.__output_json_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") + output += FilteringOutput.__output_json_section__( + _(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision" + ) + output = output[:-1] + output += "\n\t\t}" + print(output, end="") + + @staticmethod + def __output_text_section__(info_string, filtered): + if filtered: + print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0])) + + for i in filtered: + (width, _unused) = terminal.get_size() + print("...%s" % i[-width + 3:] if len(i) > width else i) + + def output_text(self): + FilteringOutput.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) + + @staticmethod + def __output_xml_section__(info_string, filtered, container_tagname): + if filtered: + message_xml = "\t\t\t" + info_string + "\n" + filtering_xml = "" + + for i in filtered: + filtering_xml += "\t\t\t\t" + i + "\n" + + print("\t\t<{0}>".format(container_tagname)) + print(message_xml + "\t\t\t\n" + filtering_xml + "\t\t\t\n") + print("\t\t".format(container_tagname)) + + def output_xml(self): + if has_filtered(): + print("\t") + FilteringOutput.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") + FilteringOutput.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") + FilteringOutput.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") + FilteringOutput.__output_xml_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") + print("\t") diff --git a/gitinspector/output/metricsoutput.py b/gitinspector/output/metricsoutput.py index f9195938..befe5aea 100644 --- a/gitinspector/output/metricsoutput.py +++ b/gitinspector/output/metricsoutput.py @@ -18,143 +18,168 @@ # along with gitinspector. If not, see . 
- from ..changes import FileDiff from ..localization import N_ -from ..metrics import (__metric_eloc__, METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD, METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD) +from ..metrics import __metric_eloc__, METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD, METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD from .outputable import Outputable ELOC_INFO_TEXT = N_("The following files are suspiciously big (in order of severity)") CYCLOMATIC_COMPLEXITY_TEXT = N_("The following files have an elevated cyclomatic complexity (in order of severity)") -CYCLOMATIC_COMPLEXITY_DENSITY_TEXT = N_("The following files have an elevated cyclomatic complexity density " \ - "(in order of severity)") +CYCLOMATIC_COMPLEXITY_DENSITY_TEXT = N_( + "The following files have an elevated cyclomatic complexity density " "(in order of severity)" +) METRICS_MISSING_INFO_TEXT = N_("No metrics violations were found in the repository") METRICS_VIOLATION_SCORES = [[1.0, "minimal"], [1.25, "minor"], [1.5, "medium"], [2.0, "bad"], [3.0, "severe"]] + def __get_metrics_score__(ceiling, value): - for i in reversed(METRICS_VIOLATION_SCORES): - if value > ceiling * i[0]: - return i[1] + for i in reversed(METRICS_VIOLATION_SCORES): + if value > ceiling * i[0]: + return i[1] + class MetricsOutput(Outputable): - def __init__(self, metrics): - self.metrics = metrics - Outputable.__init__(self) - - def output_text(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print("\n" + _(METRICS_MISSING_INFO_TEXT) + ".") - - if self.metrics.eloc: - print("\n" + _(ELOC_INFO_TEXT) + ":") - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - print(_("{0} ({1} estimated lines of code)").format(i[1], str(i[0]))) - - if self.metrics.cyclomatic_complexity: - print("\n" + _(CYCLOMATIC_COMPLEXITY_TEXT) + ":") - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - print(_("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0]))) - - if self.metrics.cyclomatic_complexity_density: - print("\n" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + ":") - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): - print(_("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0])) - - def output_html(self): - metrics_xml = "
" - - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - metrics_xml += "

" + _(METRICS_MISSING_INFO_TEXT) + ".

" - - if self.metrics.eloc: - metrics_xml += "

" + _(ELOC_INFO_TEXT) + ".

" - for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True)): - metrics_xml += "
" if num % 2 == 1 else "\">") + \ - _("{0} ({1} estimated lines of code)").format(i[1], str(i[0])) + "
" - metrics_xml += "
" - - if self.metrics.cyclomatic_complexity: - metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_TEXT) + "

" - for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True)): - metrics_xml += "
" if num % 2 == 1 else "\">") + \ - _("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0])) + "
" - metrics_xml += "
" - - if self.metrics.cyclomatic_complexity_density: - metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + "

" - for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True)): - metrics_xml += "
" if num % 2 == 1 else "\">") + \ - _("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0]) + "
" - metrics_xml += "
" - - metrics_xml += "
" - print(metrics_xml) - - def output_json(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print(",\n\t\t\"metrics\": {\n\t\t\t\"message\": \"" + _(METRICS_MISSING_INFO_TEXT) + "\"\n\t\t}", end="") - else: - eloc_json = "" - - if self.metrics.eloc: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - eloc_json += "{\n\t\t\t\t\"type\": \"estimated-lines-of-code\",\n" - eloc_json += "\t\t\t\t\"file_name\": \"" + i[1] + "\",\n" - eloc_json += "\t\t\t\t\"value\": " + str(i[0]) + "\n" - eloc_json += "\t\t\t}," - else: - if not self.metrics.cyclomatic_complexity: - eloc_json = eloc_json[:-1] - - if self.metrics.cyclomatic_complexity: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - eloc_json += "{\n\t\t\t\t\"type\": \"cyclomatic-complexity\",\n" - eloc_json += "\t\t\t\t\"file_name\": \"" + i[1] + "\",\n" - eloc_json += "\t\t\t\t\"value\": " + str(i[0]) + "\n" - eloc_json += "\t\t\t}," - else: - if not self.metrics.cyclomatic_complexity_density: - eloc_json = eloc_json[:-1] - - if self.metrics.cyclomatic_complexity_density: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): - eloc_json += "{\n\t\t\t\t\"type\": \"cyclomatic-complexity-density\",\n" - eloc_json += "\t\t\t\t\"file_name\": \"" + i[1] + "\",\n" - eloc_json += "\t\t\t\t\"value\": {0:.3f}\n".format(i[0]) - eloc_json += "\t\t\t}," - else: - eloc_json = eloc_json[:-1] - - print(",\n\t\t\"metrics\": {\n\t\t\t\"violations\": [\n\t\t\t" + eloc_json + "]\n\t\t}", end="") - def output_xml(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print("\t\n\t\t" + _(METRICS_MISSING_INFO_TEXT) + "\n\t") - else: - eloc_xml = "" - - if self.metrics.eloc: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" - eloc_xml += "\t\t\t\n" - - if self.metrics.cyclomatic_complexity: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" - eloc_xml += "\t\t\t\n" - - if self.metrics.cyclomatic_complexity_density: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t{0:.3f}\n".format(i[0]) - eloc_xml += "\t\t\t\n" - - print("\t\n\t\t\n" + eloc_xml + "\t\t\n\t") + def __init__(self, metrics): + self.metrics = metrics + Outputable.__init__(self) + + def output_text(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print("\n" + _(METRICS_MISSING_INFO_TEXT) + ".") + + if self.metrics.eloc: + print("\n" + _(ELOC_INFO_TEXT) + ":") + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + print(_("{0} ({1} estimated lines of code)").format(i[1], str(i[0]))) + + if self.metrics.cyclomatic_complexity: + print("\n" + _(CYCLOMATIC_COMPLEXITY_TEXT) + ":") + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + 
print(_("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0]))) + + if self.metrics.cyclomatic_complexity_density: + print("\n" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + ":") + for i in sorted( + set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True + ): + print(_("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0])) + + def output_html(self): + metrics_xml = '
' + + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + metrics_xml += "

" + _(METRICS_MISSING_INFO_TEXT) + ".

" + + if self.metrics.eloc: + metrics_xml += "

" + _(ELOC_INFO_TEXT) + ".

" + for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True)): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1} estimated lines of code)").format(i[1], str(i[0])) + + "
" + ) + metrics_xml += "
" + + if self.metrics.cyclomatic_complexity: + metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_TEXT) + "

" + for num, i in enumerate( + sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True) + ): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0])) + + "
" + ) + metrics_xml += "
" + + if self.metrics.cyclomatic_complexity_density: + metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + "

" + for num, i in enumerate( + sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True) + ): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0]) + + "
" + ) + metrics_xml += "
" + + metrics_xml += "
" + print(metrics_xml) + + def output_json(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print(',\n\t\t"metrics": {\n\t\t\t"message": "' + _(METRICS_MISSING_INFO_TEXT) + '"\n\t\t}', end="") + else: + eloc_json = "" + + if self.metrics.eloc: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + eloc_json += '{\n\t\t\t\t"type": "estimated-lines-of-code",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" + eloc_json += "\t\t\t}," + else: + if not self.metrics.cyclomatic_complexity: + eloc_json = eloc_json[:-1] + + if self.metrics.cyclomatic_complexity: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" + eloc_json += "\t\t\t}," + else: + if not self.metrics.cyclomatic_complexity_density: + eloc_json = eloc_json[:-1] + + if self.metrics.cyclomatic_complexity_density: + for i in sorted( + set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True + ): + eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity-density",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": {0:.3f}\n'.format(i[0]) + eloc_json += "\t\t\t}," + else: + eloc_json = eloc_json[:-1] + + print(',\n\t\t"metrics": {\n\t\t\t"violations": [\n\t\t\t' + eloc_json + "]\n\t\t}", end="") + + def output_xml(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print("\t\n\t\t" + _(METRICS_MISSING_INFO_TEXT) + "\n\t") + else: + eloc_xml = "" + + if self.metrics.eloc: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" + eloc_xml += "\t\t\t\n" + + if self.metrics.cyclomatic_complexity: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" + eloc_xml += "\t\t\t\n" + + if self.metrics.cyclomatic_complexity_density: + for i in sorted( + set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True + ): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t{0:.3f}\n".format(i[0]) + eloc_xml += "\t\t\t\n" + + print("\t\n\t\t\n" + eloc_xml + "\t\t\n\t") diff --git a/gitinspector/output/outputable.py b/gitinspector/output/outputable.py index b9a804d0..2d49d182 100644 --- a/gitinspector/output/outputable.py +++ b/gitinspector/output/outputable.py @@ -18,28 +18,29 @@ # along with gitinspector. If not, see . - from .. 
import format + class Outputable(object): - def output_html(self): - raise NotImplementedError(_("HTML output not yet supported in") + " \"" + self.__class__.__name__ + "\".") + def output_html(self): + raise NotImplementedError(_("HTML output not yet supported in") + ' "' + self.__class__.__name__ + '".') + + def output_json(self): + raise NotImplementedError(_("JSON output not yet supported in") + ' "' + self.__class__.__name__ + '".') - def output_json(self): - raise NotImplementedError(_("JSON output not yet supported in") + " \"" + self.__class__.__name__ + "\".") + def output_text(self): + raise NotImplementedError(_("Text output not yet supported in") + ' "' + self.__class__.__name__ + '".') - def output_text(self): - raise NotImplementedError(_("Text output not yet supported in") + " \"" + self.__class__.__name__ + "\".") + def output_xml(self): + raise NotImplementedError(_("XML output not yet supported in") + ' "' + self.__class__.__name__ + '".') - def output_xml(self): - raise NotImplementedError(_("XML output not yet supported in") + " \"" + self.__class__.__name__ + "\".") def output(outputable): - if format.get_selected() == "html" or format.get_selected() == "htmlembedded": - outputable.output_html() - elif format.get_selected() == "json": - outputable.output_json() - elif format.get_selected() == "text": - outputable.output_text() - else: - outputable.output_xml() + if format.get_selected() == "html" or format.get_selected() == "htmlembedded": + outputable.output_html() + elif format.get_selected() == "json": + outputable.output_json() + elif format.get_selected() == "text": + outputable.output_text() + else: + outputable.output_xml() diff --git a/gitinspector/output/responsibilitiesoutput.py b/gitinspector/output/responsibilitiesoutput.py index 7d2a1f73..2cc37a3f 100644 --- a/gitinspector/output/responsibilitiesoutput.py +++ b/gitinspector/output/responsibilitiesoutput.py @@ -18,126 +18,130 @@ # along with gitinspector. If not, see . - import textwrap from ..localization import N_ from .. import format, gravatar, terminal from .. import responsibilities as resp from .outputable import Outputable -RESPONSIBILITIES_INFO_TEXT = N_("The following responsibilities, by author, were found in the current " - "revision of the repository (comments are excluded from the line count, " - "if possible)") +RESPONSIBILITIES_INFO_TEXT = N_( + "The following responsibilities, by author, were found in the current " + "revision of the repository (comments are excluded from the line count, " + "if possible)" +) MOSTLY_RESPONSIBLE_FOR_TEXT = N_("is mostly responsible for") -class ResponsibilitiesOutput(Outputable): - def __init__(self, changes, blame): - self.changes = changes - self.blame = blame - Outputable.__init__(self) - - def output_text(self): - print("\n" + textwrap.fill(_(RESPONSIBILITIES_INFO_TEXT) + ":", width=terminal.get_size()[0])) - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - - if responsibilities: - print("\n" + i, _(MOSTLY_RESPONSIBLE_FOR_TEXT) + ":") - - for j, entry in enumerate(responsibilities): - (width, _unused) = terminal.get_size() - width -= 7 - - print(str(entry[0]).rjust(6), end=" ") - print("...%s" % entry[1][-width+3:] if len(entry[1]) > width else entry[1]) - - if j >= 9: - break - - def output_html(self): - resp_xml = "
" - resp_xml += "

" + _(RESPONSIBILITIES_INFO_TEXT) + ".

" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - - if responsibilities: - resp_xml += "
" - - if format.get_selected() == "html": - author_email = self.changes.get_latest_email_by_author(i) - resp_xml += "

{1} {2}

".format(gravatar.get_url(author_email, size=32), - i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)) - else: - resp_xml += "

{0} {1}

".format(i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)) - - for j, entry in enumerate(responsibilities): - resp_xml += "" if j % 2 == 1 else ">") + entry[1] + \ - " (" + str(entry[0]) + " eloc)
" - if j >= 9: - break - - resp_xml += "
" - resp_xml += "
" - print(resp_xml) - def output_json(self): - message_json = "\t\t\t\"message\": \"" + _(RESPONSIBILITIES_INFO_TEXT) + "\",\n" - resp_json = "" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - - if responsibilities: - author_email = self.changes.get_latest_email_by_author(i) - - resp_json += "{\n" - resp_json += "\t\t\t\t\"name\": \"" + i + "\",\n" - resp_json += "\t\t\t\t\"email\": \"" + author_email + "\",\n" - resp_json += "\t\t\t\t\"gravatar\": \"" + gravatar.get_url(author_email) + "\",\n" - resp_json += "\t\t\t\t\"files\": [\n\t\t\t\t" - - for j, entry in enumerate(responsibilities): - resp_json += "{\n" - resp_json += "\t\t\t\t\t\"name\": \"" + entry[1] + "\",\n" - resp_json += "\t\t\t\t\t\"rows\": " + str(entry[0]) + "\n" - resp_json += "\t\t\t\t}," - - if j >= 9: - break - - resp_json = resp_json[:-1] - resp_json += "]\n\t\t\t}," - - resp_json = resp_json[:-1] - print(",\n\t\t\"responsibilities\": {\n" + message_json + "\t\t\t\"authors\": [\n\t\t\t" + resp_json + "]\n\t\t}", end="") - - def output_xml(self): - message_xml = "\t\t" + _(RESPONSIBILITIES_INFO_TEXT) + "\n" - resp_xml = "" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - if responsibilities: - author_email = self.changes.get_latest_email_by_author(i) - - resp_xml += "\t\t\t\n" - resp_xml += "\t\t\t\t" + i + "\n" - resp_xml += "\t\t\t\t" + author_email + "\n" - resp_xml += "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - resp_xml += "\t\t\t\t\n" +class ResponsibilitiesOutput(Outputable): + def __init__(self, changes, blame): + self.changes = changes + self.blame = blame + Outputable.__init__(self) + + def output_text(self): + print("\n" + textwrap.fill(_(RESPONSIBILITIES_INFO_TEXT) + ":", width=terminal.get_size()[0])) + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - for j, entry in enumerate(responsibilities): - resp_xml += "\t\t\t\t\t\n" - resp_xml += "\t\t\t\t\t\t" + entry[1] + "\n" - resp_xml += "\t\t\t\t\t\t" + str(entry[0]) + "\n" - resp_xml += "\t\t\t\t\t\n" + if responsibilities: + print("\n" + i, _(MOSTLY_RESPONSIBLE_FOR_TEXT) + ":") + + for j, entry in enumerate(responsibilities): + (width, _unused) = terminal.get_size() + width -= 7 + + print(str(entry[0]).rjust(6), end=" ") + print("...%s" % entry[1][-width + 3:] if len(entry[1]) > width else entry[1]) + + if j >= 9: + break + + def output_html(self): + resp_xml = '
' + resp_xml += "

" + _(RESPONSIBILITIES_INFO_TEXT) + ".

" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + + if responsibilities: + resp_xml += "
" + + if format.get_selected() == "html": + author_email = self.changes.get_latest_email_by_author(i) + resp_xml += '

{1} {2}

'.format( + gravatar.get_url(author_email, size=32), i, _(MOSTLY_RESPONSIBLE_FOR_TEXT) + ) + else: + resp_xml += "

{0} {1}

".format(i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)) + + for j, entry in enumerate(responsibilities): + resp_xml += ( + "' if j % 2 == 1 else ">") + entry[1] + " (" + str(entry[0]) + " eloc)
" + ) + if j >= 9: + break + + resp_xml += "
" + resp_xml += "
" + print(resp_xml) + + def output_json(self): + message_json = '\t\t\t"message": "' + _(RESPONSIBILITIES_INFO_TEXT) + '",\n' + resp_json = "" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + + if responsibilities: + author_email = self.changes.get_latest_email_by_author(i) + + resp_json += "{\n" + resp_json += '\t\t\t\t"name": "' + i + '",\n' + resp_json += '\t\t\t\t"email": "' + author_email + '",\n' + resp_json += '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + resp_json += '\t\t\t\t"files": [\n\t\t\t\t' + + for j, entry in enumerate(responsibilities): + resp_json += "{\n" + resp_json += '\t\t\t\t\t"name": "' + entry[1] + '",\n' + resp_json += '\t\t\t\t\t"rows": ' + str(entry[0]) + "\n" + resp_json += "\t\t\t\t}," + + if j >= 9: + break + + resp_json = resp_json[:-1] + resp_json += "]\n\t\t\t}," + + resp_json = resp_json[:-1] + print(',\n\t\t"responsibilities": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + resp_json + "]\n\t\t}", end="") + + def output_xml(self): + message_xml = "\t\t" + _(RESPONSIBILITIES_INFO_TEXT) + "\n" + resp_xml = "" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + if responsibilities: + author_email = self.changes.get_latest_email_by_author(i) + + resp_xml += "\t\t\t\n" + resp_xml += "\t\t\t\t" + i + "\n" + resp_xml += "\t\t\t\t" + author_email + "\n" + resp_xml += "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + resp_xml += "\t\t\t\t\n" + + for j, entry in enumerate(responsibilities): + resp_xml += "\t\t\t\t\t\n" + resp_xml += "\t\t\t\t\t\t" + entry[1] + "\n" + resp_xml += "\t\t\t\t\t\t" + str(entry[0]) + "\n" + resp_xml += "\t\t\t\t\t\n" - if j >= 9: - break + if j >= 9: + break - resp_xml += "\t\t\t\t\n" - resp_xml += "\t\t\t\n" + resp_xml += "\t\t\t\t
\n" + resp_xml += "\t\t\t
\n" - print("\t\n" + message_xml + "\t\t\n" + resp_xml + "\t\t\n\t") + print("\t\n" + message_xml + "\t\t\n" + resp_xml + "\t\t\n\t") diff --git a/gitinspector/output/timelineoutput.py b/gitinspector/output/timelineoutput.py index a51bf488..79f1ff0e 100644 --- a/gitinspector/output/timelineoutput.py +++ b/gitinspector/output/timelineoutput.py @@ -18,7 +18,6 @@ # along with gitinspector. If not, see . - import textwrap from ..localization import N_ from .. import format, gravatar, terminal, timeline @@ -27,182 +26,195 @@ TIMELINE_INFO_TEXT = N_("The following history timeline has been gathered from the repository") MODIFIED_ROWS_TEXT = N_("Modified Rows:") + def __output_row__text__(timeline_data, periods, names): - print("\n" + terminal.__bold__ + terminal.ljust(_("Author"), 20), end=" ") + print("\n" + terminal.__bold__ + terminal.ljust(_("Author"), 20), end=" ") - for period in periods: - print(terminal.rjust(period, 10), end=" ") + for period in periods: + print(terminal.rjust(period, 10), end=" ") - print(terminal.__normal__) + print(terminal.__normal__) - for name in names: - if timeline_data.is_author_in_periods(periods, name[0]): - print(terminal.ljust(name[0], 20)[0:20 - terminal.get_excess_column_count(name[0])], end=" ") + for name in names: + if timeline_data.is_author_in_periods(periods, name[0]): + print(terminal.ljust(name[0], 20)[0:20 - terminal.get_excess_column_count(name[0])], end=" ") - for period in periods: - multiplier = timeline_data.get_multiplier(period, 9) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = (signs[1] * "-" + signs[0] * "+") - print (("." if timeline_data.is_author_in_period(period, name[0]) and - len(signs_str) == 0 else signs_str).rjust(10), end=" ") - print("") + for period in periods: + multiplier = timeline_data.get_multiplier(period, 9) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + print( + ("." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str).rjust( + 10 + ), + end=" ", + ) + print("") - print(terminal.__bold__ + terminal.ljust(_(MODIFIED_ROWS_TEXT), 20) + terminal.__normal__, end=" ") + print(terminal.__bold__ + terminal.ljust(_(MODIFIED_ROWS_TEXT), 20) + terminal.__normal__, end=" ") - for period in periods: - total_changes = str(timeline_data.get_total_changes_in_period(period)[2]) + for period in periods: + total_changes = str(timeline_data.get_total_changes_in_period(period)[2]) - if hasattr(total_changes, 'decode'): - total_changes = total_changes.decode("utf-8", "replace") + if hasattr(total_changes, "decode"): + total_changes = total_changes.decode("utf-8", "replace") - print(terminal.rjust(total_changes, 10), end=" ") + print(terminal.rjust(total_changes, 10), end=" ") + + print("") - print("") def __output_row__html__(timeline_data, periods, names): - timeline_xml = "" + timeline_xml = '
" + _("Author") + "
" + + for period in periods: + timeline_xml += "" - for period in periods: - timeline_xml += "" + timeline_xml += "" + i = 0 - timeline_xml += "" - i = 0 + for name in names: + if timeline_data.is_author_in_periods(periods, name[0]): + timeline_xml += "' if i % 2 == 1 else ">") - for name in names: - if timeline_data.is_author_in_periods(periods, name[0]): - timeline_xml += "" if i % 2 == 1 else ">") + if format.get_selected() == "html": + timeline_xml += ''.format(gravatar.get_url(name[1]), name[0]) + else: + timeline_xml += "" - if format.get_selected() == "html": - timeline_xml += "".format(gravatar.get_url(name[1]), name[0]) - else: - timeline_xml += "" + for period in periods: + multiplier = timeline_data.get_multiplier(period, 18) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * '
 
' + signs[0] * '
 
' - for period in periods: - multiplier = timeline_data.get_multiplier(period, 18) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = (signs[1] * "
 
" + signs[0] * "
 
") + timeline_xml += "" + timeline_xml += "" + i = i + 1 - timeline_xml += "" - timeline_xml += "" - i = i + 1 + timeline_xml += "" - timeline_xml += "" + for period in periods: + total_changes = timeline_data.get_total_changes_in_period(period) + timeline_xml += "" - for period in periods: - total_changes = timeline_data.get_total_changes_in_period(period) - timeline_xml += "" + timeline_xml += "
' + _("Author") + "" + str(period) + "" + str(period) + "
{1}" + name[0] + "{1}" + name[0] + "" + ( + "." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str + ) + timeline_xml += "
" + ("." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str) - timeline_xml += "
" + _(MODIFIED_ROWS_TEXT) + "
" + _(MODIFIED_ROWS_TEXT) + "" + str(total_changes[2]) + "" + str(total_changes[2]) + "
" + print(timeline_xml) - timeline_xml += "" - print(timeline_xml) class TimelineOutput(Outputable): - def __init__(self, changes, useweeks): - self.changes = changes - self.useweeks = useweeks - Outputable.__init__(self) - - def output_text(self): - if self.changes.get_commits(): - print("\n" + textwrap.fill(_(TIMELINE_INFO_TEXT) + ":", width=terminal.get_size()[0])) - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - (width, _unused) = terminal.get_size() - max_periods_per_row = int((width - 21) / 11) - - for i in range(0, len(periods), max_periods_per_row): - __output_row__text__(timeline_data, periods[i:i+max_periods_per_row], names) - - def output_html(self): - if self.changes.get_commits(): - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - max_periods_per_row = 8 - - timeline_xml = "
" - timeline_xml += "

" + _(TIMELINE_INFO_TEXT) + ".

" - print(timeline_xml) - - for i in range(0, len(periods), max_periods_per_row): - __output_row__html__(timeline_data, periods[i:i+max_periods_per_row], names) - - timeline_xml = "
" - print(timeline_xml) - - def output_json(self): - if self.changes.get_commits(): - message_json = "\t\t\t\"message\": \"" + _(TIMELINE_INFO_TEXT) + "\",\n" - timeline_json = "" - periods_json = "\t\t\t\"period_length\": \"{0}\",\n".format("week" if self.useweeks else "month") - periods_json += "\t\t\t\"periods\": [\n\t\t\t" - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - - for period in periods: - name_json = "\t\t\t\t\"name\": \"" + str(period) + "\",\n" - authors_json = "\t\t\t\t\"authors\": [\n\t\t\t\t" - - for name in names: - if timeline_data.is_author_in_period(period, name[0]): - multiplier = timeline_data.get_multiplier(period, 24) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = (signs[1] * "-" + signs[0] * "+") - - if len(signs_str) == 0: - signs_str = "." - - authors_json += "{\n\t\t\t\t\t\"name\": \"" + name[0] + "\",\n" - authors_json += "\t\t\t\t\t\"email\": \"" + name[1] + "\",\n" - authors_json += "\t\t\t\t\t\"gravatar\": \"" + gravatar.get_url(name[1]) + "\",\n" - authors_json += "\t\t\t\t\t\"work\": \"" + signs_str + "\"\n\t\t\t\t}," - else: - authors_json = authors_json[:-1] - - authors_json += "],\n" - modified_rows_json = "\t\t\t\t\"modified_rows\": " + \ - str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" - timeline_json += "{\n" + name_json + authors_json + modified_rows_json + "\t\t\t}," - else: - timeline_json = timeline_json[:-1] - - print(",\n\t\t\"timeline\": {\n" + message_json + periods_json + timeline_json + "]\n\t\t}", end="") - - def output_xml(self): - if self.changes.get_commits(): - message_xml = "\t\t" + _(TIMELINE_INFO_TEXT) + "\n" - timeline_xml = "" - periods_xml = "\t\t\n".format("week" if self.useweeks else "month") - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - - for period in periods: - name_xml = "\t\t\t\t" + str(period) + "\n" - authors_xml = "\t\t\t\t\n" - - for name in names: - if timeline_data.is_author_in_period(period, name[0]): - multiplier = timeline_data.get_multiplier(period, 24) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = (signs[1] * "-" + signs[0] * "+") - - if len(signs_str) == 0: - signs_str = "." 
- - authors_xml += "\t\t\t\t\t\n\t\t\t\t\t\t" + name[0] + "\n" - authors_xml += "\t\t\t\t\t\t" + name[1] + "\n" - authors_xml += "\t\t\t\t\t\t" + gravatar.get_url(name[1]) + "\n" - authors_xml += "\t\t\t\t\t\t" + signs_str + "\n\t\t\t\t\t\n" - - authors_xml += "\t\t\t\t\n" - modified_rows_xml = "\t\t\t\t" + \ - str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" - timeline_xml += "\t\t\t\n" + name_xml + authors_xml + modified_rows_xml + "\t\t\t\n" - - print("\t\n" + message_xml + periods_xml + timeline_xml + "\t\t\n\t") + def __init__(self, changes, useweeks): + self.changes = changes + self.useweeks = useweeks + Outputable.__init__(self) + + def output_text(self): + if self.changes.get_commits(): + print("\n" + textwrap.fill(_(TIMELINE_INFO_TEXT) + ":", width=terminal.get_size()[0])) + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + (width, _unused) = terminal.get_size() + max_periods_per_row = int((width - 21) / 11) + + for i in range(0, len(periods), max_periods_per_row): + __output_row__text__(timeline_data, periods[i:i + max_periods_per_row], names) + + def output_html(self): + if self.changes.get_commits(): + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + max_periods_per_row = 8 + + timeline_xml = '
' + timeline_xml += "

" + _(TIMELINE_INFO_TEXT) + ".

" + print(timeline_xml) + + for i in range(0, len(periods), max_periods_per_row): + __output_row__html__(timeline_data, periods[i:i + max_periods_per_row], names) + + timeline_xml = "
" + print(timeline_xml) + + def output_json(self): + if self.changes.get_commits(): + message_json = '\t\t\t"message": "' + _(TIMELINE_INFO_TEXT) + '",\n' + timeline_json = "" + periods_json = '\t\t\t"period_length": "{0}",\n'.format("week" if self.useweeks else "month") + periods_json += '\t\t\t"periods": [\n\t\t\t' + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + + for period in periods: + name_json = '\t\t\t\t"name": "' + str(period) + '",\n' + authors_json = '\t\t\t\t"authors": [\n\t\t\t\t' + + for name in names: + if timeline_data.is_author_in_period(period, name[0]): + multiplier = timeline_data.get_multiplier(period, 24) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + + if len(signs_str) == 0: + signs_str = "." + + authors_json += '{\n\t\t\t\t\t"name": "' + name[0] + '",\n' + authors_json += '\t\t\t\t\t"email": "' + name[1] + '",\n' + authors_json += '\t\t\t\t\t"gravatar": "' + gravatar.get_url(name[1]) + '",\n' + authors_json += '\t\t\t\t\t"work": "' + signs_str + '"\n\t\t\t\t},' + else: + authors_json = authors_json[:-1] + + authors_json += "],\n" + modified_rows_json = ( + '\t\t\t\t"modified_rows": ' + str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" + ) + timeline_json += "{\n" + name_json + authors_json + modified_rows_json + "\t\t\t}," + else: + timeline_json = timeline_json[:-1] + + print(',\n\t\t"timeline": {\n' + message_json + periods_json + timeline_json + "]\n\t\t}", end="") + + def output_xml(self): + if self.changes.get_commits(): + message_xml = "\t\t" + _(TIMELINE_INFO_TEXT) + "\n" + timeline_xml = "" + periods_xml = '\t\t\n'.format("week" if self.useweeks else "month") + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + + for period in periods: + name_xml = "\t\t\t\t" + str(period) + "\n" + authors_xml = "\t\t\t\t\n" + + for name in names: + if timeline_data.is_author_in_period(period, name[0]): + multiplier = timeline_data.get_multiplier(period, 24) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + + if len(signs_str) == 0: + signs_str = "." + + authors_xml += "\t\t\t\t\t\n\t\t\t\t\t\t" + name[0] + "\n" + authors_xml += "\t\t\t\t\t\t" + name[1] + "\n" + authors_xml += "\t\t\t\t\t\t" + gravatar.get_url(name[1]) + "\n" + authors_xml += "\t\t\t\t\t\t" + signs_str + "\n\t\t\t\t\t\n" + + authors_xml += "\t\t\t\t\n" + modified_rows_xml = ( + "\t\t\t\t" + + str(timeline_data.get_total_changes_in_period(period)[2]) + + "\n" + ) + timeline_xml += "\t\t\t\n" + name_xml + authors_xml + modified_rows_xml + "\t\t\t\n" + + print("\t\n" + message_xml + periods_xml + timeline_xml + "\t\t\n\t") diff --git a/gitinspector/responsibilities.py b/gitinspector/responsibilities.py index 94b2b516..6a3a0c95 100644 --- a/gitinspector/responsibilities.py +++ b/gitinspector/responsibilities.py @@ -18,20 +18,19 @@ # along with gitinspector. If not, see . 
- - class ResponsibiltyEntry(object): - blames = {} + blames = {} + class Responsibilities(object): - @staticmethod - def get(blame, author_name): - author_blames = {} + @staticmethod + def get(blame, author_name): + author_blames = {} - for i in list(blame.blames.items()): - if author_name == i[0][0]: - total_rows = i[1].rows - i[1].comments - if total_rows > 0: - author_blames[i[0][1]] = total_rows + for i in list(blame.blames.items()): + if author_name == i[0][0]: + total_rows = i[1].rows - i[1].comments + if total_rows > 0: + author_blames[i[0][1]] = total_rows - return sorted(author_blames.items()) + return sorted(author_blames.items()) diff --git a/gitinspector/terminal.py b/gitinspector/terminal.py index ed387d44..781c0e48 100644 --- a/gitinspector/terminal.py +++ b/gitinspector/terminal.py @@ -29,130 +29,151 @@ DEFAULT_TERMINAL_SIZE = (80, 25) + def __get_size_windows__(): - res = None - try: - from ctypes import windll, create_string_buffer - - handler = windll.kernel32.GetStdHandle(-12) # stderr - csbi = create_string_buffer(22) - res = windll.kernel32.GetConsoleScreenBufferInfo(handler, csbi) - except: - return DEFAULT_TERMINAL_SIZE - - if res: - import struct - (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack("hhhhHhhhhhh", csbi.raw) - sizex = right - left + 1 - sizey = bottom - top + 1 - return sizex, sizey - else: - return DEFAULT_TERMINAL_SIZE + res = None + try: + from ctypes import windll, create_string_buffer + + handler = windll.kernel32.GetStdHandle(-12) # stderr + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(handler, csbi) + except: + return DEFAULT_TERMINAL_SIZE + + if res: + import struct + + (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack("hhhhHhhhhhh", csbi.raw) + sizex = right - left + 1 + sizey = bottom - top + 1 + return sizex, sizey + else: + return DEFAULT_TERMINAL_SIZE + def __get_size_linux__(): - def ioctl_get_window_size(file_descriptor): - try: - import fcntl, termios, struct - size = struct.unpack('hh', fcntl.ioctl(file_descriptor, termios.TIOCGWINSZ, "1234")) - except: - return DEFAULT_TERMINAL_SIZE - - return size - - size = ioctl_get_window_size(0) or ioctl_get_window_size(1) or ioctl_get_window_size(2) - - if not size: - try: - file_descriptor = os.open(os.ctermid(), os.O_RDONLY) - size = ioctl_get_window_size(file_descriptor) - os.close(file_descriptor) - except: - pass - if not size: - try: - size = (os.environ["LINES"], os.environ["COLUMNS"]) - except: - return DEFAULT_TERMINAL_SIZE - - return int(size[1]), int(size[0]) + def ioctl_get_window_size(file_descriptor): + try: + import fcntl, termios, struct + + size = struct.unpack("hh", fcntl.ioctl(file_descriptor, termios.TIOCGWINSZ, "1234")) + except: + return DEFAULT_TERMINAL_SIZE + + return size + + size = ioctl_get_window_size(0) or ioctl_get_window_size(1) or ioctl_get_window_size(2) + + if not size: + try: + file_descriptor = os.open(os.ctermid(), os.O_RDONLY) + size = ioctl_get_window_size(file_descriptor) + os.close(file_descriptor) + except: + pass + if not size: + try: + size = (os.environ["LINES"], os.environ["COLUMNS"]) + except: + return DEFAULT_TERMINAL_SIZE + + return int(size[1]), int(size[0]) + def clear_row(): - print("\r", end="") + print("\r", end="") + def skip_escapes(skip): - if skip: - global __bold__ - global __normal__ - __bold__ = "" - __normal__ = "" + if skip: + global __bold__ + global __normal__ + __bold__ = "" + __normal__ = "" + def printb(string): - print(__bold__ + string + __normal__) + 
print(__bold__ + string + __normal__) + def get_size(): - width = 0 - height = 0 + width = 0 + height = 0 - if sys.stdout.isatty(): - current_os = platform.system() + if sys.stdout.isatty(): + current_os = platform.system() - if current_os == "Windows": - (width, height) = __get_size_windows__() - elif current_os == "Linux" or current_os == "Darwin" or current_os.startswith("CYGWIN"): - (width, height) = __get_size_linux__() + if current_os == "Windows": + (width, height) = __get_size_windows__() + elif current_os == "Linux" or current_os == "Darwin" or current_os.startswith("CYGWIN"): + (width, height) = __get_size_linux__() - if width > 0: - return (width, height) + if width > 0: + return (width, height) + + return DEFAULT_TERMINAL_SIZE - return DEFAULT_TERMINAL_SIZE def set_stdout_encoding(): - if not sys.stdout.isatty() and sys.version_info < (3,): - sys.stdout = codecs.getwriter("utf-8")(sys.stdout) + if not sys.stdout.isatty() and sys.version_info < (3,): + sys.stdout = codecs.getwriter("utf-8")(sys.stdout) + def set_stdin_encoding(): - if not sys.stdin.isatty() and sys.version_info < (3,): - sys.stdin = codecs.getreader("utf-8")(sys.stdin) + if not sys.stdin.isatty() and sys.version_info < (3,): + sys.stdin = codecs.getreader("utf-8")(sys.stdin) + def convert_command_line_to_utf8(): - try: - argv = [] + try: + argv = [] - for arg in sys.argv: - argv.append(arg.decode(sys.stdin.encoding, "replace")) + for arg in sys.argv: + argv.append(arg.decode(sys.stdin.encoding, "replace")) + + return argv + except AttributeError: + return sys.argv - return argv - except AttributeError: - return sys.argv def check_terminal_encoding(): - if sys.stdout.isatty() and (sys.stdout.encoding == None or sys.stdin.encoding == None): - print(_("WARNING: The terminal encoding is not correctly configured. gitinspector might malfunction. " - "The encoding can be configured with the environment variable 'PYTHONIOENCODING'."), file=sys.stderr) + if sys.stdout.isatty() and (sys.stdout.encoding is None or sys.stdin.encoding is None): + print( + _( + "WARNING: The terminal encoding is not correctly configured. gitinspector might malfunction. " + "The encoding can be configured with the environment variable 'PYTHONIOENCODING'." 
+ ), + file=sys.stderr, + ) + def get_excess_column_count(string): - width_mapping = {'F': 2, 'H': 1, 'W': 2, 'Na': 1, 'N': 1, 'A': 1} - result = 0 + width_mapping = {"F": 2, "H": 1, "W": 2, "Na": 1, "N": 1, "A": 1} + result = 0 + + for i in string: + width = unicodedata.east_asian_width(i) + result += width_mapping[width] - for i in string: - width = unicodedata.east_asian_width(i) - result += width_mapping[width] + return result - len(string) - return result - len(string) def ljust(string, pad): - return string.ljust(pad - get_excess_column_count(string)) + return string.ljust(pad - get_excess_column_count(string)) + def rjust(string, pad): - return string.rjust(pad - get_excess_column_count(string)) + return string.rjust(pad - get_excess_column_count(string)) + def output_progress(text, pos, length): - if sys.stdout.isatty(): - (width, _unused) = get_size() - progress_text = text.format(100 * pos / length) + if sys.stdout.isatty(): + (width, _unused) = get_size() + progress_text = text.format(100 * pos / length) - if len(progress_text) > width: - progress_text = "...%s" % progress_text[-width+3:] + if len(progress_text) > width: + progress_text = "...%s" % progress_text[-width + 3:] - print("\r{0}\r{1}".format(" " * width, progress_text), end="") - sys.stdout.flush() + print("\r{0}\r{1}".format(" " * width, progress_text), end="") + sys.stdout.flush() diff --git a/gitinspector/timeline.py b/gitinspector/timeline.py index b8a23867..f3f9dedf 100644 --- a/gitinspector/timeline.py +++ b/gitinspector/timeline.py @@ -20,81 +20,81 @@ import datetime + class TimelineData(object): - def __init__(self, changes, useweeks): - authordateinfo_list = sorted(changes.get_authordateinfo_list().items()) - self.changes = changes - self.entries = {} - self.total_changes_by_period = {} - self.useweeks = useweeks - - for i in authordateinfo_list: - key = None - - if useweeks: - yearweek = datetime.date(int(i[0][0][0:4]), int(i[0][0][5:7]), int(i[0][0][8:10])).isocalendar() - key = (i[0][1], str(yearweek[0]) + "W" + "{0:02d}".format(yearweek[1])) - else: - key = (i[0][1], i[0][0][0:7]) - - if self.entries.get(key, None) == None: - self.entries[key] = i[1] - else: - self.entries[key].insertions += i[1].insertions - self.entries[key].deletions += i[1].deletions - - for period in self.get_periods(): - total_insertions = 0 - total_deletions = 0 - - for author in self.get_authors(): - entry = self.entries.get((author[0], period), None) - if entry != None: - total_insertions += entry.insertions - total_deletions += entry.deletions - - self.total_changes_by_period[period] = (total_insertions, total_deletions, - total_insertions + total_deletions) - - def get_periods(self): - return sorted(set([i[1] for i in self.entries])) - - def get_total_changes_in_period(self, period): - return self.total_changes_by_period[period] - - def get_authors(self): - return sorted(set([(i[0][0], self.changes.get_latest_email_by_author(i[0][0])) for i in list(self.entries.items())])) - - def get_author_signs_in_period(self, author, period, multiplier): - authorinfo = self.entries.get((author, period), None) - total = float(self.total_changes_by_period[period][2]) - - if authorinfo: - i = multiplier * (self.entries[(author, period)].insertions / total) - j = multiplier * (self.entries[(author, period)].deletions / total) - return (int(i), int(j)) - else: - return (0, 0) - - def get_multiplier(self, period, max_width): - multiplier = 0 - - while True: - for i in self.entries: - entry = self.entries.get(i) - - if period == i[1]: - 
changes_in_period = float(self.total_changes_by_period[i[1]][2]) - if multiplier * (entry.insertions + entry.deletions) / changes_in_period > max_width: - return multiplier - - multiplier += 0.25 - - def is_author_in_period(self, period, author): - return self.entries.get((author, period), None) != None - - def is_author_in_periods(self, periods, author): - for period in periods: - if self.is_author_in_period(period, author): - return True - return False + def __init__(self, changes, useweeks): + authordateinfo_list = sorted(changes.get_authordateinfo_list().items()) + self.changes = changes + self.entries = {} + self.total_changes_by_period = {} + self.useweeks = useweeks + + for i in authordateinfo_list: + key = None + + if useweeks: + yearweek = datetime.date(int(i[0][0][0:4]), int(i[0][0][5:7]), int(i[0][0][8:10])).isocalendar() + key = (i[0][1], str(yearweek[0]) + "W" + "{0:02d}".format(yearweek[1])) + else: + key = (i[0][1], i[0][0][0:7]) + + if self.entries.get(key, None) is None: + self.entries[key] = i[1] + else: + self.entries[key].insertions += i[1].insertions + self.entries[key].deletions += i[1].deletions + + for period in self.get_periods(): + total_insertions = 0 + total_deletions = 0 + + for author in self.get_authors(): + entry = self.entries.get((author[0], period), None) + if entry is not None: + total_insertions += entry.insertions + total_deletions += entry.deletions + + self.total_changes_by_period[period] = (total_insertions, total_deletions, total_insertions + total_deletions) + + def get_periods(self): + return sorted(set([i[1] for i in self.entries])) + + def get_total_changes_in_period(self, period): + return self.total_changes_by_period[period] + + def get_authors(self): + return sorted(set([(i[0][0], self.changes.get_latest_email_by_author(i[0][0])) for i in list(self.entries.items())])) + + def get_author_signs_in_period(self, author, period, multiplier): + authorinfo = self.entries.get((author, period), None) + total = float(self.total_changes_by_period[period][2]) + + if authorinfo: + i = multiplier * (self.entries[(author, period)].insertions / total) + j = multiplier * (self.entries[(author, period)].deletions / total) + return (int(i), int(j)) + else: + return (0, 0) + + def get_multiplier(self, period, max_width): + multiplier = 0 + + while True: + for i in self.entries: + entry = self.entries.get(i) + + if period == i[1]: + changes_in_period = float(self.total_changes_by_period[i[1]][2]) + if multiplier * (entry.insertions + entry.deletions) / changes_in_period > max_width: + return multiplier + + multiplier += 0.25 + + def is_author_in_period(self, period, author): + return self.entries.get((author, period), None) is not None + + def is_author_in_periods(self, periods, author): + for period in periods: + if self.is_author_in_period(period, author): + return True + return False diff --git a/gitinspector/version.py b/gitinspector/version.py index 8e8f28c8..ef0c1034 100644 --- a/gitinspector/version.py +++ b/gitinspector/version.py @@ -18,17 +18,21 @@ # along with gitinspector. If not, see . - from . import localization + localization.init() __version__ = "0.5.0dev" -__doc__ = _("""Copyright © 2012-2015 Ejwa Software. All rights reserved. +__doc__ = _( + """Copyright © 2012-2015 Ejwa Software. All rights reserved. License GPLv3+: GNU GPL version 3 or later . This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. 
-Written by Adam Waldenberg.""") +Written by Adam Waldenberg.""" +) + + def output(): - print("gitinspector {0}\n".format(__version__) + __doc__) + print("gitinspector {0}\n".format(__version__) + __doc__) diff --git a/tests/test_comment.py b/tests/test_comment.py index 8e495bc2..cfd617f3 100644 --- a/tests/test_comment.py +++ b/tests/test_comment.py @@ -19,29 +19,31 @@ from __future__ import unicode_literals import os -import sys import unittest import gitinspector.comment + def __test_extension__(commented_file, extension): - base = os.path.dirname(os.path.realpath(__file__)) - tex_file = open(base + commented_file, "r") - tex = tex_file.readlines() - tex_file.close() + base = os.path.dirname(os.path.realpath(__file__)) + tex_file = open(base + commented_file, "r") + tex = tex_file.readlines() + tex_file.close() + + is_inside_comment = False + comment_counter = 0 + for i in tex: + (_, is_inside_comment) = gitinspector.comment.handle_comment_block(is_inside_comment, extension, i) + if is_inside_comment or gitinspector.comment.is_comment(extension, i): + comment_counter += 1 - is_inside_comment = False - comment_counter = 0 - for i in tex: - (_, is_inside_comment) = gitinspector.comment.handle_comment_block(is_inside_comment, extension, i) - if is_inside_comment or gitinspector.comment.is_comment(extension, i): - comment_counter += 1 + return comment_counter - return comment_counter class TexFileTest(unittest.TestCase): def test(self): - comment_counter = __test_extension__("/resources/commented_file.tex", "tex") - self.assertEqual(comment_counter, 30) + comment_counter = __test_extension__("/resources/commented_file.tex", "tex") + self.assertEqual(comment_counter, 30) + class CppFileTest(unittest.TestCase): def test(self): From 306f52eda9775435f024ff64f58c160cb501017b Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 27 Feb 2021 22:42:11 -0500 Subject: [PATCH 09/46] Removing requirements.txt generation --- .github/workflows/python-package.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index f6802463..eb9a54b4 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -26,7 +26,6 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - make requirements if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Lint with flake8 run: | From e7259120f08ce6cfd8b83053105b0a9e5b37e405 Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 27 Feb 2021 22:59:59 -0500 Subject: [PATCH 10/46] Ci Setup --- .coveragerc | 2 ++ .coveralls.yml | 0 .github/workflows/python-package.yml | 14 ++++++++++++++ pyproject.toml | 2 ++ 4 files changed, 18 insertions(+) create mode 100644 .coveragerc create mode 100644 .coveralls.yml create mode 100644 pyproject.toml diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..ce2f455f --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +relative_files = True \ No newline at end of file diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index eb9a54b4..3331350e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -33,3 +33,17 @@ jobs: - name: Test with pytest run: | make test-coverage + - name: Report coverage to Coveralls + uses: AndreMiras/coveralls-python-action@develop + with: + parallel: true + flag-name: Unit Test + 
+ coveralls_finish: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop + with: + parallel-finished: true \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..7349e8f9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.coverage.run] +relative_files = True \ No newline at end of file From 903a5f5d766826a79863abf406954626d071edd9 Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 27 Feb 2021 23:08:48 -0500 Subject: [PATCH 11/46] CI --- .github/workflows/python-package.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 3331350e..5b038e47 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -10,7 +10,7 @@ on: branches: [ master ] jobs: - build: + test: runs-on: ubuntu-latest strategy: @@ -33,17 +33,18 @@ jobs: - name: Test with pytest run: | make test-coverage - - name: Report coverage to Coveralls - uses: AndreMiras/coveralls-python-action@develop + - name: Coveralls + uses: coverallsapp/github-action@master with: + github-token: ${{ secrets.GITHUB_TOKEN }} parallel: true - flag-name: Unit Test coveralls_finish: needs: test runs-on: ubuntu-latest steps: - name: Coveralls Finished - uses: AndreMiras/coveralls-python-action@develop + uses: coverallsapp/github-action@master with: + github-token: ${{ secrets.github_token }} parallel-finished: true \ No newline at end of file From 7a5bb05912f4cee1c1aae823189403b5290d990e Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 27 Feb 2021 23:13:20 -0500 Subject: [PATCH 12/46] Fixing pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7349e8f9..aef55bfe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,2 @@ [tool.coverage.run] -relative_files = True \ No newline at end of file +relative_files = true \ No newline at end of file From d35acca21e88d1785fa8a909f190b5016594133b Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 15:11:27 -0500 Subject: [PATCH 13/46] Testing --- .coveralls.yml | 0 .github/workflows/python-package.yml | 2 ++ Makefile | 1 + Pipfile | 5 +++-- README.md | 9 +++++---- 5 files changed, 11 insertions(+), 6 deletions(-) delete mode 100644 .coveralls.yml diff --git a/.coveralls.yml b/.coveralls.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5b038e47..27bac205 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -31,6 +31,8 @@ jobs: run: | make lint - name: Test with pytest + env: + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} run: | make test-coverage - name: Coveralls diff --git a/Makefile b/Makefile index 1ce9448f..79d557f5 100644 --- a/Makefile +++ b/Makefile @@ -52,6 +52,7 @@ test-debug: ## run tests with debugging enabled test-coverage: ## check code coverage quickly with the default Python coverage run --source gitinspector -m pytest coverage report -m + coveralls release: dist ## package and upload a release twine upload dist/* diff --git a/Pipfile b/Pipfile index ee9798a2..274af415 100644 --- a/Pipfile +++ b/Pipfile @@ -4,14 +4,15 @@ verify_ssl = true name = "pypi" [packages] -twine = "*" -coverage = "*" [dev-packages] pytest = "*" flake8 = "*" autopep8 = "*" black = "*" +twine 
= "*" +coverage = "*" +coveralls = "*" [requires] python_version = "3.8" diff --git a/README.md b/README.md index 65e19460..66f545db 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,12 @@ -[![Latest release](https://img.shields.io/github/release/ejwa/gitinspector.svg?style=flat-square)](https://github.com/ejwa/gitinspector/releases/latest) -[![License](https://img.shields.io/github/license/ejwa/gitinspector.svg?style=flat-square)](https://github.com/ejwa/gitinspector/blob/master/LICENSE.txt) +[![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) +[![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) +[![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt)

+ src="https://raw.githubusercontent.com/jpwhite3/gitinspector/master/gitinspector/html/gitinspector_piclet.png"/>  About Gitinspector

- + Gitinspector is a statistical analysis tool for git repositories. The default analysis shows general statistics per author, which can be complemented with a timeline analysis that shows the workload and activity of each author. Under normal operation, it filters the results to only show statistics about a number of given extensions and by default only includes source files in the statistical analysis. This tool was originally written to help fetch repository statistics from student projects in the course Object-oriented Programming Project (TDA367/DIT211) at Chalmers University of Technology and Gothenburg University. From 83a9217c54d1b4821831e8fc29ed8db14919ee6d Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:19:41 -0500 Subject: [PATCH 14/46] CI --- .github/workflows/python-package.yml | 6 ++++-- Makefile | 2 ++ requirements.txt | 2 ++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 27bac205..253ada93 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -31,12 +31,12 @@ jobs: run: | make lint - name: Test with pytest - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} run: | make test-coverage - name: Coveralls uses: coverallsapp/github-action@master + env: + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} with: github-token: ${{ secrets.GITHUB_TOKEN }} parallel: true @@ -47,6 +47,8 @@ jobs: steps: - name: Coveralls Finished uses: coverallsapp/github-action@master + env: + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} with: github-token: ${{ secrets.github_token }} parallel-finished: true \ No newline at end of file diff --git a/Makefile b/Makefile index 79d557f5..54c296cc 100644 --- a/Makefile +++ b/Makefile @@ -52,6 +52,8 @@ test-debug: ## run tests with debugging enabled test-coverage: ## check code coverage quickly with the default Python coverage run --source gitinspector -m pytest coverage report -m + +test-coverage-report: test-coverage ## Report coverage to Coveralls coveralls release: dist ## package and upload a release diff --git a/requirements.txt b/requirements.txt index 6f48a812..ae8dfef7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,6 +19,8 @@ chardet==4.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2 click==7.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' colorama==0.4.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' coverage==5.4 +coveralls==3.0.0 +docopt==0.6.2 docutils==0.16; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' flake8==3.8.4 idna==2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' From 636fd09fb86b3a1a2b9eebd24b01770f134f2991 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:25:12 -0500 Subject: [PATCH 15/46] Coveralls --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 253ada93..fcc98add 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -32,7 +32,7 @@ jobs: make lint - name: Test with pytest run: | - make test-coverage + make test-coverage-report - name: Coveralls uses: coverallsapp/github-action@master env: From 05293284def3783ba43981edb1e3fe1be31d4d62 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 
16:27:32 -0500 Subject: [PATCH 16/46] Coveralls --- .github/workflows/python-package.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index fcc98add..f1c7c6f7 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -31,6 +31,8 @@ jobs: run: | make lint - name: Test with pytest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | make test-coverage-report - name: Coveralls From 20e731955949150aa7f09487a6e9b58d648beba3 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:30:18 -0500 Subject: [PATCH 17/46] Coveralls --- .github/workflows/python-package.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index f1c7c6f7..290765c8 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -33,6 +33,7 @@ jobs: - name: Test with pytest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} run: | make test-coverage-report - name: Coveralls From 5127a2767ae5785eba9bb44d69a428a8136a8cfc Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:35:47 -0500 Subject: [PATCH 18/46] Coveralls --- .github/workflows/python-package.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 290765c8..c945dd8a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,15 +34,11 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - run: | - make test-coverage-report - - name: Coveralls - uses: coverallsapp/github-action@master - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} with: github-token: ${{ secrets.GITHUB_TOKEN }} parallel: true + run: | + make test-coverage-report coveralls_finish: needs: test From e936701efe661ac8d09de9094b2e480faaac3ce4 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:38:12 -0500 Subject: [PATCH 19/46] Coveralls --- .github/workflows/python-package.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c945dd8a..008a6ed9 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,9 +34,6 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - parallel: true run: | make test-coverage-report From 784f0b1a20d00548b84e5ecc33ffd8dae9210145 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:50:30 -0500 Subject: [PATCH 20/46] Build Badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 66f545db..9efc7ac6 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +![Build Status](https://github.com/jpwhite3/gitinspector/actions/workflows/python-package.yml/badge.svg) + [![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) [![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) 
[![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt) From 6843dee14ea4247d0a8c27d6a1fa337953938f0b Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 16:56:49 -0500 Subject: [PATCH 21/46] Build Badge --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9efc7ac6..01dc06dd 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,14 @@ ![Build Status](https://github.com/jpwhite3/gitinspector/actions/workflows/python-package.yml/badge.svg) - [![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) [![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) [![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt) +

 About Gitinspector

- + Gitinspector is a statistical analysis tool for git repositories. The default analysis shows general statistics per author, which can be complemented with a timeline analysis that shows the workload and activity of each author. Under normal operation, it filters the results to only show statistics about a number of given extensions and by default only includes source files in the statistical analysis. This tool was originally written to help fetch repository statistics from student projects in the course Object-oriented Programming Project (TDA367/DIT211) at Chalmers University of Technology and Gothenburg University. From 0bfacc1d664d1f3a78262bfe22d81281c035852e Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 17:26:45 -0500 Subject: [PATCH 22/46] gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 376b4e5c..f20deee6 100644 --- a/.gitignore +++ b/.gitignore @@ -6,5 +6,6 @@ node_modules *.egg-info *.pyc *.tgz +.DS_Store Pipfile.lock .coverage \ No newline at end of file From 4dfb3146284b26fb812156ca46ef1f4161f5524d Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 19:24:36 -0500 Subject: [PATCH 23/46] Save --- Makefile | 2 +- Pipfile | 4 --- gitinspector/filtering.py | 4 +-- gitinspector/output/blameoutput.py | 2 +- gitinspector/output/changesoutput.py | 2 +- gitinspector/output/filteringoutput.py | 2 +- gitinspector/output/responsibilitiesoutput.py | 2 +- gitinspector/output/timelineoutput.py | 6 ++-- gitinspector/terminal.py | 2 +- tests/test_basedir.py | 31 +++++++++++++++++++ tests/test_blame.py | 24 ++++++++++++++ 11 files changed, 66 insertions(+), 15 deletions(-) create mode 100644 tests/test_basedir.py create mode 100644 tests/test_blame.py diff --git a/Makefile b/Makefile index 54c296cc..752e5106 100644 --- a/Makefile +++ b/Makefile @@ -38,7 +38,7 @@ lint: ## check style with flake8 # stop the build if there are Python syntax errors or undefined names flake8 gitinspector tests --count --select=E9,F63,F7,F82 --show-source --statistics --builtins="_" # exit-zero treats all errors as warnings. 
The GitHub editor is 127 chars wide - flake8 gitinspector tests --count --ignore=E722,W503,E401,C901 --exit-zero --max-complexity=10 --max-line-length=127 --statistics --builtins="_" + flake8 gitinspector tests --count --ignore=E203,E722,W503,E401,C901 --exit-zero --max-complexity=10 --max-line-length=127 --statistics --builtins="_" format: ## auto format all the code with black black gitinspector --line-length 127 diff --git a/Pipfile b/Pipfile index 274af415..de372288 100644 --- a/Pipfile +++ b/Pipfile @@ -8,14 +8,10 @@ name = "pypi" [dev-packages] pytest = "*" flake8 = "*" -autopep8 = "*" black = "*" twine = "*" coverage = "*" coveralls = "*" -[requires] -python_version = "3.8" - [pipenv] allow_prereleases = true diff --git a/gitinspector/filtering.py b/gitinspector/filtering.py index ee8d825c..4fca2143 100644 --- a/gitinspector/filtering.py +++ b/gitinspector/filtering.py @@ -42,8 +42,8 @@ def get(): def __add_one__(string): for i in __filters__: - if (i + ":").lower() == string[0:len(i) + 1].lower(): - __filters__[i][0].add(string[len(i) + 1:]) + if (i + ":").lower() == string[0 : len(i) + 1].lower(): + __filters__[i][0].add(string[len(i) + 1 :]) return __filters__["file"][0].add(string) diff --git a/gitinspector/output/blameoutput.py b/gitinspector/output/blameoutput.py index ee35947f..e8ac7178 100644 --- a/gitinspector/output/blameoutput.py +++ b/gitinspector/output/blameoutput.py @@ -144,7 +144,7 @@ def output_text(self): ) for i in sorted(self.blame.get_summed_blames().items()): - print(terminal.ljust(i[0], 20)[0:20 - terminal.get_excess_column_count(i[0])], end=" ") + print(terminal.ljust(i[0], 20)[0 : 20 - terminal.get_excess_column_count(i[0])], end=" ") print(str(i[1].rows).rjust(10), end=" ") print("{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)).rjust(14), end=" ") print("{0:.1f}".format(float(i[1].skew) / i[1].rows).rjust(12), end=" ") diff --git a/gitinspector/output/changesoutput.py b/gitinspector/output/changesoutput.py index a7175d9d..f11a28a7 100644 --- a/gitinspector/output/changesoutput.py +++ b/gitinspector/output/changesoutput.py @@ -163,7 +163,7 @@ def output_text(self): authorinfo = authorinfo_list.get(i) percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - print(terminal.ljust(i, 20)[0:20 - terminal.get_excess_column_count(i)], end=" ") + print(terminal.ljust(i, 20)[0 : 20 - terminal.get_excess_column_count(i)], end=" ") print(str(authorinfo.commits).rjust(13), end=" ") print(str(authorinfo.insertions).rjust(13), end=" ") print(str(authorinfo.deletions).rjust(14), end=" ") diff --git a/gitinspector/output/filteringoutput.py b/gitinspector/output/filteringoutput.py index dcefeb55..00b50135 100644 --- a/gitinspector/output/filteringoutput.py +++ b/gitinspector/output/filteringoutput.py @@ -103,7 +103,7 @@ def __output_text_section__(info_string, filtered): for i in filtered: (width, _unused) = terminal.get_size() - print("...%s" % i[-width + 3:] if len(i) > width else i) + print("...%s" % i[-width + 3 :] if len(i) > width else i) def output_text(self): FilteringOutput.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) diff --git a/gitinspector/output/responsibilitiesoutput.py b/gitinspector/output/responsibilitiesoutput.py index 2cc37a3f..a084beb6 100644 --- a/gitinspector/output/responsibilitiesoutput.py +++ b/gitinspector/output/responsibilitiesoutput.py @@ -52,7 +52,7 @@ def output_text(self): width -= 7 print(str(entry[0]).rjust(6), end=" ") - print("...%s" % 
entry[1][-width + 3:] if len(entry[1]) > width else entry[1]) + print("...%s" % entry[1][-width + 3 :] if len(entry[1]) > width else entry[1]) if j >= 9: break diff --git a/gitinspector/output/timelineoutput.py b/gitinspector/output/timelineoutput.py index 79f1ff0e..29c97ae8 100644 --- a/gitinspector/output/timelineoutput.py +++ b/gitinspector/output/timelineoutput.py @@ -37,7 +37,7 @@ def __output_row__text__(timeline_data, periods, names): for name in names: if timeline_data.is_author_in_periods(periods, name[0]): - print(terminal.ljust(name[0], 20)[0:20 - terminal.get_excess_column_count(name[0])], end=" ") + print(terminal.ljust(name[0], 20)[0 : 20 - terminal.get_excess_column_count(name[0])], end=" ") for period in periods: multiplier = timeline_data.get_multiplier(period, 9) @@ -121,7 +121,7 @@ def output_text(self): max_periods_per_row = int((width - 21) / 11) for i in range(0, len(periods), max_periods_per_row): - __output_row__text__(timeline_data, periods[i:i + max_periods_per_row], names) + __output_row__text__(timeline_data, periods[i : i + max_periods_per_row], names) def output_html(self): if self.changes.get_commits(): @@ -135,7 +135,7 @@ def output_html(self): print(timeline_xml) for i in range(0, len(periods), max_periods_per_row): - __output_row__html__(timeline_data, periods[i:i + max_periods_per_row], names) + __output_row__html__(timeline_data, periods[i : i + max_periods_per_row], names) timeline_xml = "" print(timeline_xml) diff --git a/gitinspector/terminal.py b/gitinspector/terminal.py index 781c0e48..4c6ba997 100644 --- a/gitinspector/terminal.py +++ b/gitinspector/terminal.py @@ -173,7 +173,7 @@ def output_progress(text, pos, length): progress_text = text.format(100 * pos / length) if len(progress_text) > width: - progress_text = "...%s" % progress_text[-width + 3:] + progress_text = "...%s" % progress_text[-width + 3 :] print("\r{0}\r{1}".format(" " * width, progress_text), end="") sys.stdout.flush() diff --git a/tests/test_basedir.py b/tests/test_basedir.py new file mode 100644 index 00000000..a9833f80 --- /dev/null +++ b/tests/test_basedir.py @@ -0,0 +1,31 @@ +import os +import unittest +from pathlib import Path +from gitinspector import basedir + +class TestBasedirModule(unittest.TestCase): + + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + self.TEST_BASEDIR = Path(os.path.dirname(os.path.abspath(__file__))) + self.PROJECT_BASEDIR = Path(self.TEST_BASEDIR).parent + self.MODULE_BASEDIR = Path(self.PROJECT_BASEDIR, 'gitinspector') + self.CWD = os.getcwd() + + def test_get_basedir(self): + expected = str(self.MODULE_BASEDIR) + actual = basedir.get_basedir() + self.assertEqual(expected, actual) + + def test_get_basedir_git(self): + expected = self.CWD + actual = basedir.get_basedir_git() + self.assertEqual(expected, actual) + + def test_get_basedir_git_with_path(self): + expected = str(self.PROJECT_BASEDIR) + actual = basedir.get_basedir_git(self.TEST_BASEDIR) + self.assertEqual(expected, actual) \ No newline at end of file diff --git a/tests/test_blame.py b/tests/test_blame.py new file mode 100644 index 00000000..7b0c4c8b --- /dev/null +++ b/tests/test_blame.py @@ -0,0 +1,24 @@ +import os +import unittest +from pathlib import Path +from gitinspector import blame + +class TestBlameModule(unittest.TestCase): + + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + self.TEST_BASEDIR = Path(os.path.dirname(os.path.abspath(__file__))) + self.PROJECT_BASEDIR = Path(self.TEST_BASEDIR).parent + self.MODULE_BASEDIR = 
Path(self.PROJECT_BASEDIR, 'gitinspector') + self.CWD = os.getcwd() + + def test_BlameEntry_attrs(self): + blame_entry = blame.BlameEntry() + expected = 0 + self.assertEqual(expected, blame_entry.rows) + self.assertEqual(expected, blame_entry.skew) + self.assertEqual(expected, blame_entry.comments) + From 6ef6397e63791cf26b515aae55e6bf4fed5e3470 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 19:53:38 -0500 Subject: [PATCH 24/46] Badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 01dc06dd..b9eb755b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ ![Build Status](https://github.com/jpwhite3/gitinspector/actions/workflows/python-package.yml/badge.svg) -[![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) +[[Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) [![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) [![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt) From 799c15342d95f86b6876b029d4a1fc948d017766 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 19:54:06 -0500 Subject: [PATCH 25/46] Badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b9eb755b..01dc06dd 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ ![Build Status](https://github.com/jpwhite3/gitinspector/actions/workflows/python-package.yml/badge.svg) -[[Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) +[![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) [![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) [![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt) From 53d81bcd2612dbc47e73c71ee43baae83c1ec252 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 28 Feb 2021 20:47:50 -0500 Subject: [PATCH 26/46] tests --- tests/test_basedir.py | 3 ++- tests/test_blame.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_basedir.py b/tests/test_basedir.py index a9833f80..bb138fa6 100644 --- a/tests/test_basedir.py +++ b/tests/test_basedir.py @@ -3,6 +3,7 @@ from pathlib import Path from gitinspector import basedir + class TestBasedirModule(unittest.TestCase): @classmethod @@ -28,4 +29,4 @@ def test_get_basedir_git(self): def test_get_basedir_git_with_path(self): expected = str(self.PROJECT_BASEDIR) actual = basedir.get_basedir_git(self.TEST_BASEDIR) - self.assertEqual(expected, actual) \ No newline at end of file + self.assertEqual(expected, actual) diff --git a/tests/test_blame.py b/tests/test_blame.py index 7b0c4c8b..98019c0f 100644 --- a/tests/test_blame.py +++ b/tests/test_blame.py @@ -3,6 +3,7 @@ from pathlib import Path from gitinspector import blame + class 
TestBlameModule(unittest.TestCase): @classmethod @@ -21,4 +22,3 @@ def test_BlameEntry_attrs(self): self.assertEqual(expected, blame_entry.rows) self.assertEqual(expected, blame_entry.skew) self.assertEqual(expected, blame_entry.comments) - From 23cbb297d922426a1c0b3040331e746b98466f07 Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 2 Mar 2021 22:04:56 -0500 Subject: [PATCH 27/46] More tests --- tests/test_changes.py | 97 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 tests/test_changes.py diff --git a/tests/test_changes.py b/tests/test_changes.py new file mode 100644 index 00000000..784eaec9 --- /dev/null +++ b/tests/test_changes.py @@ -0,0 +1,97 @@ +import unittest +from gitinspector import changes + + +FAKE_FILE_NAME = 'Arbitrary.ext' +FAKE_COMMIT_STRING = "1614563270|2021-02-28|53d81bcd2612dbc47e73c71ee43baae83c1ec252|JP White|jpwhite3@gmail.com" + + +class TestAuthorInfo(unittest.TestCase): + + def test_AuthorInfo_attrs(self): + author = changes.AuthorInfo() + expected_email = None + expected_insertions = 0 + expected_deletions = 0 + expected_commits = 0 + self.assertEqual(expected_email, author.email) + self.assertEqual(expected_insertions, author.insertions) + self.assertEqual(expected_deletions, author.deletions) + self.assertEqual(expected_commits, author.commits) + + +class TestFileDiff(unittest.TestCase): + + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + pass + + def test_FileDiff_init(self): + test_string = 'ArbitraryName|-++-+' + file_diff = changes.FileDiff(test_string) + expected_name = 'ArbitraryName' + self.assertEqual(expected_name, file_diff.name) + expected_insertions = 3 + self.assertEqual(expected_insertions, file_diff.insertions) + expected_deletions = 2 + self.assertEqual(expected_deletions, file_diff.deletions) + + def test_is_not_filediff_line(self): + actual = changes.FileDiff.is_filediff_line(FAKE_FILE_NAME) + self.assertFalse(actual) + + def test_is_filediff_line(self): + test_file_diff_string = "arbitrary|--- a/file.txt" + actual = changes.FileDiff.is_filediff_line(test_file_diff_string) + self.assertTrue(actual) + + def test_get_extension(self): + expected = 'ext' + actual = changes.FileDiff.get_extension(FAKE_FILE_NAME) + self.assertEqual(actual, expected) + + def test_get_extension_from_file_without_extension(self): + test_file_name = 'Arbitrary' + expected = '' + actual = changes.FileDiff.get_extension(test_file_name) + self.assertEqual(actual, expected) + + def test_get_filename(self): + expected = FAKE_FILE_NAME + actual = changes.FileDiff.get_filename(expected) + self.assertEqual(actual, expected) + + def test_is_not_valid_extension(self): + result = changes.FileDiff.is_valid_extension(FAKE_FILE_NAME) + self.assertFalse(result) + + def test_is_valid_extension(self): + test_file_name = 'Arbitrary.cpp' + result = changes.FileDiff.is_valid_extension(test_file_name) + self.assertTrue(result) + + +class TestCommitClass(unittest.TestCase): + + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + pass + + def test_Commit_init(self): + commit = changes.Commit(FAKE_COMMIT_STRING) + expected_timestamp = '1614563270' + expected_date = '2021-02-28' + expected_sha = '53d81bcd2612dbc47e73c71ee43baae83c1ec252' + expected_author = 'JP White' + expected_email = 'jpwhite3@gmail.com' + self.assertEqual(expected_timestamp, commit.timestamp) + self.assertEqual(expected_date, commit.date) + self.assertEqual(expected_sha, commit.sha) + self.assertEqual(expected_author, commit.author) + 
self.assertEqual(expected_email, commit.email) \ No newline at end of file From 6bf75a4177385ec48d29d6a70fbce2c7d81e3eb9 Mon Sep 17 00:00:00 2001 From: JP White Date: Thu, 4 Mar 2021 00:01:39 -0500 Subject: [PATCH 28/46] More tests --- gitinspector/extensions.py | 2 +- tests/test_changes.py | 20 +++++++++++++++- tests/test_config.py | 49 ++++++++++++++++++++++++++++++++++++++ tests/test_extensions.py | 27 +++++++++++++++++++++ 4 files changed, 96 insertions(+), 2 deletions(-) create mode 100644 tests/test_config.py create mode 100644 tests/test_extensions.py diff --git a/gitinspector/extensions.py b/gitinspector/extensions.py index 4d1f53b9..374e5438 100644 --- a/gitinspector/extensions.py +++ b/gitinspector/extensions.py @@ -18,7 +18,7 @@ # along with gitinspector. If not, see . -DEFAULT_EXTENSIONS = ["java", "c", "cc", "cpp", "h", "hh", "hpp", "py", "glsl", "rb", "js", "sql"] +DEFAULT_EXTENSIONS = ["java", "c", "cc", "cpp", "h", "hh", "hpp", "py", "glsl", "rb", "js", "sql", "go"] __extensions__ = DEFAULT_EXTENSIONS __located_extensions__ = set() diff --git a/tests/test_changes.py b/tests/test_changes.py index 784eaec9..593fa4d1 100644 --- a/tests/test_changes.py +++ b/tests/test_changes.py @@ -94,4 +94,22 @@ def test_Commit_init(self): self.assertEqual(expected_date, commit.date) self.assertEqual(expected_sha, commit.sha) self.assertEqual(expected_author, commit.author) - self.assertEqual(expected_email, commit.email) \ No newline at end of file + self.assertEqual(expected_email, commit.email) + + def test_get_author_and_email(self): + expected_author = 'JP White' + expected_email = 'jpwhite3@gmail.com' + actual_author, actual_email = changes.Commit.get_author_and_email(FAKE_COMMIT_STRING) + self.assertEqual(expected_author, actual_author) + self.assertEqual(expected_email, actual_email) + + def test_is_commit_line(self): + result = changes.Commit.is_commit_line(FAKE_COMMIT_STRING) + self.assertTrue(result) + + def test_add_filediff(self): + commit = changes.Commit(FAKE_COMMIT_STRING) + commit.add_filediff(1) + expected = [1] + actual = commit.get_filediffs() + self.assertEqual(expected, actual) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..5260866a --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,49 @@ +import unittest +from gitinspector import config + + +class TestConfig(unittest.TestCase): + + def test_GitConfig_init(self): + expected_run = 'run' + expected_repo = 'repo' + expected_global_only = False + test_config = config.GitConfig(expected_run, expected_repo) + self.assertEqual(expected_run, test_config.run) + self.assertEqual(expected_repo, test_config.repo) + self.assertEqual(expected_global_only, test_config.global_only) + + def test_read_git_config_unknown_variable(self): + expected_result = '' + test_config = config.GitConfig('arbitrary', '.') + actual_result = test_config.__read_git_config__('unknown') + self.assertEqual(expected_result, actual_result) + + def test_read_git_config(self): + expected_result = '1' + test_config = config.GitConfig('arbitrary', '.') + actual_result = test_config.__read_git_config__('arbitrary') + self.assertEqual(expected_result, actual_result) + + def test_read_git_config_string(self): + expected_result = (True, '1') + test_config = config.GitConfig('arbitrary', '.') + actual_result = test_config.__read_git_config_string__('arbitrary') + self.assertEqual(expected_result, actual_result) + + def test_read_git_config_string_unknown(self): + expected_result = (False, None) + test_config = 
config.GitConfig('arbitrary', '.') + actual_result = test_config.__read_git_config_string__('unknown') + self.assertEqual(expected_result, actual_result) + + def test_read(self): + class Dummy(): + pass + test_config = config.GitConfig(Dummy(), '.') + + with self.assertRaises(AttributeError): + self.assertFalse(test_config.run.hard) + + test_config.read() + self.assertFalse(test_config.run.hard) diff --git a/tests/test_extensions.py b/tests/test_extensions.py new file mode 100644 index 00000000..3b30fa6f --- /dev/null +++ b/tests/test_extensions.py @@ -0,0 +1,27 @@ +import unittest +from gitinspector import extensions + + +class TestExtensions(unittest.TestCase): + + def test_001_extensions_get(self): + expected = extensions.DEFAULT_EXTENSIONS + actual = extensions.get() + self.assertEqual(expected, actual) + + def test_002_extensions_define(self): + expected = 'txt,md' + extensions.define(expected) + actual = extensions.get() + self.assertEqual(expected.split(","), actual) + + def test_003_add_located(self): + expected = set('*') + extensions.add_located('') + actual = extensions.get_located() + self.assertEqual(expected, actual) + + expected = set(['ext', '*']) + extensions.add_located('ext') + actual = extensions.get_located() + self.assertEqual(expected, actual) From 8bbac5717f24cacf608315b679b541bfc7ad9aeb Mon Sep 17 00:00:00 2001 From: JP White Date: Thu, 4 Mar 2021 00:11:07 -0500 Subject: [PATCH 29/46] More tests --- tests/test_config.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/tests/test_config.py b/tests/test_config.py index 5260866a..07cc7dfe 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -19,18 +19,6 @@ def test_read_git_config_unknown_variable(self): actual_result = test_config.__read_git_config__('unknown') self.assertEqual(expected_result, actual_result) - def test_read_git_config(self): - expected_result = '1' - test_config = config.GitConfig('arbitrary', '.') - actual_result = test_config.__read_git_config__('arbitrary') - self.assertEqual(expected_result, actual_result) - - def test_read_git_config_string(self): - expected_result = (True, '1') - test_config = config.GitConfig('arbitrary', '.') - actual_result = test_config.__read_git_config_string__('arbitrary') - self.assertEqual(expected_result, actual_result) - def test_read_git_config_string_unknown(self): expected_result = (False, None) test_config = config.GitConfig('arbitrary', '.') From c52bd200dbb8c3e4e5258e8e5cb7c80bebeef3cc Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 13 Mar 2021 23:12:42 -0500 Subject: [PATCH 30/46] More tests --- Makefile | 2 +- tests/test_changes.py | 12 +++---- tests/test_config.py | 12 +++---- tests/test_filtering.py | 36 ++++++++++++++++++++ tests/test_format.py | 75 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 124 insertions(+), 13 deletions(-) create mode 100644 tests/test_filtering.py create mode 100644 tests/test_format.py diff --git a/Makefile b/Makefile index 752e5106..3c1f6b9e 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ lint: ## check style with flake8 flake8 gitinspector tests --count --ignore=E203,E722,W503,E401,C901 --exit-zero --max-complexity=10 --max-line-length=127 --statistics --builtins="_" format: ## auto format all the code with black - black gitinspector --line-length 127 + black ./gitinspector --line-length 127 test: ## run tests quickly with the default Python pytest diff --git a/tests/test_changes.py b/tests/test_changes.py index 593fa4d1..eec102eb 100644 --- a/tests/test_changes.py +++ 
b/tests/test_changes.py @@ -65,13 +65,13 @@ def test_get_filename(self): self.assertEqual(actual, expected) def test_is_not_valid_extension(self): - result = changes.FileDiff.is_valid_extension(FAKE_FILE_NAME) - self.assertFalse(result) + return_value = changes.FileDiff.is_valid_extension(FAKE_FILE_NAME) + self.assertFalse(return_value) def test_is_valid_extension(self): test_file_name = 'Arbitrary.cpp' - result = changes.FileDiff.is_valid_extension(test_file_name) - self.assertTrue(result) + return_value = changes.FileDiff.is_valid_extension(test_file_name) + self.assertTrue(return_value) class TestCommitClass(unittest.TestCase): @@ -104,8 +104,8 @@ def test_get_author_and_email(self): self.assertEqual(expected_email, actual_email) def test_is_commit_line(self): - result = changes.Commit.is_commit_line(FAKE_COMMIT_STRING) - self.assertTrue(result) + return_value = changes.Commit.is_commit_line(FAKE_COMMIT_STRING) + self.assertTrue(return_value) def test_add_filediff(self): commit = changes.Commit(FAKE_COMMIT_STRING) diff --git a/tests/test_config.py b/tests/test_config.py index 07cc7dfe..a8d330ca 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -14,16 +14,16 @@ def test_GitConfig_init(self): self.assertEqual(expected_global_only, test_config.global_only) def test_read_git_config_unknown_variable(self): - expected_result = '' + expected_return_value = '' test_config = config.GitConfig('arbitrary', '.') - actual_result = test_config.__read_git_config__('unknown') - self.assertEqual(expected_result, actual_result) + actual_return_value = test_config.__read_git_config__('unknown') + self.assertEqual(expected_return_value, actual_return_value) def test_read_git_config_string_unknown(self): - expected_result = (False, None) + expected_return_value = (False, None) test_config = config.GitConfig('arbitrary', '.') - actual_result = test_config.__read_git_config_string__('unknown') - self.assertEqual(expected_result, actual_result) + actual_return_value = test_config.__read_git_config_string__('unknown') + self.assertEqual(expected_return_value, actual_return_value) def test_read(self): class Dummy(): diff --git a/tests/test_filtering.py b/tests/test_filtering.py new file mode 100644 index 00000000..327dbd3a --- /dev/null +++ b/tests/test_filtering.py @@ -0,0 +1,36 @@ +import unittest +from gitinspector import filtering + +TEST_STRING = 'arbitrary' + + +class TestFiltering(unittest.TestCase): + + def test_InvalidRegExpError(self): + with self.assertRaises(filtering.InvalidRegExpError): + raise filtering.InvalidRegExpError(TEST_STRING) + + def test_get(self): + expected = filtering.__filters__ + actual = filtering.get() + self.assertEqual(expected, actual) + + def test_add(self): + filtering.add(TEST_STRING) + expected = [{TEST_STRING}, set()] + actual = filtering.get()['file'] + self.assertEqual(expected, actual) + + def test_get_filered(self): + filtering.add(TEST_STRING) + expected = set() + actual = filtering.get_filered() + self.assertEqual(expected, actual) + + def test_has_filtered(self): + self.assertFalse(filtering.has_filtered()) + + def test_set_filtered(self): + test_commit_sha = '53d81bcd2612dbc47e73c71ee43baae83c1ec252' + return_value = filtering.set_filtered(test_commit_sha) + self.assertFalse(return_value) diff --git a/tests/test_format.py b/tests/test_format.py new file mode 100644 index 00000000..4a230d67 --- /dev/null +++ b/tests/test_format.py @@ -0,0 +1,75 @@ +import os +import sys +import json +import unittest +from hashlib import sha256 +from gitinspector import 
format +from io import StringIO +from contextlib import contextmanager + +TEST_STRING = 'arbitrary' + + +class DummyRepo: + name = TEST_STRING + + +@contextmanager +def print_capture(*args, **kwds): + temp_out = StringIO() # Create the in-memory "file" + try: + sys.stdout = temp_out # Replace default stdout (terminal) with our stream + yield temp_out + finally: + sys.stdout = sys.__stdout__ # Restore default stdout + + +class TestFormat(unittest.TestCase): + + def test_InvalidFormatError(self): + with self.assertRaises(format.InvalidFormatError): + raise format.InvalidFormatError(TEST_STRING) + + def test_select(self): + test_format = 'json' + return_value = format.select(test_format) + self.assertTrue(return_value) + + def test_get_selected(self): + test_format = 'json' + format.select(test_format) + expected = test_format + actual = format.get_selected() + self.assertEqual(expected, actual) + + def test_is_interactive_format(self): + test_format = 'json' + format.select(test_format) + return_value = format.is_interactive_format() + self.assertFalse(return_value) + + def test__output_html_template__(self): + test_template_path = os.path.join('html', 'html.header') + return_value = format.__output_html_template__(test_template_path) + return_value_hash = sha256(return_value.encode('utf-8')).hexdigest() + expected_hash = '6b113dca32e7947e21ad9ad910c4995e62672ca4c0bc34577e33d2e328da7b3a' + self.assertEqual(expected_hash, return_value_hash) + + def test__get_zip_file_content__(self): + return_value = format.__get_zip_file_content__('LICENSE.txt') + return_value_hash = sha256(return_value.encode('utf-8')).hexdigest() + expected_hash = '52cb566b16d84314b92b91361ed072eaaf166e8d3dfa3d0fd3577613925f205c' + self.assertEqual(expected_hash, return_value_hash) + + def test_json_output_header_and_footer(self): + test_format = 'json' + format.select(test_format) + repos = [DummyRepo()] + with print_capture() as output: + format.output_header(repos) + format.output_footer() + output_text = output.getvalue()[:-2].replace('\n', '').replace('\t', '')[:-2] + "}}" + output_json = json.loads(output_text) + self.assertIn('report_date', output_json['gitinspector']) + self.assertEqual(output_json['gitinspector']['repository'], 'arbitrary') + self.assertEqual(output_json['gitinspector']['version'], '0.5.0dev') From 9956e7684e3db6a7e08b2bfcb3be247dcff8a94b Mon Sep 17 00:00:00 2001 From: JP White Date: Wed, 17 Mar 2021 21:52:23 -0400 Subject: [PATCH 31/46] More tests --- gitinspector/gitinspector.py | 8 ++++---- tests/test_gitinspector.py | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 4 deletions(-) create mode 100644 tests/test_gitinspector.py diff --git a/gitinspector/gitinspector.py b/gitinspector/gitinspector.py index 2f8ca3a0..84d95832 100644 --- a/gitinspector/gitinspector.py +++ b/gitinspector/gitinspector.py @@ -103,9 +103,9 @@ def process(self, repos): def __check_python_version__(): - if sys.version_info < (2, 6): + if sys.version_info < (3, 6): python_version = str(sys.version_info[0]) + "." 
+ str(sys.version_info[1]) - sys.exit(_("gitinspector requires at least Python 2.6 to run (version {0} was found).").format(python_version)) + sys.exit(_("gitinspector requires at least Python 3.6 to run (version {0} was found).").format(python_version)) def __get_validated_git_repos__(repos_relative): @@ -127,10 +127,10 @@ def __get_validated_git_repos__(repos_relative): return repos -def main(): +def main(argv=None): terminal.check_terminal_encoding() terminal.set_stdin_encoding() - argv = terminal.convert_command_line_to_utf8() + argv = terminal.convert_command_line_to_utf8() if argv is None else argv run = Runner() repos = [] diff --git a/tests/test_gitinspector.py b/tests/test_gitinspector.py new file mode 100644 index 00000000..74f3d07e --- /dev/null +++ b/tests/test_gitinspector.py @@ -0,0 +1,35 @@ +import unittest +import json +import pytest +from gitinspector import gitinspector + +TEST_STRING = 'arbitrary' + + +class TestGitInspector(unittest.TestCase): + + @pytest.fixture(autouse=True) + def capsys(self, capsys): + self.capsys = capsys + + def test_Runner(self): + test_runner = gitinspector.Runner() + expected_attrs = { + "hard": False, + "include_metrics": False, + "list_file_types": False, + "localize_output": False, + "responsibilities": False, + "grading": False, + "timeline": False, + "useweeks": False + } + for key, val in expected_attrs.items(): + self.assertEqual(getattr(test_runner, key), val) + + def test_main(self): + self.maxDiff = None + gitinspector.main() + out, err = self.capsys.readouterr() + json.loads(out) + self.assertEqual(err, '') From ec67defc58b4f3b71687515737de715c2fa7f734 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 4 Apr 2021 21:29:32 -0400 Subject: [PATCH 32/46] Automating release --- .github/workflows/auto-merge.yml | 13 ++++++++++ .github/workflows/python-package.yml | 17 +++++++------ .github/workflows/release.yml | 36 ++++++++++++++++++++++++++++ Makefile | 14 ++++++++++- requirements.txt | 13 +++++----- 5 files changed, 79 insertions(+), 14 deletions(-) create mode 100644 .github/workflows/auto-merge.yml create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml new file mode 100644 index 00000000..6d21280a --- /dev/null +++ b/.github/workflows/auto-merge.yml @@ -0,0 +1,13 @@ +name: auto-merge + +on: + pull_request: + +jobs: + auto-merge: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: ahmadnassri/action-dependabot-auto-merge@v2 + with: + github-token: ${{ secrets.mytoken }} \ No newline at end of file diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 008a6ed9..8a299c4d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -18,30 +18,33 @@ jobs: python-version: [3.6, 3.7, 3.8, 3.9] steps: - - uses: actions/checkout@v2 + - name: Checkout + uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} + - name: Install dependencies run: | python -m pip install --upgrade pip if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Lint with flake8 - run: | - make lint + run: make lint + - name: Test with pytest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - run: | - make test-coverage-report + run: make test-coverage-report - coveralls_finish: + coverage: needs: test runs-on: 
ubuntu-latest steps: - - name: Coveralls Finished + - name: Send Results to Coveralls uses: coverallsapp/github-action@master env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..9e720ce0 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,36 @@ +name: Release + +on: + push: + tags: + - 'v*.*.*' + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip wheel twine + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Test + run: make dist + + - name: Release + id: release + uses: softprops/action-gh-release@v1 + with: + files: dist/* + fail_on_unmatched_files: true + prerelease: ${{ endsWith(github.ref, 'dev') || endsWith(github.ref, 'pre') }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/Makefile b/Makefile index 3c1f6b9e..7a48fe1d 100644 --- a/Makefile +++ b/Makefile @@ -59,7 +59,19 @@ test-coverage-report: test-coverage ## Report coverage to Coveralls release: dist ## package and upload a release twine upload dist/* -dist: clean requirements ## builds source and wheel package +tag-version: + @export VERSION_TAG=`python3 -c "from gitinspector.version import __version__; print(__version__)"` \ + && git tag v$$VERSION_TAG + +untag-version: + @export VERSION_TAG=`python3 -c "from gitinspector.version import __version__; print(__version__)"` \ + && git tag -d v$$VERSION_TAG + +push-tagged-version: tag-version + @export VERSION_TAG=`python3 -c "from gitinspector.version import __version__; print(__version__)"` \ + && git push origin v$$VERSION_TAG + +dist: clean ## builds source and wheel package python3 setup.py sdist python3 setup.py bdist_wheel ls -l dist diff --git a/requirements.txt b/requirements.txt index ae8dfef7..17c68dce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,21 +11,21 @@ -i https://pypi.org/simple appdirs==1.4.4 attrs==20.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -autopep8==1.5.5 black==20.8b1 bleach==3.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' certifi==2020.12.5 chardet==4.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' click==7.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' colorama==0.4.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -coverage==5.4 -coveralls==3.0.0 +coverage==5.5 +coveralls==3.0.1 docopt==0.6.2 docutils==0.16; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' flake8==3.8.4 idna==2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +importlib-metadata==3.7.2; python_version >= '3.6' iniconfig==1.1.1 -keyring==22.3.0; python_version >= '3.6' +keyring==23.0.0; python_version >= '3.6' mccabe==0.6.1 mypy-extensions==0.4.3 packaging==20.9; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' @@ -35,7 +35,7 @@ pluggy==0.13.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2 py==1.10.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' pycodestyle==2.6.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' pyflakes==2.2.0; python_version >= '2.7' and 
python_version not in '3.0, 3.1, 3.2, 3.3' -pygments==2.8.0; python_version >= '3.5' +pygments==2.8.1; python_version >= '3.5' pyparsing==2.4.7; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' pytest==6.2.2 readme-renderer==29.0 @@ -45,9 +45,10 @@ requests==2.25.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3 rfc3986==1.4.0 six==1.15.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' toml==0.10.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' -tqdm==4.58.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +tqdm==4.59.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' twine==3.3.0 typed-ast==1.4.2 typing-extensions==3.7.4.3 urllib3==1.26.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4' webencodings==0.5.1 +zipp==3.4.1; python_version >= '3.6' From caf5e88f9e427267210e8dd20e3e298f32261c5b Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 4 Apr 2021 21:32:53 -0400 Subject: [PATCH 33/46] Adding pipfile.lock to automate requirements --- .gitignore | 1 - Pipfile.lock | 464 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 464 insertions(+), 1 deletion(-) create mode 100644 Pipfile.lock diff --git a/.gitignore b/.gitignore index f20deee6..b43e5d88 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,4 @@ node_modules *.pyc *.tgz .DS_Store -Pipfile.lock .coverage \ No newline at end of file diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 00000000..fe1bbc62 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,464 @@ +{ + "_meta": { + "hash": { + "sha256": "eeaad7bc007adaa51ede465d1ccad2bf56d6ba3c6feef74d7218a91a2ceb4074" + }, + "pipfile-spec": 6, + "requires": {}, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": {}, + "develop": { + "appdirs": { + "hashes": [ + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" + ], + "version": "==1.4.4" + }, + "attrs": { + "hashes": [ + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.3.0" + }, + "black": { + "hashes": [ + "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea" + ], + "index": "pypi", + "version": "==20.8b1" + }, + "bleach": { + "hashes": [ + "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", + "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==3.3.0" + }, + "certifi": { + "hashes": [ + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + ], + "version": "==2020.12.5" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + 
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==7.1.2" + }, + "colorama": { + "hashes": [ + "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", + "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==0.4.4" + }, + "coverage": { + "hashes": [ + "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", + "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", + "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", + "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", + "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", + "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", + "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", + "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", + "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", + "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", + "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", + "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", + "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", + "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", + "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", + "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", + "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", + "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", + "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", + "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", + "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", + "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", + "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", + "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", + "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", + "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", + "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", + "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", + "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", + "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", + "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", + "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", + "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", + "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", + "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", + "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", + "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", + "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", + "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", + 
"sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", + "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", + "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", + "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", + "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", + "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", + "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", + "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", + "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", + "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", + "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", + "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", + "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" + ], + "index": "pypi", + "version": "==5.5" + }, + "coveralls": { + "hashes": [ + "sha256:7bd173b3425733661ba3063c88f180127cc2b20e9740686f86d2622b31b41385", + "sha256:cbb942ae5ef3d2b55388cb5b43e93a269544911535f1e750e1c656aef019ce60" + ], + "index": "pypi", + "version": "==3.0.1" + }, + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "version": "==0.6.2" + }, + "docutils": { + "hashes": [ + "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", + "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==0.16" + }, + "flake8": { + "hashes": [ + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + ], + "index": "pypi", + "version": "==3.8.4" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" + }, + "importlib-metadata": { + "hashes": [ + "sha256:18d5ff601069f98d5d605b6a4b50c18a34811d655c55548adc833e687289acde", + "sha256:407d13f55dc6f2a844e62325d18ad7019a436c4bfcaee34cda35f2be6e7c3e34" + ], + "markers": "python_version >= '3.6'", + "version": "==3.7.2" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "keyring": { + "hashes": [ + "sha256:237ff44888ba9b3918a7dcb55c8f1db909c95b6f071bfb46c6918f33f453a68a", + "sha256:29f407fd5509c014a6086f17338c70215c8d1ab42d5d49e0254273bc0a64bbfc" + ], + "markers": "python_version >= '3.6'", + "version": "==23.0.0" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "packaging": { + "hashes": [ + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + 
"sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.9" + }, + "pathspec": { + "hashes": [ + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + ], + "version": "==0.8.1" + }, + "pkginfo": { + "hashes": [ + "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", + "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75" + ], + "version": "==1.7.0" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.6.0" + }, + "pyflakes": { + "hashes": [ + "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", + "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.2.0" + }, + "pygments": { + "hashes": [ + "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", + "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" + ], + "markers": "python_version >= '3.5'", + "version": "==2.8.1" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" + }, + "pytest": { + "hashes": [ + "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9", + "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839" + ], + "index": "pypi", + "version": "==6.2.2" + }, + "readme-renderer": { + "hashes": [ + "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c", + "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db" + ], + "version": "==29.0" + }, + "regex": { + "hashes": [ + "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538", + "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4", + "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc", + "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa", + "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444", + "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1", + "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af", + "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8", + 
"sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9", + "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88", + "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba", + "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364", + "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e", + "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7", + "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0", + "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31", + "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683", + "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee", + "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b", + "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884", + "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c", + "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e", + "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562", + "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85", + "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c", + "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6", + "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d", + "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b", + "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70", + "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b", + "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b", + "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f", + "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0", + "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5", + "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5", + "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f", + "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e", + "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512", + "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d", + "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917", + "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f" + ], + "version": "==2020.11.13" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==2.25.1" + }, + "requests-toolbelt": { + "hashes": [ + "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", + "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" + ], + "version": "==0.9.1" + }, + "rfc3986": { + "hashes": [ + "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d", + "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50" + ], + "version": "==1.4.0" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "markers": "python_version >= '2.7' and 
python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.15.0" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" + }, + "tqdm": { + "hashes": [ + "sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7", + "sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==4.59.0" + }, + "twine": { + "hashes": [ + "sha256:2f6942ec2a17417e19d2dd372fc4faa424c87ee9ce49b4e20c427eb00a0f3f41", + "sha256:fcffa8fc37e8083a5be0728371f299598870ee1eccc94e9a25cef7b1dcfa8297" + ], + "index": "pypi", + "version": "==3.3.0" + }, + "typed-ast": { + "hashes": [ + "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1", + "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d", + "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6", + "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd", + "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37", + "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151", + "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07", + "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440", + "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70", + "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496", + "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea", + "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400", + "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc", + "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606", + "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc", + "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581", + "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412", + "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a", + "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2", + "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787", + "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f", + "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937", + "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64", + "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487", + "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b", + "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41", + "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a", + "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3", + "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", + "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" + ], + "version": "==1.4.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", + "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", + "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" + ], + 
"version": "==3.7.4.3" + }, + "urllib3": { + "hashes": [ + "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.3" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" + ], + "version": "==0.5.1" + }, + "zipp": { + "hashes": [ + "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", + "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" + ], + "markers": "python_version >= '3.6'", + "version": "==3.4.1" + } + } +} From da74b94f72e66d99f638c651c852d560781862df Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 4 Apr 2021 21:44:16 -0400 Subject: [PATCH 34/46] Create codeql-analysis.yml --- .github/workflows/codeql-analysis.yml | 67 +++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..bcd8d655 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,67 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + schedule: + - cron: '27 19 * * 5' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + language: [ 'cpp', 'javascript', 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + # Learn more: + # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 From 32fc7bab6f0b1f6cf1d7301ce0add4e6e5e47c07 Mon Sep 17 00:00:00 2001 From: JP White Date: Sun, 4 Apr 2021 22:21:19 -0400 Subject: [PATCH 35/46] Removing CodeQL scanning for .cpp files --- .github/workflows/codeql-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index bcd8d655..3113e504 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -28,7 +28,7 @@ jobs: strategy: fail-fast: false matrix: - language: [ 'cpp', 'javascript', 'python' ] + language: [ 'javascript', 'python' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] # Learn more: # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed From 488ec31f4f41daa49efffd5f80c1a145b046f8a8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Apr 2021 18:41:21 +0000 Subject: [PATCH 36/46] Bump urllib3 from 1.26.3 to 1.26.4 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.3 to 1.26.4. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.3...1.26.4) Signed-off-by: dependabot[bot] --- Pipfile.lock | 239 +++++++++++++++++++++++++++++------------------ requirements.txt | 63 ++++++------- 2 files changed, 179 insertions(+), 123 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index fe1bbc62..712d524c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -27,7 +27,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "black": { @@ -42,7 +41,6 @@ "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.3.0" }, "certifi": { @@ -52,28 +50,67 @@ ], "version": "==2020.12.5" }, + "cffi": { + "hashes": [ + "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", + "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", + "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", + "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", + 
"sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", + "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", + "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", + "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", + "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", + "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", + "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", + "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", + "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", + "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", + "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", + "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", + "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", + "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", + "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", + "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", + "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", + "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", + "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", + "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", + "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", + "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", + "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", + "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" + ], + "version": "==1.14.5" + }, "chardet": { "hashes": [ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.0.0" }, "click": { "hashes": [ - "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", - "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + "sha256:681c9380a24b22fec089c8e5ffe40aa16a0da79f248a26fe2481bfa8170bfcc1", + "sha256:e4315a188403c0258bbc4a4e31863e48fc301c4e95b8007a8eeda0391158df13" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==7.1.2" + "version": "==8.0.0a1" }, "colorama": { "hashes": [ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.4.4" }, "coverage": { @@ -142,6 +179,23 @@ "index": "pypi", "version": "==3.0.1" }, + "cryptography": { + "hashes": [ + "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d", + "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959", + "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6", + 
"sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873", + "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2", + "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713", + "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1", + "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177", + "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250", + "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca", + "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d", + "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9" + ], + "version": "==3.4.7" + }, "docopt": { "hashes": [ "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" @@ -150,11 +204,10 @@ }, "docutils": { "hashes": [ - "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", - "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + "sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf", + "sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.16" + "version": "==0.17" }, "flake8": { "hashes": [ @@ -169,16 +222,14 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "importlib-metadata": { "hashes": [ - "sha256:18d5ff601069f98d5d605b6a4b50c18a34811d655c55548adc833e687289acde", - "sha256:407d13f55dc6f2a844e62325d18ad7019a436c4bfcaee34cda35f2be6e7c3e34" + "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a", + "sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe" ], - "markers": "python_version >= '3.6'", - "version": "==3.7.2" + "version": "==3.10.0" }, "iniconfig": { "hashes": [ @@ -187,13 +238,20 @@ ], "version": "==1.1.1" }, + "jeepney": { + "hashes": [ + "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657", + "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae" + ], + "markers": "sys_platform == 'linux'", + "version": "==0.6.0" + }, "keyring": { "hashes": [ - "sha256:237ff44888ba9b3918a7dcb55c8f1db909c95b6f071bfb46c6918f33f453a68a", - "sha256:29f407fd5509c014a6086f17338c70215c8d1ab42d5d49e0254273bc0a64bbfc" + "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", + "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48" ], - "markers": "python_version >= '3.6'", - "version": "==23.0.0" + "version": "==23.0.1" }, "mccabe": { "hashes": [ @@ -214,7 +272,6 @@ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, "pathspec": { @@ -233,18 +290,16 @@ }, "pluggy": { "hashes": [ - "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", - "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + "sha256:265a94bf44ca13662f12fcd1b074c14d4b269a712f051b6f644ef7e705d6735f", + "sha256:467f0219e89bb5061a8429c6fc5cf055fa3983a0e68e84a1d205046306b37d9e" ], - "markers": "python_version >= '2.7' and python_version 
not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.13.1" + "version": "==1.0.0.dev0" }, "py": { "hashes": [ "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.10.0" }, "pycodestyle": { @@ -252,15 +307,20 @@ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, + "pycparser": { + "hashes": [ + "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", + "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" + ], + "version": "==2.20" + }, "pyflakes": { "hashes": [ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.2.0" }, "pygments": { @@ -268,16 +328,14 @@ "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" ], - "markers": "python_version >= '3.5'", "version": "==2.8.1" }, "pyparsing": { "hashes": [ - "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", - "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + "sha256:1c6409312ce2ce2997896af5756753778d5f1603666dba5587804f09ad82ed27", + "sha256:f4896b4cc085a1f8f8ae53a1a90db5a86b3825ff73eb974dffee3d9e701007f4" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.4.7" + "version": "==3.0.0b2" }, "pytest": { "hashes": [ @@ -296,56 +354,55 @@ }, "regex": { "hashes": [ - "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538", - "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4", - "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc", - "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa", - "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444", - "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1", - "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af", - "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8", - "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9", - "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88", - "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba", - "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364", - "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e", - "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7", - "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0", - "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31", - "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683", - "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee", - "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b", - "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884", - 
"sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c", - "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e", - "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562", - "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85", - "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c", - "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6", - "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d", - "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b", - "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70", - "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b", - "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b", - "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f", - "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0", - "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5", - "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5", - "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f", - "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e", - "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512", - "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d", - "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917", - "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f" - ], - "version": "==2020.11.13" + "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", + "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", + "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", + "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", + "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", + "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", + "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", + "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", + "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", + "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", + "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", + "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", + "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", + "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", + "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", + "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", + "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", + "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", + "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", + "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", + "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", + "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", + "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", + "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", + 
"sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", + "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", + "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", + "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", + "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", + "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", + "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", + "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", + "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", + "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", + "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", + "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", + "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", + "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", + "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", + "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", + "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" + ], + "version": "==2021.4.4" }, "requests": { "hashes": [ "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.25.1" }, "requests-toolbelt": { @@ -362,12 +419,19 @@ ], "version": "==1.4.0" }, + "secretstorage": { + "hashes": [ + "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f", + "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195" + ], + "markers": "sys_platform == 'linux'", + "version": "==3.3.1" + }, "six": { "hashes": [ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "toml": { @@ -375,16 +439,14 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "tqdm": { "hashes": [ - "sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7", - "sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33" + "sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3", + "sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==4.59.0" + "version": "==4.60.0" }, "twine": { "hashes": [ @@ -439,11 +501,11 @@ }, "urllib3": { "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.3" + "index": 
"pypi", + "version": "==1.26.4" }, "webencodings": { "hashes": [ @@ -457,7 +519,6 @@ "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" ], - "markers": "python_version >= '3.6'", "version": "==3.4.1" } } diff --git a/requirements.txt b/requirements.txt index 17c68dce..8df2067c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,54 +1,49 @@ -# -# These requirements were autogenerated by pipenv -# To regenerate from the project's Pipfile, run: -# -# pipenv lock --requirements --dev -# - -# Note: in pipenv 2020.x, "--dev" changed to emit both default and development -# requirements. To emit only development requirements, pass "--dev-only". - --i https://pypi.org/simple +-i https://pypi.org/simple/ appdirs==1.4.4 -attrs==20.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +attrs==20.3.0 black==20.8b1 -bleach==3.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' +bleach==3.3.0 certifi==2020.12.5 -chardet==4.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -click==7.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -colorama==0.4.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' +cffi==1.14.5 +chardet==4.0.0 +click==8.0.0a1 +colorama==0.4.4 coverage==5.5 coveralls==3.0.1 +cryptography==3.4.7 docopt==0.6.2 -docutils==0.16; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' +docutils==0.17 flake8==3.8.4 -idna==2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -importlib-metadata==3.7.2; python_version >= '3.6' +idna==2.10 +importlib-metadata==3.10.0 iniconfig==1.1.1 -keyring==23.0.0; python_version >= '3.6' +jeepney==0.6.0 ; sys_platform == 'linux' +keyring==23.0.1 mccabe==0.6.1 mypy-extensions==0.4.3 -packaging==20.9; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +packaging==20.9 pathspec==0.8.1 pkginfo==1.7.0 -pluggy==0.13.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -py==1.10.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -pycodestyle==2.6.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -pyflakes==2.2.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -pygments==2.8.1; python_version >= '3.5' -pyparsing==2.4.7; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' +pluggy==1.0.0.dev0 +py==1.10.0 +pycodestyle==2.6.0 +pycparser==2.20 +pyflakes==2.2.0 +pygments==2.8.1 +pyparsing==3.0.0b2 pytest==6.2.2 readme-renderer==29.0 -regex==2020.11.13 +regex==2021.4.4 requests-toolbelt==0.9.1 -requests==2.25.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' +requests==2.25.1 rfc3986==1.4.0 -six==1.15.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -toml==0.10.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' -tqdm==4.59.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +secretstorage==3.3.1 ; sys_platform == 'linux' +six==1.15.0 +toml==0.10.2 +tqdm==4.60.0 twine==3.3.0 typed-ast==1.4.2 typing-extensions==3.7.4.3 -urllib3==1.26.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4' +urllib3==1.26.4 webencodings==0.5.1 -zipp==3.4.1; python_version >= '3.6' +zipp==3.4.1 From 5208d91d5b41dbb4bed2034878bbda36baa264f0 
Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 6 Apr 2021 21:00:12 -0400 Subject: [PATCH 37/46] Update auto-merge.yml --- .github/workflows/auto-merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 6d21280a..3554b67a 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -10,4 +10,4 @@ jobs: - uses: actions/checkout@v2 - uses: ahmadnassri/action-dependabot-auto-merge@v2 with: - github-token: ${{ secrets.mytoken }} \ No newline at end of file + github-token: ${{ secrets.GITHUB_TOKEN }} From 7d4643033d3dc99fa52abdd2a5709772a5846223 Mon Sep 17 00:00:00 2001 From: JP White Date: Thu, 13 May 2021 13:42:09 -0400 Subject: [PATCH 38/46] More tests --- tests/test_gravatar.py | 13 +++++++++++++ tests/test_interval.py | 22 ++++++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 tests/test_gravatar.py create mode 100644 tests/test_interval.py diff --git a/tests/test_gravatar.py b/tests/test_gravatar.py new file mode 100644 index 00000000..7cacbb77 --- /dev/null +++ b/tests/test_gravatar.py @@ -0,0 +1,13 @@ +import unittest +from gitinspector import gravatar + +TEST_STRING = 'arbitrary' + + +class TestGravatar(unittest.TestCase): + + def test_get_url(self): + expected_url = 'https://www.gravatar.com/avatar/c181b12d45d1fd849f885221f3ee3f39?default=identicon' + arbitrary_email = TEST_STRING + '@example.com' + actual_url = gravatar.get_url(arbitrary_email) + self.assertEqual(expected_url, actual_url) diff --git a/tests/test_interval.py b/tests/test_interval.py new file mode 100644 index 00000000..d34a377d --- /dev/null +++ b/tests/test_interval.py @@ -0,0 +1,22 @@ +import unittest +from gitinspector import interval + +TEST_STRING = 'arbitrary' + + +class TestInterval(unittest.TestCase): + + def test_has_interval(self): + actual = interval.has_interval() + self.assertFalse(actual) + + def test_get_since(self): + expected = '' + actual = interval.get_since() + self.assertEqual(expected, actual) + + def test_set_since(self): + expected = '--since=' + TEST_STRING + interval.set_since(TEST_STRING) + actual = interval.get_since() + self.assertEqual(expected, actual) From 54960ee35ab0dcc308e22c1e6cfd115811715f85 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 04:24:13 +0000 Subject: [PATCH 39/46] Bump urllib3 from 1.26.4 to 1.26.5 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.4 to 1.26.5. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.4...1.26.5) --- updated-dependencies: - dependency-name: urllib3 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- Pipfile.lock | 144 +++++++++++++++++++++++++---------------------- requirements.txt | 24 ++++---- 2 files changed, 90 insertions(+), 78 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 712d524c..9ce08143 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -24,10 +24,10 @@ }, "attrs": { "hashes": [ - "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", - "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", + "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" ], - "version": "==20.3.0" + "version": "==21.2.0" }, "black": { "hashes": [ @@ -45,22 +45,31 @@ }, "certifi": { "hashes": [ - "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", - "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", + "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" ], - "version": "==2020.12.5" + "version": "==2021.5.30" }, "cffi": { "hashes": [ "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373", + "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69", + "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f", "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05", "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0", "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7", + "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f", "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76", "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed", "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", @@ -68,6 +77,7 @@ "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55", "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", @@ -85,8 +95,10 @@ "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + 
"sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc", "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333", "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" ], @@ -101,10 +113,10 @@ }, "click": { "hashes": [ - "sha256:681c9380a24b22fec089c8e5ffe40aa16a0da79f248a26fe2481bfa8170bfcc1", - "sha256:e4315a188403c0258bbc4a4e31863e48fc301c4e95b8007a8eeda0391158df13" + "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", + "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" ], - "version": "==8.0.0a1" + "version": "==8.0.1" }, "colorama": { "hashes": [ @@ -204,10 +216,10 @@ }, "docutils": { "hashes": [ - "sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf", - "sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c" + "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", + "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61" ], - "version": "==0.17" + "version": "==0.17.1" }, "flake8": { "hashes": [ @@ -226,10 +238,10 @@ }, "importlib-metadata": { "hashes": [ - "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a", - "sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe" + "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786", + "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5" ], - "version": "==3.10.0" + "version": "==4.4.0" }, "iniconfig": { "hashes": [ @@ -325,10 +337,10 @@ }, "pygments": { "hashes": [ - "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", - "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" + "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", + "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" ], - "version": "==2.8.1" + "version": "==2.9.0" }, "pyparsing": { "hashes": [ @@ -414,10 +426,10 @@ }, "rfc3986": { "hashes": [ - "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d", - "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50" + "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", + "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" ], - "version": "==1.4.0" + "version": "==1.5.0" }, "secretstorage": { "hashes": [ @@ -429,10 +441,10 @@ }, "six": { "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.15.0" + "version": "==1.16.0" }, "toml": { "hashes": [ @@ -443,10 +455,10 @@ }, "tqdm": { "hashes": [ - "sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3", - "sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae" + "sha256:736524215c690621b06fc89d0310a49822d75e599fcd0feb7cc742b98d692493", + "sha256:cd5791b5d7c3f2f1819efc81d36eb719a38e0906a7380365c556779f585ea042" ], - "version": "==4.60.0" + "version": "==4.61.0" }, "twine": { "hashes": [ @@ -458,54 +470,54 @@ }, "typed-ast": { "hashes": [ - 
"sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1", - "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d", - "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6", - "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd", - "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37", - "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151", - "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07", - "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440", - "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70", - "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496", - "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea", - "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400", - "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc", - "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606", - "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc", - "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581", - "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412", - "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a", - "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2", - "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787", - "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f", - "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937", - "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64", - "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487", - "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b", - "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41", - "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a", - "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3", - "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", - "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" - ], - "version": "==1.4.2" + "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", + "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", + "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", + "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", + "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", + "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", + "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", + "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", + "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", + "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", + "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", + "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", + "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", + "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", + "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", + 
"sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", + "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", + "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", + "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", + "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", + "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", + "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", + "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", + "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", + "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", + "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", + "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", + "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", + "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", + "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" + ], + "version": "==1.4.3" }, "typing-extensions": { "hashes": [ - "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", - "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", - "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" ], - "version": "==3.7.4.3" + "version": "==3.10.0.0" }, "urllib3": { "hashes": [ - "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", - "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" + "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", + "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" ], "index": "pypi", - "version": "==1.26.4" + "version": "==1.26.5" }, "webencodings": { "hashes": [ diff --git a/requirements.txt b/requirements.txt index 8df2067c..3cb32dbd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,21 +1,21 @@ -i https://pypi.org/simple/ appdirs==1.4.4 -attrs==20.3.0 +attrs==21.2.0 black==20.8b1 bleach==3.3.0 -certifi==2020.12.5 +certifi==2021.5.30 cffi==1.14.5 chardet==4.0.0 -click==8.0.0a1 +click==8.0.1 colorama==0.4.4 coverage==5.5 coveralls==3.0.1 cryptography==3.4.7 docopt==0.6.2 -docutils==0.17 +docutils==0.17.1 flake8==3.8.4 idna==2.10 -importlib-metadata==3.10.0 +importlib-metadata==4.4.0 iniconfig==1.1.1 jeepney==0.6.0 ; sys_platform == 'linux' keyring==23.0.1 @@ -29,21 +29,21 @@ py==1.10.0 pycodestyle==2.6.0 pycparser==2.20 pyflakes==2.2.0 -pygments==2.8.1 +pygments==2.9.0 pyparsing==3.0.0b2 pytest==6.2.2 readme-renderer==29.0 regex==2021.4.4 requests-toolbelt==0.9.1 requests==2.25.1 -rfc3986==1.4.0 +rfc3986==1.5.0 secretstorage==3.3.1 ; sys_platform == 'linux' -six==1.15.0 +six==1.16.0 toml==0.10.2 -tqdm==4.60.0 +tqdm==4.61.0 twine==3.3.0 -typed-ast==1.4.2 -typing-extensions==3.7.4.3 -urllib3==1.26.4 +typed-ast==1.4.3 +typing-extensions==3.10.0.0 +urllib3==1.26.5 webencodings==0.5.1 zipp==3.4.1 From 7911fcf93e724c0e38d9f940a1afd97f24cbd24b Mon Sep 17 00:00:00 2001 From: JP White Date: Sat, 19 Jun 2021 20:05:01 -0400 Subject: [PATCH 40/46] This change-set drops support for Python 2.7 and lower. 
It also adds the beginnings of a robust test suite, and automated build process. --- .github/workflows/auto-merge.yml | 13 ------------- .github/workflows/python-package.yml | 13 ------------- README.md | 11 ++++------- 3 files changed, 4 insertions(+), 33 deletions(-) delete mode 100644 .github/workflows/auto-merge.yml diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml deleted file mode 100644 index 3554b67a..00000000 --- a/.github/workflows/auto-merge.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: auto-merge - -on: - pull_request: - -jobs: - auto-merge: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: ahmadnassri/action-dependabot-auto-merge@v2 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8a299c4d..605d1590 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -37,17 +37,4 @@ jobs: - name: Test with pytest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} run: make test-coverage-report - - coverage: - needs: test - runs-on: ubuntu-latest - steps: - - name: Send Results to Coveralls - uses: coverallsapp/github-action@master - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - with: - github-token: ${{ secrets.github_token }} - parallel-finished: true \ No newline at end of file diff --git a/README.md b/README.md index 01dc06dd..6592034f 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,8 @@ -![Build Status](https://github.com/jpwhite3/gitinspector/actions/workflows/python-package.yml/badge.svg) -[![Coverage Status](https://coveralls.io/repos/github/jpwhite3/gitinspector/badge.svg?branch=master)](https://coveralls.io/github/jpwhite3/gitinspector?branch=master) -[![Latest release](https://img.shields.io/github/release/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/releases/latest) -[![License](https://img.shields.io/github/license/jpwhite3/gitinspector.svg?style=flat-square)](https://github.com/jpwhite3/gitinspector/blob/master/LICENSE.txt) - +[![Latest release](https://img.shields.io/github/release/ejwa/gitinspector.svg?style=flat-square)](https://github.com/ejwa/gitinspector/releases/latest) +[![License](https://img.shields.io/github/license/ejwa/gitinspector.svg?style=flat-square)](https://github.com/ejwa/gitinspector/blob/master/LICENSE.txt)
+<img
+     src="https://raw.githubusercontent.com/ejwa/gitinspector/master/gitinspector/html/gitinspector_piclet.png"/>
+<h3>About Gitinspector</h3>
@@ -56,4 +53,4 @@ The Debian packages offered with releases of gitinspector are unofficial and ver An [npm](https://npmjs.com) package is provided for convenience as well. To install it globally, execute `npm i -g gitinspector`. ### License -gitinspector is licensed under the *GNU GPL v3*. The gitinspector logo is partly based on the git logo; based on the work of Jason Long. The logo is licensed under the *Creative Commons Attribution 3.0 Unported License*. +gitinspector is licensed under the *GNU GPL v3*. The gitinspector logo is partly based on the git logo; based on the work of Jason Long. The logo is licensed under the *Creative Commons Attribution 3.0 Unported License*. \ No newline at end of file From 7b9d394791d306baadd5b2a43db7d149ce846a2b Mon Sep 17 00:00:00 2001 From: JP White Date: Wed, 23 Jun 2021 11:25:16 -0400 Subject: [PATCH 41/46] Converting space indentation back to tabs --- Pipfile | 2 +- Pipfile.lock | 427 +++++++-------- gitinspector/basedir.py | 54 +- gitinspector/blame.py | 336 ++++++------ gitinspector/changes.py | 512 +++++++++--------- gitinspector/clone.py | 48 +- gitinspector/comment.py | 222 ++++---- gitinspector/config.py | 140 ++--- gitinspector/extensions.py | 16 +- gitinspector/filtering.py | 100 ++-- gitinspector/format.py | 216 ++++---- gitinspector/gitinspector.py | 324 +++++------ gitinspector/gravatar.py | 20 +- gitinspector/help.py | 4 +- gitinspector/interval.py | 24 +- gitinspector/localization.py | 104 ++-- gitinspector/metrics.py | 246 ++++----- gitinspector/optval.py | 56 +- gitinspector/output/blameoutput.py | 300 +++++----- gitinspector/output/changesoutput.py | 366 +++++++------ gitinspector/output/extensionsoutput.py | 173 +++--- gitinspector/output/filteringoutput.py | 192 ++++--- gitinspector/output/metricsoutput.py | 286 +++++----- gitinspector/output/outputable.py | 32 +- gitinspector/output/responsibilitiesoutput.py | 220 ++++---- gitinspector/output/timelineoutput.py | 323 ++++++----- gitinspector/responsibilities.py | 20 +- gitinspector/terminal.py | 178 +++--- gitinspector/timeline.py | 152 +++--- gitinspector/version.py | 4 +- 30 files changed, 2501 insertions(+), 2596 deletions(-) diff --git a/Pipfile b/Pipfile index de372288..74e63d59 100644 --- a/Pipfile +++ b/Pipfile @@ -4,11 +4,11 @@ verify_ssl = true name = "pypi" [packages] +black-but-with-tabs-instead-of-spaces = "*" [dev-packages] pytest = "*" flake8 = "*" -black = "*" twine = "*" coverage = "*" coveralls = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 9ce08143..630f3e54 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "eeaad7bc007adaa51ede465d1ccad2bf56d6ba3c6feef74d7218a91a2ceb4074" + "sha256": "87e9949234210245765703c4d654f0f7205eec399e5d2acba50242218b858a45" }, "pipfile-spec": 6, "requires": {}, @@ -13,8 +13,7 @@ } ] }, - "default": {}, - "develop": { + "default": { "appdirs": { "hashes": [ "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", @@ -27,20 +26,152 @@ "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==21.2.0" }, - "black": { + "black-but-with-tabs-instead-of-spaces": { "hashes": [ - "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea" + "sha256:01b00ac677000874b86c6f22efc965ab2cc16645a27b86b01bac2fed68a5a12e", + 
"sha256:bd5dd0842cef0a2c6714bd7381c8ead9106f68c64c64c706679a6a7fabb7ba48" ], "index": "pypi", - "version": "==20.8b1" + "version": "==19.11" + }, + "click": { + "hashes": [ + "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", + "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" + ], + "markers": "python_version >= '3.6'", + "version": "==8.0.1" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "pathspec": { + "hashes": [ + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + ], + "version": "==0.8.1" + }, + "regex": { + "hashes": [ + "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", + "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", + "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", + "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", + "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", + "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", + "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", + "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", + "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", + "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", + "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", + "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", + "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", + "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", + "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", + "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", + "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", + "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", + "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", + "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", + "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", + "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", + "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", + "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", + "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", + "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", + "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", + "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", + "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", + "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", + "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", + "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", + "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", + "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", + 
"sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", + "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", + "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", + "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", + "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", + "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", + "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" + ], + "version": "==2021.4.4" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" + }, + "typed-ast": { + "hashes": [ + "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", + "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", + "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", + "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", + "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", + "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", + "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", + "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", + "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", + "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", + "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", + "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", + "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", + "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", + "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", + "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", + "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", + "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", + "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", + "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", + "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", + "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", + "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", + "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", + "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", + "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", + "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", + "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", + "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", + "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" + ], + "version": "==1.4.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "version": "==3.10.0.0" + } + }, 
+ "develop": { + "attrs": { + "hashes": [ + "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", + "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==21.2.0" }, "bleach": { "hashes": [ "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.3.0" }, "certifi": { @@ -50,79 +181,20 @@ ], "version": "==2021.5.30" }, - "cffi": { - "hashes": [ - "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", - "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373", - "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69", - "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f", - "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", - "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05", - "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", - "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", - "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0", - "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", - "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7", - "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f", - "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", - "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", - "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76", - "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", - "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", - "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed", - "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", - "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", - "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", - "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", - "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", - "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", - "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", - "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55", - "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", - "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", - "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", - "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", - "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", - "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", - "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", - "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", - "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", - "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", - "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", - 
"sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", - "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", - "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", - "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", - "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", - "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", - "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc", - "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", - "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", - "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333", - "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", - "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" - ], - "version": "==1.14.5" - }, "chardet": { "hashes": [ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.0.0" }, - "click": { - "hashes": [ - "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", - "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" - ], - "version": "==8.0.1" - }, "colorama": { "hashes": [ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.4.4" }, "coverage": { @@ -185,28 +257,11 @@ }, "coveralls": { "hashes": [ - "sha256:7bd173b3425733661ba3063c88f180127cc2b20e9740686f86d2622b31b41385", - "sha256:cbb942ae5ef3d2b55388cb5b43e93a269544911535f1e750e1c656aef019ce60" + "sha256:172fb79c5f61c6ede60554f2cac46deff6d64ee735991fb2124fb414e188bdb4", + "sha256:9b3236e086627340bf2c95f89f757d093cbed43d17179d3f4fb568c347e7d29a" ], "index": "pypi", - "version": "==3.0.1" - }, - "cryptography": { - "hashes": [ - "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d", - "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959", - "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6", - "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873", - "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2", - "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713", - "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1", - "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177", - "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250", - "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca", - "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d", - "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9" - ], - "version": "==3.4.7" + "version": "==3.1.0" }, "docopt": { "hashes": [ @@ -219,29 +274,32 @@ "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.17.1" }, "flake8": { "hashes": [ - 
"sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", - "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", + "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907" ], "index": "pypi", - "version": "==3.8.4" + "version": "==3.9.2" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "importlib-metadata": { "hashes": [ - "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786", - "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5" + "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00", + "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139" ], - "version": "==4.4.0" + "markers": "python_version >= '3.6'", + "version": "==4.5.0" }, "iniconfig": { "hashes": [ @@ -250,19 +308,12 @@ ], "version": "==1.1.1" }, - "jeepney": { - "hashes": [ - "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657", - "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae" - ], - "markers": "sys_platform == 'linux'", - "version": "==0.6.0" - }, "keyring": { "hashes": [ "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48" ], + "markers": "python_version >= '3.6'", "version": "==23.0.1" }, "mccabe": { @@ -272,27 +323,14 @@ ], "version": "==0.6.1" }, - "mypy-extensions": { - "hashes": [ - "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", - "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" - ], - "version": "==0.4.3" - }, "packaging": { "hashes": [ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, - "pathspec": { - "hashes": [ - "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", - "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" - ], - "version": "==0.8.1" - }, "pkginfo": { "hashes": [ "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", @@ -302,60 +340,59 @@ }, "pluggy": { "hashes": [ - "sha256:265a94bf44ca13662f12fcd1b074c14d4b269a712f051b6f644ef7e705d6735f", - "sha256:467f0219e89bb5061a8429c6fc5cf055fa3983a0e68e84a1d205046306b37d9e" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==1.0.0.dev0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.13.1" }, "py": { "hashes": [ "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.10.0" }, "pycodestyle": { "hashes": [ - "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", - "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + 
"sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", + "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" ], - "version": "==2.6.0" - }, - "pycparser": { - "hashes": [ - "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", - "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" - ], - "version": "==2.20" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.7.0" }, "pyflakes": { "hashes": [ - "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", - "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", + "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db" ], - "version": "==2.2.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.3.1" }, "pygments": { "hashes": [ "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" ], + "markers": "python_version >= '3.5'", "version": "==2.9.0" }, "pyparsing": { "hashes": [ - "sha256:1c6409312ce2ce2997896af5756753778d5f1603666dba5587804f09ad82ed27", - "sha256:f4896b4cc085a1f8f8ae53a1a90db5a86b3825ff73eb974dffee3d9e701007f4" + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "version": "==3.0.0b2" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" }, "pytest": { "hashes": [ - "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9", - "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839" + "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b", + "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890" ], "index": "pypi", - "version": "==6.2.2" + "version": "==6.2.4" }, "readme-renderer": { "hashes": [ @@ -364,57 +401,12 @@ ], "version": "==29.0" }, - "regex": { - "hashes": [ - "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", - "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", - "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", - "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", - "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", - "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", - "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", - "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", - "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", - "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", - "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", - "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", - "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", - "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", - "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", - "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", - "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", - 
"sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", - "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", - "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", - "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", - "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", - "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", - "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", - "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", - "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", - "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", - "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", - "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", - "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", - "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", - "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", - "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", - "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", - "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", - "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", - "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", - "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", - "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", - "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", - "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" - ], - "version": "==2021.4.4" - }, "requests": { "hashes": [ "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.25.1" }, "requests-toolbelt": { @@ -431,19 +423,12 @@ ], "version": "==1.5.0" }, - "secretstorage": { - "hashes": [ - "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f", - "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195" - ], - "markers": "sys_platform == 'linux'", - "version": "==3.3.1" - }, "six": { "hashes": [ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.16.0" }, "toml": { @@ -451,72 +436,31 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "tqdm": { "hashes": [ - "sha256:736524215c690621b06fc89d0310a49822d75e599fcd0feb7cc742b98d692493", - "sha256:cd5791b5d7c3f2f1819efc81d36eb719a38e0906a7380365c556779f585ea042" + "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd", + "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2" ], - "version": "==4.61.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": 
"==4.61.1" }, "twine": { "hashes": [ - "sha256:2f6942ec2a17417e19d2dd372fc4faa424c87ee9ce49b4e20c427eb00a0f3f41", - "sha256:fcffa8fc37e8083a5be0728371f299598870ee1eccc94e9a25cef7b1dcfa8297" + "sha256:16f706f2f1687d7ce30e7effceee40ed0a09b7c33b9abb5ef6434e5551565d83", + "sha256:a56c985264b991dc8a8f4234eb80c5af87fa8080d0c224ad8f2cd05a2c22e83b" ], "index": "pypi", - "version": "==3.3.0" - }, - "typed-ast": { - "hashes": [ - "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", - "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", - "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", - "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", - "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", - "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", - "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", - "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", - "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", - "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", - "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", - "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", - "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", - "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", - "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", - "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", - "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", - "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", - "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", - "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", - "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", - "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", - "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", - "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", - "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", - "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", - "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", - "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", - "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", - "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" - ], - "version": "==1.4.3" - }, - "typing-extensions": { - "hashes": [ - "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", - "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", - "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" - ], - "version": "==3.10.0.0" + "version": "==3.4.1" }, "urllib3": { "hashes": [ "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.5" }, "webencodings": { @@ -531,6 +475,7 @@ "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", 
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" ], + "markers": "python_version >= '3.6'", "version": "==3.4.1" } } diff --git a/gitinspector/basedir.py b/gitinspector/basedir.py index 42f2160c..e2d62437 100644 --- a/gitinspector/basedir.py +++ b/gitinspector/basedir.py @@ -23,43 +23,43 @@ def get_basedir(): - if hasattr(sys, "frozen"): # exists when running via py2exe - return sys.prefix - else: - return os.path.dirname(os.path.realpath(__file__)) + if hasattr(sys, "frozen"): # exists when running via py2exe + return sys.prefix + else: + return os.path.dirname(os.path.realpath(__file__)) def get_basedir_git(path=None): - previous_directory = None + previous_directory = None - if path is not None: - previous_directory = os.getcwd() - os.chdir(path) + if path is not None: + previous_directory = os.getcwd() + os.chdir(path) - bare_command = subprocess.Popen( - ["git", "rev-parse", "--is-bare-repository"], stdout=subprocess.PIPE, stderr=open(os.devnull, "w") - ) + bare_command = subprocess.Popen( + ["git", "rev-parse", "--is-bare-repository"], stdout=subprocess.PIPE, stderr=open(os.devnull, "w") + ) - isbare = bare_command.stdout.readlines() - bare_command.wait() + isbare = bare_command.stdout.readlines() + bare_command.wait() - if bare_command.returncode != 0: - sys.exit(_('Error processing git repository at "%s".' % os.getcwd())) + if bare_command.returncode != 0: + sys.exit(_('Error processing git repository at "%s".' % os.getcwd())) - isbare = isbare[0].decode("utf-8", "replace").strip() == "true" - absolute_path = None + isbare = isbare[0].decode("utf-8", "replace").strip() == "true" + absolute_path = None - if isbare: - absolute_path = subprocess.Popen(["git", "rev-parse", "--git-dir"], stdout=subprocess.PIPE).stdout - else: - absolute_path = subprocess.Popen(["git", "rev-parse", "--show-toplevel"], stdout=subprocess.PIPE).stdout + if isbare: + absolute_path = subprocess.Popen(["git", "rev-parse", "--git-dir"], stdout=subprocess.PIPE).stdout + else: + absolute_path = subprocess.Popen(["git", "rev-parse", "--show-toplevel"], stdout=subprocess.PIPE).stdout - absolute_path = absolute_path.readlines() + absolute_path = absolute_path.readlines() - if len(absolute_path) == 0: - sys.exit(_("Unable to determine absolute path of git repository.")) + if len(absolute_path) == 0: + sys.exit(_("Unable to determine absolute path of git repository.")) - if path is not None: - os.chdir(previous_directory) + if path is not None: + os.chdir(previous_directory) - return absolute_path[0].decode("utf-8", "replace").strip() + return absolute_path[0].decode("utf-8", "replace").strip() diff --git a/gitinspector/blame.py b/gitinspector/blame.py index 9c55eb61..f4d7b317 100644 --- a/gitinspector/blame.py +++ b/gitinspector/blame.py @@ -31,9 +31,9 @@ class BlameEntry(object): - rows = 0 - skew = 0 # Used when calculating average code age. - comments = 0 + rows = 0 + skew = 0 # Used when calculating average code age. 
+ comments = 0 __thread_lock__ = threading.BoundedSemaphore(NUM_THREADS) @@ -43,175 +43,173 @@ class BlameEntry(object): class BlameThread(threading.Thread): - def __init__(self, useweeks, changes, blame_command, extension, blames, filename): - __thread_lock__.acquire() # Lock controlling the number of threads running - threading.Thread.__init__(self) - - self.useweeks = useweeks - self.changes = changes - self.blame_command = blame_command - self.extension = extension - self.blames = blames - self.filename = filename - - self.is_inside_comment = False - - def __clear_blamechunk_info__(self): - self.blamechunk_email = None - self.blamechunk_is_last = False - self.blamechunk_is_prior = False - self.blamechunk_revision = None - self.blamechunk_time = None - - def __handle_blamechunk_content__(self, content): - author = None - (comments, self.is_inside_comment) = comment.handle_comment_block(self.is_inside_comment, self.extension, content) - - if self.blamechunk_is_prior and interval.get_since(): - return - try: - author = self.changes.get_latest_author_by_email(self.blamechunk_email) - except KeyError: - return - - if ( - not filtering.set_filtered(author, "author") - and not filtering.set_filtered(self.blamechunk_email, "email") - and not filtering.set_filtered(self.blamechunk_revision, "revision") - ): - - __blame_lock__.acquire() # Global lock used to protect calls from here... - - if self.blames.get((author, self.filename), None) is None: - self.blames[(author, self.filename)] = BlameEntry() - - self.blames[(author, self.filename)].comments += comments - self.blames[(author, self.filename)].rows += 1 - - if (self.blamechunk_time - self.changes.first_commit_date).days > 0: - self.blames[(author, self.filename)].skew += (self.changes.last_commit_date - self.blamechunk_time).days / ( - 7.0 if self.useweeks else AVG_DAYS_PER_MONTH - ) - - __blame_lock__.release() # ...to here. 
- - def run(self): - git_blame_r = subprocess.Popen(self.blame_command, stdout=subprocess.PIPE).stdout - rows = git_blame_r.readlines() - git_blame_r.close() - - self.__clear_blamechunk_info__() - - # pylint: disable=W0201 - for j in range(0, len(rows)): - row = rows[j].decode("utf-8", "replace").strip() - keyval = row.split(" ", 2) - - if self.blamechunk_is_last: - self.__handle_blamechunk_content__(row) - self.__clear_blamechunk_info__() - elif keyval[0] == "boundary": - self.blamechunk_is_prior = True - elif keyval[0] == "author-mail": - self.blamechunk_email = keyval[1].lstrip("<").rstrip(">") - elif keyval[0] == "author-time": - self.blamechunk_time = datetime.date.fromtimestamp(int(keyval[1])) - elif keyval[0] == "filename": - self.blamechunk_is_last = True - elif Blame.is_revision(keyval[0]): - self.blamechunk_revision = keyval[0] - - __thread_lock__.release() # Lock controlling the number of threads running + def __init__(self, useweeks, changes, blame_command, extension, blames, filename): + __thread_lock__.acquire() # Lock controlling the number of threads running + threading.Thread.__init__(self) + + self.useweeks = useweeks + self.changes = changes + self.blame_command = blame_command + self.extension = extension + self.blames = blames + self.filename = filename + + self.is_inside_comment = False + + def __clear_blamechunk_info__(self): + self.blamechunk_email = None + self.blamechunk_is_last = False + self.blamechunk_is_prior = False + self.blamechunk_revision = None + self.blamechunk_time = None + + def __handle_blamechunk_content__(self, content): + author = None + (comments, self.is_inside_comment) = comment.handle_comment_block(self.is_inside_comment, self.extension, content) + + if self.blamechunk_is_prior and interval.get_since(): + return + try: + author = self.changes.get_latest_author_by_email(self.blamechunk_email) + except KeyError: + return + + if ( + not filtering.set_filtered(author, "author") + and not filtering.set_filtered(self.blamechunk_email, "email") + and not filtering.set_filtered(self.blamechunk_revision, "revision") + ): + + __blame_lock__.acquire() # Global lock used to protect calls from here... + + if self.blames.get((author, self.filename), None) is None: + self.blames[(author, self.filename)] = BlameEntry() + + self.blames[(author, self.filename)].comments += comments + self.blames[(author, self.filename)].rows += 1 + + if (self.blamechunk_time - self.changes.first_commit_date).days > 0: + self.blames[(author, self.filename)].skew += (self.changes.last_commit_date - self.blamechunk_time).days / ( + 7.0 if self.useweeks else AVG_DAYS_PER_MONTH + ) + + __blame_lock__.release() # ...to here. 
+ + def run(self): + git_blame_r = subprocess.Popen(self.blame_command, stdout=subprocess.PIPE).stdout + rows = git_blame_r.readlines() + git_blame_r.close() + + self.__clear_blamechunk_info__() + + # pylint: disable=W0201 + for j in range(0, len(rows)): + row = rows[j].decode("utf-8", "replace").strip() + keyval = row.split(" ", 2) + + if self.blamechunk_is_last: + self.__handle_blamechunk_content__(row) + self.__clear_blamechunk_info__() + elif keyval[0] == "boundary": + self.blamechunk_is_prior = True + elif keyval[0] == "author-mail": + self.blamechunk_email = keyval[1].lstrip("<").rstrip(">") + elif keyval[0] == "author-time": + self.blamechunk_time = datetime.date.fromtimestamp(int(keyval[1])) + elif keyval[0] == "filename": + self.blamechunk_is_last = True + elif Blame.is_revision(keyval[0]): + self.blamechunk_revision = keyval[0] + + __thread_lock__.release() # Lock controlling the number of threads running PROGRESS_TEXT = N_("Checking how many rows belong to each author (2 of 2): {0:.0f}%") class Blame(object): - def __init__(self, repo, hard, useweeks, changes): - self.blames = {} - ls_tree_p = subprocess.Popen( - ["git", "ls-tree", "--name-only", "-r", interval.get_ref()], stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - lines = ls_tree_p.communicate()[0].splitlines() - ls_tree_p.stdout.close() - - if ls_tree_p.returncode == 0: - progress_text = _(PROGRESS_TEXT) - - if repo is not None: - progress_text = "[%s] " % repo.name + progress_text - - for i, row in enumerate(lines): - row = row.strip().decode("unicode_escape", "ignore") - row = row.encode("latin-1", "replace") - row = row.decode("utf-8", "replace").strip('"').strip("'").strip() - - if ( - FileDiff.get_extension(row) in extensions.get_located() - and FileDiff.is_valid_extension(row) - and not filtering.set_filtered(FileDiff.get_filename(row)) - ): - blame_command = [ - _f - for _f in ["git", "blame", "--line-porcelain", "-w"] - + (["-C", "-C", "-M"] if hard else []) - + [interval.get_since(), interval.get_ref(), "--", row] - if _f - ] - thread = BlameThread( - useweeks, changes, blame_command, FileDiff.get_extension(row), self.blames, row.strip() - ) - thread.daemon = True - thread.start() - - if format.is_interactive_format(): - terminal.output_progress(progress_text, i, len(lines)) - - # Make sure all threads have completed. - for i in range(0, NUM_THREADS): - __thread_lock__.acquire() - - # We also have to release them for future use. 
- for i in range(0, NUM_THREADS): - __thread_lock__.release() - - def __iadd__(self, other): - try: - self.blames.update(other.blames) - return self - except AttributeError: - return other - - @staticmethod - def is_revision(string): - revision = re.search("([0-9a-f]{40})", string) - - if revision is None: - return False - - return revision.group(1).strip() - - @staticmethod - def get_stability(author, blamed_rows, changes): - if author in changes.get_authorinfo_list(): - author_insertions = changes.get_authorinfo_list()[author].insertions - return 100 if author_insertions == 0 else 100.0 * blamed_rows / author_insertions - return 100 - - @staticmethod - def get_time(string): - time = re.search(r" \(.*?(\d\d\d\d-\d\d-\d\d)", string) - return time.group(1).strip() - - def get_summed_blames(self): - summed_blames = {} - for i in list(self.blames.items()): - if summed_blames.get(i[0][0], None) is None: - summed_blames[i[0][0]] = BlameEntry() - - summed_blames[i[0][0]].rows += i[1].rows - summed_blames[i[0][0]].skew += i[1].skew - summed_blames[i[0][0]].comments += i[1].comments - - return summed_blames + def __init__(self, repo, hard, useweeks, changes): + self.blames = {} + ls_tree_p = subprocess.Popen( + ["git", "ls-tree", "--name-only", "-r", interval.get_ref()], stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + lines = ls_tree_p.communicate()[0].splitlines() + ls_tree_p.stdout.close() + + if ls_tree_p.returncode == 0: + progress_text = _(PROGRESS_TEXT) + + if repo is not None: + progress_text = "[%s] " % repo.name + progress_text + + for i, row in enumerate(lines): + row = row.strip().decode("unicode_escape", "ignore") + row = row.encode("latin-1", "replace") + row = row.decode("utf-8", "replace").strip('"').strip("'").strip() + + if ( + FileDiff.get_extension(row) in extensions.get_located() + and FileDiff.is_valid_extension(row) + and not filtering.set_filtered(FileDiff.get_filename(row)) + ): + blame_command = [ + _f + for _f in ["git", "blame", "--line-porcelain", "-w"] + + (["-C", "-C", "-M"] if hard else []) + + [interval.get_since(), interval.get_ref(), "--", row] + if _f + ] + thread = BlameThread(useweeks, changes, blame_command, FileDiff.get_extension(row), self.blames, row.strip()) + thread.daemon = True + thread.start() + + if format.is_interactive_format(): + terminal.output_progress(progress_text, i, len(lines)) + + # Make sure all threads have completed. + for i in range(0, NUM_THREADS): + __thread_lock__.acquire() + + # We also have to release them for future use. 
+ for i in range(0, NUM_THREADS): + __thread_lock__.release() + + def __iadd__(self, other): + try: + self.blames.update(other.blames) + return self + except AttributeError: + return other + + @staticmethod + def is_revision(string): + revision = re.search("([0-9a-f]{40})", string) + + if revision is None: + return False + + return revision.group(1).strip() + + @staticmethod + def get_stability(author, blamed_rows, changes): + if author in changes.get_authorinfo_list(): + author_insertions = changes.get_authorinfo_list()[author].insertions + return 100 if author_insertions == 0 else 100.0 * blamed_rows / author_insertions + return 100 + + @staticmethod + def get_time(string): + time = re.search(r" \(.*?(\d\d\d\d-\d\d-\d\d)", string) + return time.group(1).strip() + + def get_summed_blames(self): + summed_blames = {} + for i in list(self.blames.items()): + if summed_blames.get(i[0][0], None) is None: + summed_blames[i[0][0]] = BlameEntry() + + summed_blames[i[0][0]].rows += i[1].rows + summed_blames[i[0][0]].skew += i[1].skew + summed_blames[i[0][0]].comments += i[1].comments + + return summed_blames diff --git a/gitinspector/changes.py b/gitinspector/changes.py index 479507a0..640d617d 100644 --- a/gitinspector/changes.py +++ b/gitinspector/changes.py @@ -35,289 +35,285 @@ class FileDiff(object): - def __init__(self, string): - commit_line = string.split("|") + def __init__(self, string): + commit_line = string.split("|") - if commit_line.__len__() == 2: - self.name = commit_line[0].strip() - self.insertions = commit_line[1].count("+") - self.deletions = commit_line[1].count("-") + if commit_line.__len__() == 2: + self.name = commit_line[0].strip() + self.insertions = commit_line[1].count("+") + self.deletions = commit_line[1].count("-") - @staticmethod - def is_filediff_line(string): - string = string.split("|") - return string.__len__() == 2 and string[1].find("Bin") == -1 and ("+" in string[1] or "-" in string[1]) + @staticmethod + def is_filediff_line(string): + string = string.split("|") + return string.__len__() == 2 and string[1].find("Bin") == -1 and ("+" in string[1] or "-" in string[1]) - @staticmethod - def get_extension(string): - string = string.split("|")[0].strip().strip("{}").strip('"').strip("'") - return os.path.splitext(string)[1][1:] + @staticmethod + def get_extension(string): + string = string.split("|")[0].strip().strip("{}").strip('"').strip("'") + return os.path.splitext(string)[1][1:] - @staticmethod - def get_filename(string): - return string.split("|")[0].strip().strip("{}").strip('"').strip("'") + @staticmethod + def get_filename(string): + return string.split("|")[0].strip().strip("{}").strip('"').strip("'") - @staticmethod - def is_valid_extension(string): - extension = FileDiff.get_extension(string) + @staticmethod + def is_valid_extension(string): + extension = FileDiff.get_extension(string) - for i in extensions.get(): - if (extension == "" and i == "*") or extension == i or i == "**": - return True - return False + for i in extensions.get(): + if (extension == "" and i == "*") or extension == i or i == "**": + return True + return False class Commit(object): - def __init__(self, string): - self.filediffs = [] - commit_line = string.split("|") + def __init__(self, string): + self.filediffs = [] + commit_line = string.split("|") - if commit_line.__len__() == 5: - self.timestamp = commit_line[0] - self.date = commit_line[1] - self.sha = commit_line[2] - self.author = commit_line[3].strip() - self.email = commit_line[4].strip() + if commit_line.__len__() == 
5: + self.timestamp = commit_line[0] + self.date = commit_line[1] + self.sha = commit_line[2] + self.author = commit_line[3].strip() + self.email = commit_line[4].strip() - def __lt__(self, other): - return self.timestamp.__lt__(other.timestamp) # only used for sorting; we just consider the timestamp. + def __lt__(self, other): + return self.timestamp.__lt__(other.timestamp) # only used for sorting; we just consider the timestamp. - def add_filediff(self, filediff): - self.filediffs.append(filediff) + def add_filediff(self, filediff): + self.filediffs.append(filediff) - def get_filediffs(self): - return self.filediffs + def get_filediffs(self): + return self.filediffs - @staticmethod - def get_author_and_email(string): - commit_line = string.split("|") + @staticmethod + def get_author_and_email(string): + commit_line = string.split("|") - if commit_line.__len__() == 5: - return (commit_line[3].strip(), commit_line[4].strip()) + if commit_line.__len__() == 5: + return (commit_line[3].strip(), commit_line[4].strip()) - @staticmethod - def is_commit_line(string): - return string.split("|").__len__() == 5 + @staticmethod + def is_commit_line(string): + return string.split("|").__len__() == 5 class AuthorInfo(object): - email = None - insertions = 0 - deletions = 0 - commits = 0 + email = None + insertions = 0 + deletions = 0 + commits = 0 class ChangesThread(threading.Thread): - def __init__(self, hard, changes, first_hash, second_hash, offset): - __thread_lock__.acquire() # Lock controlling the number of threads running - threading.Thread.__init__(self) - - self.hard = hard - self.changes = changes - self.first_hash = first_hash - self.second_hash = second_hash - self.offset = offset - - @staticmethod - def create(hard, changes, first_hash, second_hash, offset): - thread = ChangesThread(hard, changes, first_hash, second_hash, offset) - thread.daemon = True - thread.start() - - def run(self): - git_log_r = subprocess.Popen( - [ - _f - for _f in [ - "git", - "log", - "--reverse", - "--pretty=%ct|%cd|%H|%aN|%aE", - "--stat=100000,8192", - "--no-merges", - "-w", - interval.get_since(), - interval.get_until(), - "--date=short", - ] - + (["-C", "-C", "-M"] if self.hard else []) - + [self.first_hash + self.second_hash] - if _f - ], - stdout=subprocess.PIPE, - ).stdout - lines = git_log_r.readlines() - git_log_r.close() - - commit = None - found_valid_extension = False - is_filtered = False - commits = [] - - __changes_lock__.acquire() # Global lock used to protect calls from here... 
- - for i in lines: - j = i.strip().decode("unicode_escape", "ignore") - j = j.encode("latin-1", "replace") - j = j.decode("utf-8", "replace") - - if Commit.is_commit_line(j): - (author, email) = Commit.get_author_and_email(j) - self.changes.emails_by_author[author] = email - self.changes.authors_by_email[email] = author - - if Commit.is_commit_line(j) or i is lines[-1]: - if found_valid_extension: - bisect.insort(commits, commit) - - found_valid_extension = False - is_filtered = False - commit = Commit(j) - - if Commit.is_commit_line(j) and ( - filtering.set_filtered(commit.author, "author") - or filtering.set_filtered(commit.email, "email") - or filtering.set_filtered(commit.sha, "revision") - or filtering.set_filtered(commit.sha, "message") - ): - is_filtered = True - - if FileDiff.is_filediff_line(j) and not filtering.set_filtered(FileDiff.get_filename(j)) and not is_filtered: - extensions.add_located(FileDiff.get_extension(j)) - - if FileDiff.is_valid_extension(j): - found_valid_extension = True - filediff = FileDiff(j) - commit.add_filediff(filediff) - - self.changes.commits[self.offset // CHANGES_PER_THREAD] = commits - __changes_lock__.release() # ...to here. - __thread_lock__.release() # Lock controlling the number of threads running + def __init__(self, hard, changes, first_hash, second_hash, offset): + __thread_lock__.acquire() # Lock controlling the number of threads running + threading.Thread.__init__(self) + + self.hard = hard + self.changes = changes + self.first_hash = first_hash + self.second_hash = second_hash + self.offset = offset + + @staticmethod + def create(hard, changes, first_hash, second_hash, offset): + thread = ChangesThread(hard, changes, first_hash, second_hash, offset) + thread.daemon = True + thread.start() + + def run(self): + git_log_r = subprocess.Popen( + [ + _f + for _f in [ + "git", + "log", + "--reverse", + "--pretty=%ct|%cd|%H|%aN|%aE", + "--stat=100000,8192", + "--no-merges", + "-w", + interval.get_since(), + interval.get_until(), + "--date=short", + ] + + (["-C", "-C", "-M"] if self.hard else []) + + [self.first_hash + self.second_hash] + if _f + ], + stdout=subprocess.PIPE, + ).stdout + lines = git_log_r.readlines() + git_log_r.close() + + commit = None + found_valid_extension = False + is_filtered = False + commits = [] + + __changes_lock__.acquire() # Global lock used to protect calls from here... 
+ + for i in lines: + j = i.strip().decode("unicode_escape", "ignore") + j = j.encode("latin-1", "replace") + j = j.decode("utf-8", "replace") + + if Commit.is_commit_line(j): + (author, email) = Commit.get_author_and_email(j) + self.changes.emails_by_author[author] = email + self.changes.authors_by_email[email] = author + + if Commit.is_commit_line(j) or i is lines[-1]: + if found_valid_extension: + bisect.insort(commits, commit) + + found_valid_extension = False + is_filtered = False + commit = Commit(j) + + if Commit.is_commit_line(j) and ( + filtering.set_filtered(commit.author, "author") + or filtering.set_filtered(commit.email, "email") + or filtering.set_filtered(commit.sha, "revision") + or filtering.set_filtered(commit.sha, "message") + ): + is_filtered = True + + if FileDiff.is_filediff_line(j) and not filtering.set_filtered(FileDiff.get_filename(j)) and not is_filtered: + extensions.add_located(FileDiff.get_extension(j)) + + if FileDiff.is_valid_extension(j): + found_valid_extension = True + filediff = FileDiff(j) + commit.add_filediff(filediff) + + self.changes.commits[self.offset // CHANGES_PER_THREAD] = commits + __changes_lock__.release() # ...to here. + __thread_lock__.release() # Lock controlling the number of threads running PROGRESS_TEXT = N_("Fetching and calculating primary statistics (1 of 2): {0:.0f}%") class Changes(object): - authors = {} - authors_dateinfo = {} - authors_by_email = {} - emails_by_author = {} - - def __init__(self, repo, hard): - self.commits = [] - interval.set_ref("HEAD") - git_rev_list_p = subprocess.Popen( - [ - _f - for _f in ["git", "rev-list", "--reverse", "--no-merges", interval.get_since(), interval.get_until(), "HEAD"] - if _f - ], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - lines = git_rev_list_p.communicate()[0].splitlines() - git_rev_list_p.stdout.close() - - if git_rev_list_p.returncode == 0 and len(lines) > 0: - progress_text = _(PROGRESS_TEXT) - if repo is not None: - progress_text = "[%s] " % repo.name + progress_text - - chunks = len(lines) // CHANGES_PER_THREAD - self.commits = [None] * (chunks if len(lines) % CHANGES_PER_THREAD == 0 else chunks + 1) - first_hash = "" - - for i, entry in enumerate(lines): - if i % CHANGES_PER_THREAD == CHANGES_PER_THREAD - 1: - entry = entry.decode("utf-8", "replace").strip() - second_hash = entry - ChangesThread.create(hard, self, first_hash, second_hash, i) - first_hash = entry + ".." - - if format.is_interactive_format(): - terminal.output_progress(progress_text, i, len(lines)) - else: - if CHANGES_PER_THREAD - 1 != i % CHANGES_PER_THREAD: - entry = entry.decode("utf-8", "replace").strip() - second_hash = entry - ChangesThread.create(hard, self, first_hash, second_hash, i) - - # Make sure all threads have completed. - for i in range(0, NUM_THREADS): - __thread_lock__.acquire() - - # We also have to release them for future use. 
- for i in range(0, NUM_THREADS): - __thread_lock__.release() - - self.commits = [item for sublist in self.commits for item in sublist] - - if len(self.commits) > 0: - if interval.has_interval(): - interval.set_ref(self.commits[-1].sha) - - self.first_commit_date = datetime.date( - int(self.commits[0].date[0:4]), int(self.commits[0].date[5:7]), int(self.commits[0].date[8:10]) - ) - self.last_commit_date = datetime.date( - int(self.commits[-1].date[0:4]), int(self.commits[-1].date[5:7]), int(self.commits[-1].date[8:10]) - ) - - def __iadd__(self, other): - try: - self.authors.update(other.authors) - self.authors_dateinfo.update(other.authors_dateinfo) - self.authors_by_email.update(other.authors_by_email) - self.emails_by_author.update(other.emails_by_author) - - for commit in other.commits: - bisect.insort(self.commits, commit) - if not self.commits and not other.commits: - self.commits = [] - - return self - except AttributeError: - return other - - def get_commits(self): - return self.commits - - @staticmethod - def modify_authorinfo(authors, key, commit): - if authors.get(key, None) is None: - authors[key] = AuthorInfo() - - if commit.get_filediffs(): - authors[key].commits += 1 - - for j in commit.get_filediffs(): - authors[key].insertions += j.insertions - authors[key].deletions += j.deletions - - def get_authorinfo_list(self): - if not self.authors: - for i in self.commits: - Changes.modify_authorinfo(self.authors, i.author, i) - - return self.authors - - def get_authordateinfo_list(self): - if not self.authors_dateinfo: - for i in self.commits: - Changes.modify_authorinfo(self.authors_dateinfo, (i.date, i.author), i) - - return self.authors_dateinfo - - def get_latest_author_by_email(self, name): - if not hasattr(name, "decode"): - name = str.encode(name) - try: - name = name.decode("unicode_escape", "ignore") - except UnicodeEncodeError: - pass - - return self.authors_by_email[name] - - def get_latest_email_by_author(self, name): - return self.emails_by_author[name] + authors = {} + authors_dateinfo = {} + authors_by_email = {} + emails_by_author = {} + + def __init__(self, repo, hard): + self.commits = [] + interval.set_ref("HEAD") + git_rev_list_p = subprocess.Popen( + [_f for _f in ["git", "rev-list", "--reverse", "--no-merges", interval.get_since(), interval.get_until(), "HEAD"] if _f], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + lines = git_rev_list_p.communicate()[0].splitlines() + git_rev_list_p.stdout.close() + + if git_rev_list_p.returncode == 0 and len(lines) > 0: + progress_text = _(PROGRESS_TEXT) + if repo is not None: + progress_text = "[%s] " % repo.name + progress_text + + chunks = len(lines) // CHANGES_PER_THREAD + self.commits = [None] * (chunks if len(lines) % CHANGES_PER_THREAD == 0 else chunks + 1) + first_hash = "" + + for i, entry in enumerate(lines): + if i % CHANGES_PER_THREAD == CHANGES_PER_THREAD - 1: + entry = entry.decode("utf-8", "replace").strip() + second_hash = entry + ChangesThread.create(hard, self, first_hash, second_hash, i) + first_hash = entry + ".." + + if format.is_interactive_format(): + terminal.output_progress(progress_text, i, len(lines)) + else: + if CHANGES_PER_THREAD - 1 != i % CHANGES_PER_THREAD: + entry = entry.decode("utf-8", "replace").strip() + second_hash = entry + ChangesThread.create(hard, self, first_hash, second_hash, i) + + # Make sure all threads have completed. + for i in range(0, NUM_THREADS): + __thread_lock__.acquire() + + # We also have to release them for future use. 
+ for i in range(0, NUM_THREADS): + __thread_lock__.release() + + self.commits = [item for sublist in self.commits for item in sublist] + + if len(self.commits) > 0: + if interval.has_interval(): + interval.set_ref(self.commits[-1].sha) + + self.first_commit_date = datetime.date( + int(self.commits[0].date[0:4]), int(self.commits[0].date[5:7]), int(self.commits[0].date[8:10]) + ) + self.last_commit_date = datetime.date( + int(self.commits[-1].date[0:4]), int(self.commits[-1].date[5:7]), int(self.commits[-1].date[8:10]) + ) + + def __iadd__(self, other): + try: + self.authors.update(other.authors) + self.authors_dateinfo.update(other.authors_dateinfo) + self.authors_by_email.update(other.authors_by_email) + self.emails_by_author.update(other.emails_by_author) + + for commit in other.commits: + bisect.insort(self.commits, commit) + if not self.commits and not other.commits: + self.commits = [] + + return self + except AttributeError: + return other + + def get_commits(self): + return self.commits + + @staticmethod + def modify_authorinfo(authors, key, commit): + if authors.get(key, None) is None: + authors[key] = AuthorInfo() + + if commit.get_filediffs(): + authors[key].commits += 1 + + for j in commit.get_filediffs(): + authors[key].insertions += j.insertions + authors[key].deletions += j.deletions + + def get_authorinfo_list(self): + if not self.authors: + for i in self.commits: + Changes.modify_authorinfo(self.authors, i.author, i) + + return self.authors + + def get_authordateinfo_list(self): + if not self.authors_dateinfo: + for i in self.commits: + Changes.modify_authorinfo(self.authors_dateinfo, (i.date, i.author), i) + + return self.authors_dateinfo + + def get_latest_author_by_email(self, name): + if not hasattr(name, "decode"): + name = str.encode(name) + try: + name = name.decode("unicode_escape", "ignore") + except UnicodeEncodeError: + pass + + return self.authors_by_email[name] + + def get_latest_email_by_author(self, name): + return self.emails_by_author[name] diff --git a/gitinspector/clone.py b/gitinspector/clone.py index e81a2cad..fc78e833 100644 --- a/gitinspector/clone.py +++ b/gitinspector/clone.py @@ -25,41 +25,41 @@ import tempfile try: - from urllib.parse import urlparse + from urllib.parse import urlparse except: - from urllib.parse import urlparse + from urllib.parse import urlparse __cloned_paths__ = [] def create(url): - class Repository(object): - def __init__(self, name, location): - self.name = name - self.location = location + class Repository(object): + def __init__(self, name, location): + self.name = name + self.location = location - parsed_url = urlparse(url) + parsed_url = urlparse(url) - if ( - parsed_url.scheme == "file" - or parsed_url.scheme == "git" - or parsed_url.scheme == "http" - or parsed_url.scheme == "https" - or parsed_url.scheme == "ssh" - ): - path = tempfile.mkdtemp(suffix=".gitinspector") - git_clone = subprocess.Popen(["git", "clone", url, path], stdout=sys.stderr) - git_clone.wait() + if ( + parsed_url.scheme == "file" + or parsed_url.scheme == "git" + or parsed_url.scheme == "http" + or parsed_url.scheme == "https" + or parsed_url.scheme == "ssh" + ): + path = tempfile.mkdtemp(suffix=".gitinspector") + git_clone = subprocess.Popen(["git", "clone", url, path], stdout=sys.stderr) + git_clone.wait() - if git_clone.returncode != 0: - sys.exit(git_clone.returncode) + if git_clone.returncode != 0: + sys.exit(git_clone.returncode) - __cloned_paths__.append(path) - return Repository(os.path.basename(parsed_url.path), path) + 
__cloned_paths__.append(path) + return Repository(os.path.basename(parsed_url.path), path) - return Repository(None, os.path.abspath(url)) + return Repository(None, os.path.abspath(url)) def delete(): - for path in __cloned_paths__: - shutil.rmtree(path, ignore_errors=True) + for path in __cloned_paths__: + shutil.rmtree(path, ignore_errors=True) diff --git a/gitinspector/comment.py b/gitinspector/comment.py index d174c6e9..b04ee8f7 100644 --- a/gitinspector/comment.py +++ b/gitinspector/comment.py @@ -19,138 +19,138 @@ __comment_begining__ = { - "java": "/*", - "c": "/*", - "cc": "/*", - "cpp": "/*", - "cs": "/*", - "h": "/*", - "hh": "/*", - "hpp": "/*", - "hs": "{-", - "html": "", - "php": "*/", - "py": '"""', - "glsl": "*/", - "rb": "=end", - "js": "*/", - "jspx": "-->", - "scala": "*/", - "sql": "*/", - "tex": "\\end{comment}", - "xhtml": "-->", - "xml": "-->", - "ml": "*)", - "mli": "*)", - "go": "*/", - "ly": "%}", - "ily": "%}", + "java": "*/", + "c": "*/", + "cc": "*/", + "cpp": "*/", + "cs": "*/", + "h": "*/", + "hh": "*/", + "hpp": "*/", + "hs": "-}", + "html": "-->", + "php": "*/", + "py": '"""', + "glsl": "*/", + "rb": "=end", + "js": "*/", + "jspx": "-->", + "scala": "*/", + "sql": "*/", + "tex": "\\end{comment}", + "xhtml": "-->", + "xml": "-->", + "ml": "*)", + "mli": "*)", + "go": "*/", + "ly": "%}", + "ily": "%}", } __comment__ = { - "java": "//", - "c": "//", - "cc": "//", - "cpp": "//", - "cs": "//", - "h": "//", - "hh": "//", - "hpp": "//", - "hs": "--", - "pl": "#", - "php": "//", - "py": "#", - "glsl": "//", - "rb": "#", - "robot": "#", - "rs": "//", - "rlib": "//", - "js": "//", - "scala": "//", - "sql": "--", - "tex": "%", - "ada": "--", - "ads": "--", - "adb": "--", - "pot": "#", - "po": "#", - "go": "//", - "ly": "%", - "ily": "%", + "java": "//", + "c": "//", + "cc": "//", + "cpp": "//", + "cs": "//", + "h": "//", + "hh": "//", + "hpp": "//", + "hs": "--", + "pl": "#", + "php": "//", + "py": "#", + "glsl": "//", + "rb": "#", + "robot": "#", + "rs": "//", + "rlib": "//", + "js": "//", + "scala": "//", + "sql": "--", + "tex": "%", + "ada": "--", + "ads": "--", + "adb": "--", + "pot": "#", + "po": "#", + "go": "//", + "ly": "%", + "ily": "%", } __comment_markers_must_be_at_begining__ = {"tex": True} def __has_comment_begining__(extension, string): - if __comment_markers_must_be_at_begining__.get(extension, None): - return string.find(__comment_begining__[extension]) == 0 - elif __comment_begining__.get(extension, None) is not None and string.find(__comment_end__[extension], 2) == -1: - return string.find(__comment_begining__[extension]) != -1 + if __comment_markers_must_be_at_begining__.get(extension, None): + return string.find(__comment_begining__[extension]) == 0 + elif __comment_begining__.get(extension, None) is not None and string.find(__comment_end__[extension], 2) == -1: + return string.find(__comment_begining__[extension]) != -1 - return False + return False def __has_comment_end__(extension, string): - if __comment_markers_must_be_at_begining__.get(extension, None): - return string.find(__comment_end__[extension]) == 0 - elif __comment_end__.get(extension, None) is not None: - return string.find(__comment_end__[extension]) != -1 + if __comment_markers_must_be_at_begining__.get(extension, None): + return string.find(__comment_end__[extension]) == 0 + elif __comment_end__.get(extension, None) is not None: + return string.find(__comment_end__[extension]) != -1 - return False + return False def is_comment(extension, string): - if 
__comment_begining__.get(extension, None) is not None and string.strip().startswith(__comment_begining__[extension]): - return True - if __comment_end__.get(extension, None) is not None and string.strip().endswith(__comment_end__[extension]): - return True - if __comment__.get(extension, None) is not None and string.strip().startswith(__comment__[extension]): - return True + if __comment_begining__.get(extension, None) is not None and string.strip().startswith(__comment_begining__[extension]): + return True + if __comment_end__.get(extension, None) is not None and string.strip().endswith(__comment_end__[extension]): + return True + if __comment__.get(extension, None) is not None and string.strip().startswith(__comment__[extension]): + return True - return False + return False def handle_comment_block(is_inside_comment, extension, content): - comments = 0 - - if is_comment(extension, content): - comments += 1 - if is_inside_comment: - if __has_comment_end__(extension, content): - is_inside_comment = False - else: - comments += 1 - elif __has_comment_begining__(extension, content) and not __has_comment_end__(extension, content): - is_inside_comment = True - - return (comments, is_inside_comment) + comments = 0 + + if is_comment(extension, content): + comments += 1 + if is_inside_comment: + if __has_comment_end__(extension, content): + is_inside_comment = False + else: + comments += 1 + elif __has_comment_begining__(extension, content) and not __has_comment_end__(extension, content): + is_inside_comment = True + + return (comments, is_inside_comment) diff --git a/gitinspector/config.py b/gitinspector/config.py index 824161a7..ee446999 100644 --- a/gitinspector/config.py +++ b/gitinspector/config.py @@ -24,73 +24,73 @@ class GitConfig(object): - def __init__(self, run, repo, global_only=False): - self.run = run - self.repo = repo - self.global_only = global_only - - def __read_git_config__(self, variable): - previous_directory = os.getcwd() - os.chdir(self.repo) - setting = subprocess.Popen( - [_f for _f in ["git", "config", "--global" if self.global_only else "", "inspector." 
+ variable] if _f], - stdout=subprocess.PIPE, - ).stdout - os.chdir(previous_directory) - - try: - setting = setting.readlines()[0] - setting = setting.decode("utf-8", "replace").strip() - except IndexError: - setting = "" - - return setting - - def __read_git_config_bool__(self, variable): - try: - variable = self.__read_git_config__(variable) - return optval.get_boolean_argument(False if variable == "" else variable) - except optval.InvalidOptionArgument: - return False - - def __read_git_config_string__(self, variable): - string = self.__read_git_config__(variable) - return (True, string) if len(string) > 0 else (False, None) - - def read(self): - var = self.__read_git_config_string__("file-types") - if var[0]: - extensions.define(var[1]) - - var = self.__read_git_config_string__("exclude") - if var[0]: - filtering.add(var[1]) - - var = self.__read_git_config_string__("format") - if var[0] and not format.select(var[1]): - raise format.InvalidFormatError(_("specified output format not supported.")) - - self.run.hard = self.__read_git_config_bool__("hard") - self.run.list_file_types = self.__read_git_config_bool__("list-file-types") - self.run.localize_output = self.__read_git_config_bool__("localize-output") - self.run.metrics = self.__read_git_config_bool__("metrics") - self.run.responsibilities = self.__read_git_config_bool__("responsibilities") - self.run.useweeks = self.__read_git_config_bool__("weeks") - - var = self.__read_git_config_string__("since") - if var[0]: - interval.set_since(var[1]) - - var = self.__read_git_config_string__("until") - if var[0]: - interval.set_until(var[1]) - - self.run.timeline = self.__read_git_config_bool__("timeline") - - if self.__read_git_config_bool__("grading"): - self.run.hard = True - self.run.list_file_types = True - self.run.metrics = True - self.run.responsibilities = True - self.run.timeline = True - self.run.useweeks = True + def __init__(self, run, repo, global_only=False): + self.run = run + self.repo = repo + self.global_only = global_only + + def __read_git_config__(self, variable): + previous_directory = os.getcwd() + os.chdir(self.repo) + setting = subprocess.Popen( + [_f for _f in ["git", "config", "--global" if self.global_only else "", "inspector." 
+ variable] if _f], + stdout=subprocess.PIPE, + ).stdout + os.chdir(previous_directory) + + try: + setting = setting.readlines()[0] + setting = setting.decode("utf-8", "replace").strip() + except IndexError: + setting = "" + + return setting + + def __read_git_config_bool__(self, variable): + try: + variable = self.__read_git_config__(variable) + return optval.get_boolean_argument(False if variable == "" else variable) + except optval.InvalidOptionArgument: + return False + + def __read_git_config_string__(self, variable): + string = self.__read_git_config__(variable) + return (True, string) if len(string) > 0 else (False, None) + + def read(self): + var = self.__read_git_config_string__("file-types") + if var[0]: + extensions.define(var[1]) + + var = self.__read_git_config_string__("exclude") + if var[0]: + filtering.add(var[1]) + + var = self.__read_git_config_string__("format") + if var[0] and not format.select(var[1]): + raise format.InvalidFormatError(_("specified output format not supported.")) + + self.run.hard = self.__read_git_config_bool__("hard") + self.run.list_file_types = self.__read_git_config_bool__("list-file-types") + self.run.localize_output = self.__read_git_config_bool__("localize-output") + self.run.metrics = self.__read_git_config_bool__("metrics") + self.run.responsibilities = self.__read_git_config_bool__("responsibilities") + self.run.useweeks = self.__read_git_config_bool__("weeks") + + var = self.__read_git_config_string__("since") + if var[0]: + interval.set_since(var[1]) + + var = self.__read_git_config_string__("until") + if var[0]: + interval.set_until(var[1]) + + self.run.timeline = self.__read_git_config_bool__("timeline") + + if self.__read_git_config_bool__("grading"): + self.run.hard = True + self.run.list_file_types = True + self.run.metrics = True + self.run.responsibilities = True + self.run.timeline = True + self.run.useweeks = True diff --git a/gitinspector/extensions.py b/gitinspector/extensions.py index 374e5438..882883c2 100644 --- a/gitinspector/extensions.py +++ b/gitinspector/extensions.py @@ -25,20 +25,20 @@ def get(): - return __extensions__ + return __extensions__ def define(string): - global __extensions__ - __extensions__ = string.split(",") + global __extensions__ + __extensions__ = string.split(",") def add_located(string): - if len(string) == 0: - __located_extensions__.add("*") - else: - __located_extensions__.add(string) + if len(string) == 0: + __located_extensions__.add("*") + else: + __located_extensions__.add(string) def get_located(): - return __located_extensions__ + return __located_extensions__ diff --git a/gitinspector/filtering.py b/gitinspector/filtering.py index 4fca2143..8cf20abc 100644 --- a/gitinspector/filtering.py +++ b/gitinspector/filtering.py @@ -22,83 +22,83 @@ import subprocess __filters__ = { - "file": [set(), set()], - "author": [set(), set()], - "email": [set(), set()], - "revision": [set(), set()], - "message": [set(), None], + "file": [set(), set()], + "author": [set(), set()], + "email": [set(), set()], + "revision": [set(), set()], + "message": [set(), None], } class InvalidRegExpError(ValueError): - def __init__(self, msg): - super(InvalidRegExpError, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidRegExpError, self).__init__(msg) + self.msg = msg def get(): - return __filters__ + return __filters__ def __add_one__(string): - for i in __filters__: - if (i + ":").lower() == string[0 : len(i) + 1].lower(): - __filters__[i][0].add(string[len(i) + 1 :]) - return - 
__filters__["file"][0].add(string) + for i in __filters__: + if (i + ":").lower() == string[0 : len(i) + 1].lower(): + __filters__[i][0].add(string[len(i) + 1 :]) + return + __filters__["file"][0].add(string) def add(string): - rules = string.split(",") - for rule in rules: - __add_one__(rule) + rules = string.split(",") + for rule in rules: + __add_one__(rule) def clear(): - for i in __filters__: - __filters__[i][0] = set() + for i in __filters__: + __filters__[i][0] = set() def get_filered(filter_type="file"): - return __filters__[filter_type][1] + return __filters__[filter_type][1] def has_filtered(): - for i in __filters__: - if __filters__[i][1]: - return True - return False + for i in __filters__: + if __filters__[i][1]: + return True + return False def __find_commit_message__(sha): - git_show_r = subprocess.Popen( - [_f for _f in ["git", "show", "-s", "--pretty=%B", "-w", sha] if _f], stdout=subprocess.PIPE - ).stdout + git_show_r = subprocess.Popen( + [_f for _f in ["git", "show", "-s", "--pretty=%B", "-w", sha] if _f], stdout=subprocess.PIPE + ).stdout - commit_message = git_show_r.read() - git_show_r.close() + commit_message = git_show_r.read() + git_show_r.close() - commit_message = commit_message.strip().decode("unicode_escape", "ignore") - commit_message = commit_message.encode("latin-1", "replace") - return commit_message.decode("utf-8", "replace") + commit_message = commit_message.strip().decode("unicode_escape", "ignore") + commit_message = commit_message.encode("latin-1", "replace") + return commit_message.decode("utf-8", "replace") def set_filtered(string, filter_type="file"): - string = string.strip() - - if len(string) > 0: - for i in __filters__[filter_type][0]: - search_for = string - - if filter_type == "message": - search_for = __find_commit_message__(string) - try: - if re.search(i, search_for) is not None: - if filter_type == "message": - __add_one__("revision:" + string) - else: - __filters__[filter_type][1].add(string) - return True - except: - raise InvalidRegExpError(_("invalid regular expression specified")) - return False + string = string.strip() + + if len(string) > 0: + for i in __filters__[filter_type][0]: + search_for = string + + if filter_type == "message": + search_for = __find_commit_message__(string) + try: + if re.search(i, search_for) is not None: + if filter_type == "message": + __add_one__("revision:" + string) + else: + __filters__[filter_type][1].add(string) + return True + except: + raise InvalidRegExpError(_("invalid regular expression specified")) + return False diff --git a/gitinspector/format.py b/gitinspector/format.py index 20448710..ca8f9125 100644 --- a/gitinspector/format.py +++ b/gitinspector/format.py @@ -34,41 +34,41 @@ class InvalidFormatError(Exception): - def __init__(self, msg): - super(InvalidFormatError, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidFormatError, self).__init__(msg) + self.msg = msg def select(format): - global __selected_format__ - __selected_format__ = format + global __selected_format__ + __selected_format__ = format - return format in __available_formats__ + return format in __available_formats__ def get_selected(): - return __selected_format__ + return __selected_format__ def is_interactive_format(): - return __selected_format__ == "text" + return __selected_format__ == "text" def __output_html_template__(name): - template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), name) - file_r = open(template_path, "rb") - template = file_r.read().decode("utf-8", 
"replace") + template_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), name) + file_r = open(template_path, "rb") + template = file_r.read().decode("utf-8", "replace") - file_r.close() - return template + file_r.close() + return template def __get_zip_file_content__(name, file_name="/html/flot.zip"): - zip_file = zipfile.ZipFile(basedir.get_basedir() + file_name, "r") - content = zip_file.read(name) + zip_file = zipfile.ZipFile(basedir.get_basedir() + file_name, "r") + content = zip_file.read(name) - zip_file.close() - return content.decode("utf-8", "replace") + zip_file.close() + return content.decode("utf-8", "replace") INFO_ONE_REPOSITORY = N_("Statistical information for the repository '{0}' was gathered on {1}.") @@ -76,98 +76,96 @@ def __get_zip_file_content__(name, file_name="/html/flot.zip"): def output_header(repos): - repos_string = ", ".join([repo.name for repo in repos]) - - if __selected_format__ == "html" or __selected_format__ == "htmlembedded": - base = basedir.get_basedir() - html_header = __output_html_template__(base + "/html/html.header") - tablesorter_js = __get_zip_file_content__("jquery.tablesorter.min.js", "/html/jquery.tablesorter.min.js.zip").encode( - "latin-1", "replace" - ) - tablesorter_js = tablesorter_js.decode("utf-8", "ignore") - flot_js = __get_zip_file_content__("jquery.flot.js") - pie_js = __get_zip_file_content__("jquery.flot.pie.js") - resize_js = __get_zip_file_content__("jquery.flot.resize.js") - - logo_file = open(base + "/html/gitinspector_piclet.png", "rb") - logo = logo_file.read() - logo_file.close() - logo = base64.b64encode(logo) - - if __selected_format__ == "htmlembedded": - jquery_js = ">" + __get_zip_file_content__("jquery.js") - else: - jquery_js = ' src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js">' - - print( - html_header.format( - title=_("Repository statistics for '{0}'").format(repos_string), - jquery=jquery_js, - jquery_tablesorter=tablesorter_js, - jquery_flot=flot_js, - jquery_flot_pie=pie_js, - jquery_flot_resize=resize_js, - logo=logo.decode("utf-8", "replace"), - logo_text=_( - "The output has been generated by {0} {1}. The statistical analysis tool" " for git repositories." 
- ).format('gitinspector', version.__version__), - repo_text=_(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( - repos_string, localization.get_date() - ), - show_minor_authors=_("Show minor authors"), - hide_minor_authors=_("Hide minor authors"), - show_minor_rows=_("Show rows with minor work"), - hide_minor_rows=_("Hide rows with minor work"), - ) - ) - elif __selected_format__ == "json": - print('{\n\t"gitinspector": {') - print('\t\t"version": "' + version.__version__ + '",') - - if len(repos) <= 1: - print('\t\t"repository": "' + repos_string + '",') - else: - repos_json = '\t\t"repositories": [ ' - - for repo in repos: - repos_json += '"' + repo.name + '", ' - - print(repos_json[:-2] + " ],") - - print('\t\t"report_date": "' + time.strftime("%Y/%m/%d") + '",') - - elif __selected_format__ == "xml": - print("") - print("\t" + version.__version__ + "") - - if len(repos) <= 1: - print("\t" + repos_string + "") - else: - print("\t") - - for repo in repos: - print("\t\t" + repo.name + "") - - print("\t") - - print("\t" + time.strftime("%Y/%m/%d") + "") - else: - print( - textwrap.fill( - _(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( - repos_string, localization.get_date() - ), - width=terminal.get_size()[0], - ) - ) + repos_string = ", ".join([repo.name for repo in repos]) + + if __selected_format__ == "html" or __selected_format__ == "htmlembedded": + base = basedir.get_basedir() + html_header = __output_html_template__(base + "/html/html.header") + tablesorter_js = __get_zip_file_content__("jquery.tablesorter.min.js", "/html/jquery.tablesorter.min.js.zip").encode( + "latin-1", "replace" + ) + tablesorter_js = tablesorter_js.decode("utf-8", "ignore") + flot_js = __get_zip_file_content__("jquery.flot.js") + pie_js = __get_zip_file_content__("jquery.flot.pie.js") + resize_js = __get_zip_file_content__("jquery.flot.resize.js") + + logo_file = open(base + "/html/gitinspector_piclet.png", "rb") + logo = logo_file.read() + logo_file.close() + logo = base64.b64encode(logo) + + if __selected_format__ == "htmlembedded": + jquery_js = ">" + __get_zip_file_content__("jquery.js") + else: + jquery_js = ' src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js">' + + print( + html_header.format( + title=_("Repository statistics for '{0}'").format(repos_string), + jquery=jquery_js, + jquery_tablesorter=tablesorter_js, + jquery_flot=flot_js, + jquery_flot_pie=pie_js, + jquery_flot_resize=resize_js, + logo=logo.decode("utf-8", "replace"), + logo_text=_("The output has been generated by {0} {1}. 
The statistical analysis tool" " for git repositories.").format( + 'gitinspector', version.__version__ + ), + repo_text=_(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format( + repos_string, localization.get_date() + ), + show_minor_authors=_("Show minor authors"), + hide_minor_authors=_("Hide minor authors"), + show_minor_rows=_("Show rows with minor work"), + hide_minor_rows=_("Hide rows with minor work"), + ) + ) + elif __selected_format__ == "json": + print('{\n\t"gitinspector": {') + print('\t\t"version": "' + version.__version__ + '",') + + if len(repos) <= 1: + print('\t\t"repository": "' + repos_string + '",') + else: + repos_json = '\t\t"repositories": [ ' + + for repo in repos: + repos_json += '"' + repo.name + '", ' + + print(repos_json[:-2] + " ],") + + print('\t\t"report_date": "' + time.strftime("%Y/%m/%d") + '",') + + elif __selected_format__ == "xml": + print("") + print("\t" + version.__version__ + "") + + if len(repos) <= 1: + print("\t" + repos_string + "") + else: + print("\t") + + for repo in repos: + print("\t\t" + repo.name + "") + + print("\t") + + print("\t" + time.strftime("%Y/%m/%d") + "") + else: + print( + textwrap.fill( + _(INFO_ONE_REPOSITORY if len(repos) <= 1 else INFO_MANY_REPOSITORIES).format(repos_string, localization.get_date()), + width=terminal.get_size()[0], + ) + ) def output_footer(): - if __selected_format__ == "html" or __selected_format__ == "htmlembedded": - base = basedir.get_basedir() - html_footer = __output_html_template__(base + "/html/html.footer") - print(html_footer) - elif __selected_format__ == "json": - print("\n\t}\n}") - elif __selected_format__ == "xml": - print("") + if __selected_format__ == "html" or __selected_format__ == "htmlembedded": + base = basedir.get_basedir() + html_footer = __output_html_template__(base + "/html/html.footer") + print(html_footer) + elif __selected_format__ == "json": + print("\n\t}\n}") + elif __selected_format__ == "xml": + print("") diff --git a/gitinspector/gitinspector.py b/gitinspector/gitinspector.py index 84d95832..0de6a412 100644 --- a/gitinspector/gitinspector.py +++ b/gitinspector/gitinspector.py @@ -40,200 +40,200 @@ class Runner(object): - def __init__(self): - self.hard = False - self.include_metrics = False - self.list_file_types = False - self.localize_output = False - self.responsibilities = False - self.grading = False - self.timeline = False - self.useweeks = False + def __init__(self): + self.hard = False + self.include_metrics = False + self.list_file_types = False + self.localize_output = False + self.responsibilities = False + self.grading = False + self.timeline = False + self.useweeks = False - def process(self, repos): - localization.check_compatibility(version.__version__) + def process(self, repos): + localization.check_compatibility(version.__version__) - if not self.localize_output: - localization.disable() + if not self.localize_output: + localization.disable() - terminal.skip_escapes(not sys.stdout.isatty()) - terminal.set_stdout_encoding() - previous_directory = os.getcwd() - summed_blames = Blame.__new__(Blame) - summed_changes = Changes.__new__(Changes) - summed_metrics = MetricsLogic.__new__(MetricsLogic) + terminal.skip_escapes(not sys.stdout.isatty()) + terminal.set_stdout_encoding() + previous_directory = os.getcwd() + summed_blames = Blame.__new__(Blame) + summed_changes = Changes.__new__(Changes) + summed_metrics = MetricsLogic.__new__(MetricsLogic) - for repo in repos: - os.chdir(repo.location) - repo = repo if len(repos) > 1 else None - 
changes = Changes(repo, self.hard) - summed_blames += Blame(repo, self.hard, self.useweeks, changes) - summed_changes += changes + for repo in repos: + os.chdir(repo.location) + repo = repo if len(repos) > 1 else None + changes = Changes(repo, self.hard) + summed_blames += Blame(repo, self.hard, self.useweeks, changes) + summed_changes += changes - if self.include_metrics: - summed_metrics += MetricsLogic() + if self.include_metrics: + summed_metrics += MetricsLogic() - if sys.stdout.isatty() and format.is_interactive_format(): - terminal.clear_row() - else: - os.chdir(previous_directory) + if sys.stdout.isatty() and format.is_interactive_format(): + terminal.clear_row() + else: + os.chdir(previous_directory) - format.output_header(repos) - outputable.output(ChangesOutput(summed_changes)) + format.output_header(repos) + outputable.output(ChangesOutput(summed_changes)) - if summed_changes.get_commits(): - outputable.output(BlameOutput(summed_changes, summed_blames)) + if summed_changes.get_commits(): + outputable.output(BlameOutput(summed_changes, summed_blames)) - if self.timeline: - outputable.output(TimelineOutput(summed_changes, self.useweeks)) + if self.timeline: + outputable.output(TimelineOutput(summed_changes, self.useweeks)) - if self.include_metrics: - outputable.output(MetricsOutput(summed_metrics)) + if self.include_metrics: + outputable.output(MetricsOutput(summed_metrics)) - if self.responsibilities: - outputable.output(ResponsibilitiesOutput(summed_changes, summed_blames)) + if self.responsibilities: + outputable.output(ResponsibilitiesOutput(summed_changes, summed_blames)) - outputable.output(FilteringOutput()) + outputable.output(FilteringOutput()) - if self.list_file_types: - outputable.output(ExtensionsOutput()) + if self.list_file_types: + outputable.output(ExtensionsOutput()) - format.output_footer() - os.chdir(previous_directory) + format.output_footer() + os.chdir(previous_directory) def __check_python_version__(): - if sys.version_info < (3, 6): - python_version = str(sys.version_info[0]) + "." + str(sys.version_info[1]) - sys.exit(_("gitinspector requires at least Python 3.6 to run (version {0} was found).").format(python_version)) + if sys.version_info < (3, 6): + python_version = str(sys.version_info[0]) + "." + str(sys.version_info[1]) + sys.exit(_("gitinspector requires at least Python 3.6 to run (version {0} was found).").format(python_version)) def __get_validated_git_repos__(repos_relative): - if not repos_relative: - repos_relative = "." + if not repos_relative: + repos_relative = "." - repos = [] + repos = [] - # Try to clone the repos or return the same directory and bail out. - for repo in repos_relative: - cloned_repo = clone.create(repo) + # Try to clone the repos or return the same directory and bail out. 
+ for repo in repos_relative: + cloned_repo = clone.create(repo) - if cloned_repo.name is None: - cloned_repo.location = basedir.get_basedir_git(cloned_repo.location) - cloned_repo.name = os.path.basename(cloned_repo.location) + if cloned_repo.name is None: + cloned_repo.location = basedir.get_basedir_git(cloned_repo.location) + cloned_repo.name = os.path.basename(cloned_repo.location) - repos.append(cloned_repo) + repos.append(cloned_repo) - return repos + return repos def main(argv=None): - terminal.check_terminal_encoding() - terminal.set_stdin_encoding() - argv = terminal.convert_command_line_to_utf8() if argv is None else argv - run = Runner() - repos = [] - - try: - opts, args = optval.gnu_getopt( - argv[1:], - "f:F:hHlLmrTwx:", - [ - "exclude=", - "file-types=", - "format=", - "hard:true", - "help", - "list-file-types:true", - "localize-output:true", - "metrics:true", - "responsibilities:true", - "since=", - "grading:true", - "timeline:true", - "until=", - "version", - "weeks:true", - ], - ) - repos = __get_validated_git_repos__(set(args)) - - # We need the repos above to be set before we read the git config. - GitConfig(run, repos[-1].location).read() - clear_x_on_next_pass = True - - for o, a in opts: - if o in ("-h", "--help"): - help.output() - sys.exit(0) - elif o in ("-f", "--file-types"): - extensions.define(a) - elif o in ("-F", "--format"): - if not format.select(a): - raise format.InvalidFormatError(_("specified output format not supported.")) - elif o == "-H": - run.hard = True - elif o == "--hard": - run.hard = optval.get_boolean_argument(a) - elif o == "-l": - run.list_file_types = True - elif o == "--list-file-types": - run.list_file_types = optval.get_boolean_argument(a) - elif o == "-L": - run.localize_output = True - elif o == "--localize-output": - run.localize_output = optval.get_boolean_argument(a) - elif o == "-m": - run.include_metrics = True - elif o == "--metrics": - run.include_metrics = optval.get_boolean_argument(a) - elif o == "-r": - run.responsibilities = True - elif o == "--responsibilities": - run.responsibilities = optval.get_boolean_argument(a) - elif o == "--since": - interval.set_since(a) - elif o == "--version": - version.output() - sys.exit(0) - elif o == "--grading": - grading = optval.get_boolean_argument(a) - run.include_metrics = grading - run.list_file_types = grading - run.responsibilities = grading - run.grading = grading - run.hard = grading - run.timeline = grading - run.useweeks = grading - elif o == "-T": - run.timeline = True - elif o == "--timeline": - run.timeline = optval.get_boolean_argument(a) - elif o == "--until": - interval.set_until(a) - elif o == "-w": - run.useweeks = True - elif o == "--weeks": - run.useweeks = optval.get_boolean_argument(a) - elif o in ("-x", "--exclude"): - if clear_x_on_next_pass: - clear_x_on_next_pass = False - filtering.clear() - filtering.add(a) - - __check_python_version__() - run.process(repos) - - except (filtering.InvalidRegExpError, format.InvalidFormatError, optval.InvalidOptionArgument, getopt.error) as exception: - print(sys.argv[0], "\b:", exception.msg, file=sys.stderr) - print(_("Try `{0} --help' for more information.").format(sys.argv[0]), file=sys.stderr) - sys.exit(2) + terminal.check_terminal_encoding() + terminal.set_stdin_encoding() + argv = terminal.convert_command_line_to_utf8() if argv is None else argv + run = Runner() + repos = [] + + try: + opts, args = optval.gnu_getopt( + argv[1:], + "f:F:hHlLmrTwx:", + [ + "exclude=", + "file-types=", + "format=", + "hard:true", + "help", 
+ "list-file-types:true", + "localize-output:true", + "metrics:true", + "responsibilities:true", + "since=", + "grading:true", + "timeline:true", + "until=", + "version", + "weeks:true", + ], + ) + repos = __get_validated_git_repos__(set(args)) + + # We need the repos above to be set before we read the git config. + GitConfig(run, repos[-1].location).read() + clear_x_on_next_pass = True + + for o, a in opts: + if o in ("-h", "--help"): + help.output() + sys.exit(0) + elif o in ("-f", "--file-types"): + extensions.define(a) + elif o in ("-F", "--format"): + if not format.select(a): + raise format.InvalidFormatError(_("specified output format not supported.")) + elif o == "-H": + run.hard = True + elif o == "--hard": + run.hard = optval.get_boolean_argument(a) + elif o == "-l": + run.list_file_types = True + elif o == "--list-file-types": + run.list_file_types = optval.get_boolean_argument(a) + elif o == "-L": + run.localize_output = True + elif o == "--localize-output": + run.localize_output = optval.get_boolean_argument(a) + elif o == "-m": + run.include_metrics = True + elif o == "--metrics": + run.include_metrics = optval.get_boolean_argument(a) + elif o == "-r": + run.responsibilities = True + elif o == "--responsibilities": + run.responsibilities = optval.get_boolean_argument(a) + elif o == "--since": + interval.set_since(a) + elif o == "--version": + version.output() + sys.exit(0) + elif o == "--grading": + grading = optval.get_boolean_argument(a) + run.include_metrics = grading + run.list_file_types = grading + run.responsibilities = grading + run.grading = grading + run.hard = grading + run.timeline = grading + run.useweeks = grading + elif o == "-T": + run.timeline = True + elif o == "--timeline": + run.timeline = optval.get_boolean_argument(a) + elif o == "--until": + interval.set_until(a) + elif o == "-w": + run.useweeks = True + elif o == "--weeks": + run.useweeks = optval.get_boolean_argument(a) + elif o in ("-x", "--exclude"): + if clear_x_on_next_pass: + clear_x_on_next_pass = False + filtering.clear() + filtering.add(a) + + __check_python_version__() + run.process(repos) + + except (filtering.InvalidRegExpError, format.InvalidFormatError, optval.InvalidOptionArgument, getopt.error) as exception: + print(sys.argv[0], "\b:", exception.msg, file=sys.stderr) + print(_("Try `{0} --help' for more information.").format(sys.argv[0]), file=sys.stderr) + sys.exit(2) @atexit.register def cleanup(): - clone.delete() + clone.delete() if __name__ == "__main__": - main() + main() diff --git a/gitinspector/gravatar.py b/gitinspector/gravatar.py index 2b56f6f8..20f78cf1 100644 --- a/gitinspector/gravatar.py +++ b/gitinspector/gravatar.py @@ -21,21 +21,21 @@ import hashlib try: - from urllib.parse import urlencode + from urllib.parse import urlencode except: - from urllib.parse import urlencode + from urllib.parse import urlencode from . 
import format def get_url(email, size=20): - md5hash = hashlib.md5(email.encode("utf-8").lower().strip()).hexdigest() - base_url = "https://www.gravatar.com/avatar/" + md5hash - params = None + md5hash = hashlib.md5(email.encode("utf-8").lower().strip()).hexdigest() + base_url = "https://www.gravatar.com/avatar/" + md5hash + params = None - if format.get_selected() == "html": - params = {"default": "identicon", "size": size} - elif format.get_selected() == "xml" or format.get_selected() == "json": - params = {"default": "identicon"} + if format.get_selected() == "html": + params = {"default": "identicon", "size": size} + elif format.get_selected() == "xml" or format.get_selected() == "json": + params = {"default": "identicon"} - return base_url + "?" + urlencode(params) + return base_url + "?" + urlencode(params) diff --git a/gitinspector/help.py b/gitinspector/help.py index c7178ecc..9a5f85f1 100644 --- a/gitinspector/help.py +++ b/gitinspector/help.py @@ -24,7 +24,7 @@ __doc__ = _( - """Usage: {0} [OPTION]... [REPOSITORY]... + """Usage: {0} [OPTION]... [REPOSITORY]... List information about the repository in REPOSITORY. If no repository is specified, the current directory is used. If multiple repositories are given, information will be merged into a unified statistical report. @@ -81,4 +81,4 @@ def output(): - print(__doc__.format(sys.argv[0], ",".join(DEFAULT_EXTENSIONS), ",".join(__available_formats__))) + print(__doc__.format(sys.argv[0], ",".join(DEFAULT_EXTENSIONS), ",".join(__available_formats__))) diff --git a/gitinspector/interval.py b/gitinspector/interval.py index 5f458556..43e3366b 100644 --- a/gitinspector/interval.py +++ b/gitinspector/interval.py @@ -19,9 +19,9 @@ try: - from shlex import quote + from shlex import quote except ImportError: - from pipes import quote + from pipes import quote __since__ = "" @@ -31,31 +31,31 @@ def has_interval(): - return __since__ + __until__ != "" + return __since__ + __until__ != "" def get_since(): - return __since__ + return __since__ def set_since(since): - global __since__ - __since__ = "--since=" + quote(since) + global __since__ + __since__ = "--since=" + quote(since) def get_until(): - return __until__ + return __until__ def set_until(until): - global __until__ - __until__ = "--until=" + quote(until) + global __until__ + __until__ = "--until=" + quote(until) def get_ref(): - return __ref__ + return __ref__ def set_ref(ref): - global __ref__ - __ref__ = ref + global __ref__ + __ref__ = ref diff --git a/gitinspector/localization.py b/gitinspector/localization.py index a282d536..1827c87c 100644 --- a/gitinspector/localization.py +++ b/gitinspector/localization.py @@ -33,81 +33,81 @@ # Dummy function used to handle string constants def N_(message): - return message + return message def init(): - global __enabled__ - global __installed__ - global __translation__ + global __enabled__ + global __installed__ + global __translation__ - if not __installed__: - try: - locale.setlocale(locale.LC_ALL, "") - except locale.Error: - __translation__ = gettext.NullTranslations() - else: - lang = locale.getlocale() + if not __installed__: + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error: + __translation__ = gettext.NullTranslations() + else: + lang = locale.getlocale() - # Fix for non-POSIX-compliant systems (Windows et al.). - if os.getenv("LANG") is None: - lang = locale.getdefaultlocale() + # Fix for non-POSIX-compliant systems (Windows et al.). 
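Since __check_python_version__() above now enforces Python 3.6, the try/except around the urlencode import in gravatar.py is redundant after the 2to3 conversion (both branches import the same name); a bare from urllib.parse import urlencode would do. For reference, a minimal sketch of what get_url() computes, with a made-up address and the per-format parameter split collapsed into a flag:

    import hashlib
    from urllib.parse import urlencode

    def gravatar_url(email, size=20, want_size=True):
        # Gravatar hashes the lower-cased, stripped address; the patch lower-cases
        # the encoded bytes instead, which is equivalent for ASCII addresses.
        md5hash = hashlib.md5(email.strip().lower().encode("utf-8")).hexdigest()
        params = {"default": "identicon"}
        if want_size:                      # the html format also passes the avatar size
            params["size"] = size
        return "https://www.gravatar.com/avatar/" + md5hash + "?" + urlencode(params)

    print(gravatar_url("someone@example.com"))

Note that get_url() leaves params as None for the plain-text format, so it is presumably only reached from the html, xml and json outputs; urlencode(None) would raise a TypeError otherwise.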
+ if os.getenv("LANG") is None: + lang = locale.getdefaultlocale() - if lang[0]: - os.environ["LANG"] = lang[0] + if lang[0]: + os.environ["LANG"] = lang[0] - if lang[0] is not None: - filename = basedir.get_basedir() + "/translations/messages_%s.mo" % lang[0][0:2] + if lang[0] is not None: + filename = basedir.get_basedir() + "/translations/messages_%s.mo" % lang[0][0:2] - try: - __translation__ = gettext.GNUTranslations(open(filename, "rb")) - except IOError: - __translation__ = gettext.NullTranslations() - else: - print("WARNING: Localization disabled because the system language could not be determined.", file=sys.stderr) - __translation__ = gettext.NullTranslations() + try: + __translation__ = gettext.GNUTranslations(open(filename, "rb")) + except IOError: + __translation__ = gettext.NullTranslations() + else: + print("WARNING: Localization disabled because the system language could not be determined.", file=sys.stderr) + __translation__ = gettext.NullTranslations() - __enabled__ = True - __installed__ = True - __translation__.install() + __enabled__ = True + __installed__ = True + __translation__.install() def check_compatibility(version): - if isinstance(__translation__, gettext.GNUTranslations): - header_pattern = re.compile("^([^:\n]+): *(.*?) *$", re.MULTILINE) - header_entries = dict(header_pattern.findall(_(""))) + if isinstance(__translation__, gettext.GNUTranslations): + header_pattern = re.compile("^([^:\n]+): *(.*?) *$", re.MULTILINE) + header_entries = dict(header_pattern.findall(_(""))) - if header_entries["Project-Id-Version"] != "gitinspector {0}".format(version): - print( - "WARNING: The translation for your system locale is not up to date with the current gitinspector " - "version. The current maintainer of this locale is {0}.".format(header_entries["Last-Translator"]), - file=sys.stderr, - ) + if header_entries["Project-Id-Version"] != "gitinspector {0}".format(version): + print( + "WARNING: The translation for your system locale is not up to date with the current gitinspector " + "version. The current maintainer of this locale is {0}.".format(header_entries["Last-Translator"]), + file=sys.stderr, + ) def get_date(): - if __enabled__ and isinstance(__translation__, gettext.GNUTranslations): - date = time.strftime("%x") + if __enabled__ and isinstance(__translation__, gettext.GNUTranslations): + date = time.strftime("%x") - if hasattr(date, "decode"): - date = date.decode("utf-8", "replace") + if hasattr(date, "decode"): + date = date.decode("utf-8", "replace") - return date - else: - return time.strftime("%Y/%m/%d") + return date + else: + return time.strftime("%Y/%m/%d") def enable(): - if isinstance(__translation__, gettext.GNUTranslations): - __translation__.install(True) + if isinstance(__translation__, gettext.GNUTranslations): + __translation__.install(True) - global __enabled__ - __enabled__ = True + global __enabled__ + __enabled__ = True def disable(): - global __enabled__ - __enabled__ = False + global __enabled__ + __enabled__ = False - if __installed__: - gettext.NullTranslations().install() + if __installed__: + gettext.NullTranslations().install() diff --git a/gitinspector/metrics.py b/gitinspector/metrics.py index ee969bd5..079874b0 100644 --- a/gitinspector/metrics.py +++ b/gitinspector/metrics.py @@ -24,46 +24,46 @@ from . 
import comment, filtering, interval __metric_eloc__ = { - "java": 500, - "c": 500, - "cpp": 500, - "cs": 500, - "h": 300, - "hpp": 300, - "php": 500, - "py": 500, - "glsl": 1000, - "rb": 500, - "js": 500, - "sql": 1000, - "xml": 1000, + "java": 500, + "c": 500, + "cpp": 500, + "cs": 500, + "h": 300, + "hpp": 300, + "php": 500, + "py": 500, + "glsl": 1000, + "rb": 500, + "js": 500, + "sql": 1000, + "xml": 1000, } __metric_cc_tokens__ = [ - [ - ["java", "js", "c", "cc", "cpp"], - ["else", r"for\s+\(.*\)", r"if\s+\(.*\)", r"case\s+\w+:", "default:", r"while\s+\(.*\)"], - ["assert", "break", "continue", "return"], - ], - [ - ["cs"], - [ - "else", - r"for\s+\(.*\)", - r"foreach\s+\(.*\)", - r"goto\s+\w+:", - r"if\s+\(.*\)", - r"case\s+\w+:", - "default:", - r"while\s+\(.*\)", - ], - ["assert", "break", "continue", "return"], - ], - [ - ["py"], - [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"], - [r"^\s+assert", "break", "continue", "return"], - ], + [ + ["java", "js", "c", "cc", "cpp"], + ["else", r"for\s+\(.*\)", r"if\s+\(.*\)", r"case\s+\w+:", "default:", r"while\s+\(.*\)"], + ["assert", "break", "continue", "return"], + ], + [ + ["cs"], + [ + "else", + r"for\s+\(.*\)", + r"foreach\s+\(.*\)", + r"goto\s+\w+:", + r"if\s+\(.*\)", + r"case\s+\w+:", + "default:", + r"while\s+\(.*\)", + ], + ["assert", "break", "continue", "return"], + ], + [ + ["py"], + [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"], + [r"^\s+assert", "break", "continue", "return"], + ], ] METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD = 50 @@ -71,89 +71,89 @@ class MetricsLogic(object): - def __init__(self): - self.eloc = {} - self.cyclomatic_complexity = {} - self.cyclomatic_complexity_density = {} - - ls_tree_p = subprocess.Popen( - ["git", "ls-tree", "--name-only", "-r", interval.get_ref()], stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - lines = ls_tree_p.communicate()[0].splitlines() - ls_tree_p.stdout.close() - - if ls_tree_p.returncode == 0: - for i in lines: - i = i.strip().decode("unicode_escape", "ignore") - i = i.encode("latin-1", "replace") - i = i.decode("utf-8", "replace").strip('"').strip("'").strip() - - if FileDiff.is_valid_extension(i) and not filtering.set_filtered(FileDiff.get_filename(i)): - file_r = subprocess.Popen( - ["git", "show", interval.get_ref() + ":{0}".format(i.strip())], stdout=subprocess.PIPE - ).stdout.readlines() - - extension = FileDiff.get_extension(i) - lines = MetricsLogic.get_eloc(file_r, extension) - cycc = MetricsLogic.get_cyclomatic_complexity(file_r, extension) - - if __metric_eloc__.get(extension, None) is not None and __metric_eloc__[extension] < lines: - self.eloc[i.strip()] = lines - - if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cycc: - self.cyclomatic_complexity[i.strip()] = cycc - - if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cycc / float(lines): - self.cyclomatic_complexity_density[i.strip()] = cycc / float(lines) - - def __iadd__(self, other): - try: - self.eloc.update(other.eloc) - self.cyclomatic_complexity.update(other.cyclomatic_complexity) - self.cyclomatic_complexity_density.update(other.cyclomatic_complexity_density) - return self - except AttributeError: - return other - - @staticmethod - def get_cyclomatic_complexity(file_r, extension): - is_inside_comment = False - cc_counter = 0 - - entry_tokens = None - exit_tokens = None - - for i in __metric_cc_tokens__: - if extension in i[0]: - entry_tokens = i[1] - exit_tokens = i[2] - - if entry_tokens or exit_tokens: - for i in 
file_r: - i = i.decode("utf-8", "replace") - (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) - - if not is_inside_comment and not comment.is_comment(extension, i): - for j in entry_tokens: - if re.search(j, i, re.DOTALL): - cc_counter += 2 - for j in exit_tokens: - if re.search(j, i, re.DOTALL): - cc_counter += 1 - return cc_counter - - return -1 - - @staticmethod - def get_eloc(file_r, extension): - is_inside_comment = False - eloc_counter = 0 - - for i in file_r: - i = i.decode("utf-8", "replace") - (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) - - if not is_inside_comment and not comment.is_comment(extension, i): - eloc_counter += 1 - - return eloc_counter + def __init__(self): + self.eloc = {} + self.cyclomatic_complexity = {} + self.cyclomatic_complexity_density = {} + + ls_tree_p = subprocess.Popen( + ["git", "ls-tree", "--name-only", "-r", interval.get_ref()], stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + lines = ls_tree_p.communicate()[0].splitlines() + ls_tree_p.stdout.close() + + if ls_tree_p.returncode == 0: + for i in lines: + i = i.strip().decode("unicode_escape", "ignore") + i = i.encode("latin-1", "replace") + i = i.decode("utf-8", "replace").strip('"').strip("'").strip() + + if FileDiff.is_valid_extension(i) and not filtering.set_filtered(FileDiff.get_filename(i)): + file_r = subprocess.Popen( + ["git", "show", interval.get_ref() + ":{0}".format(i.strip())], stdout=subprocess.PIPE + ).stdout.readlines() + + extension = FileDiff.get_extension(i) + lines = MetricsLogic.get_eloc(file_r, extension) + cycc = MetricsLogic.get_cyclomatic_complexity(file_r, extension) + + if __metric_eloc__.get(extension, None) is not None and __metric_eloc__[extension] < lines: + self.eloc[i.strip()] = lines + + if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cycc: + self.cyclomatic_complexity[i.strip()] = cycc + + if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cycc / float(lines): + self.cyclomatic_complexity_density[i.strip()] = cycc / float(lines) + + def __iadd__(self, other): + try: + self.eloc.update(other.eloc) + self.cyclomatic_complexity.update(other.cyclomatic_complexity) + self.cyclomatic_complexity_density.update(other.cyclomatic_complexity_density) + return self + except AttributeError: + return other + + @staticmethod + def get_cyclomatic_complexity(file_r, extension): + is_inside_comment = False + cc_counter = 0 + + entry_tokens = None + exit_tokens = None + + for i in __metric_cc_tokens__: + if extension in i[0]: + entry_tokens = i[1] + exit_tokens = i[2] + + if entry_tokens or exit_tokens: + for i in file_r: + i = i.decode("utf-8", "replace") + (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) + + if not is_inside_comment and not comment.is_comment(extension, i): + for j in entry_tokens: + if re.search(j, i, re.DOTALL): + cc_counter += 2 + for j in exit_tokens: + if re.search(j, i, re.DOTALL): + cc_counter += 1 + return cc_counter + + return -1 + + @staticmethod + def get_eloc(file_r, extension): + is_inside_comment = False + eloc_counter = 0 + + for i in file_r: + i = i.decode("utf-8", "replace") + (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i) + + if not is_inside_comment and not comment.is_comment(extension, i): + eloc_counter += 1 + + return eloc_counter diff --git a/gitinspector/optval.py b/gitinspector/optval.py index 558e3a2f..5fd09690 100644 --- a/gitinspector/optval.py +++ 
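get_cyclomatic_complexity() above is a plain regex token count over the bytes returned by git show: +2 for every branching token, +1 for every exit token, and -1 when no token table exists for the extension. A self-contained re-run of that weighting, using the "py" token table from __metric_cc_tokens__ on a tiny made-up snippet (the real method additionally skips comment lines via the comment module):

    import re

    # Token tables copied from __metric_cc_tokens__ for the "py" extension.
    ENTRY_TOKENS = [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"]
    EXIT_TOKENS = [r"^\s+assert", "break", "continue", "return"]

    def rough_cyclomatic_complexity(lines):
        # Same weighting as MetricsLogic.get_cyclomatic_complexity().
        score = 0
        for line in lines:
            score += 2 * sum(1 for token in ENTRY_TOKENS if re.search(token, line, re.DOTALL))
            score += 1 * sum(1 for token in EXIT_TOKENS if re.search(token, line, re.DOTALL))
        return score

    sample = [
        "def f(x):\n",
        "    if x > 0:\n",
        "        return x\n",
        "    else:\n",
        "        return -x\n",
    ]
    print(rough_cyclomatic_complexity(sample))   # 2 + 1 + 2 + 1 = 6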
b/gitinspector/optval.py @@ -22,51 +22,51 @@ class InvalidOptionArgument(Exception): - def __init__(self, msg): - super(InvalidOptionArgument, self).__init__(msg) - self.msg = msg + def __init__(self, msg): + super(InvalidOptionArgument, self).__init__(msg) + self.msg = msg def __find_arg_in_options__(arg, options): - for opt in options: - if opt[0].find(arg) == 0: - return opt + for opt in options: + if opt[0].find(arg) == 0: + return opt - return None + return None def __find_options_to_extend__(long_options): - options_to_extend = [] + options_to_extend = [] - for num, arg in enumerate(long_options): - arg = arg.split(":") - if len(arg) == 2: - long_options[num] = arg[0] + "=" - options_to_extend.append(("--" + arg[0], arg[1])) + for num, arg in enumerate(long_options): + arg = arg.split(":") + if len(arg) == 2: + long_options[num] = arg[0] + "=" + options_to_extend.append(("--" + arg[0], arg[1])) - return options_to_extend + return options_to_extend # This is a duplicate of gnu_getopt, but with support for optional arguments in long options, in the form; "arg:default_value". def gnu_getopt(args, options, long_options): - options_to_extend = __find_options_to_extend__(long_options) + options_to_extend = __find_options_to_extend__(long_options) - for num, arg in enumerate(args): - opt = __find_arg_in_options__(arg, options_to_extend) - if opt: - args[num] = arg + "=" + opt[1] + for num, arg in enumerate(args): + opt = __find_arg_in_options__(arg, options_to_extend) + if opt: + args[num] = arg + "=" + opt[1] - return getopt.gnu_getopt(args, options, long_options) + return getopt.gnu_getopt(args, options, long_options) def get_boolean_argument(arg): - if isinstance(arg, bool): - return arg - elif arg is None or arg.lower() == "false" or arg.lower() == "f" or arg == "0": - return False - elif arg.lower() == "true" or arg.lower() == "t" or arg == "1": - return True - - raise InvalidOptionArgument(_("The given option argument is not a valid boolean.")) + if isinstance(arg, bool): + return arg + elif arg is None or arg.lower() == "false" or arg.lower() == "f" or arg == "0": + return False + elif arg.lower() == "true" or arg.lower() == "t" or arg == "1": + return True + + raise InvalidOptionArgument(_("The given option argument is not a valid boolean.")) diff --git a/gitinspector/output/blameoutput.py b/gitinspector/output/blameoutput.py index e8ac7178..fb035571 100644 --- a/gitinspector/output/blameoutput.py +++ b/gitinspector/output/blameoutput.py @@ -27,159 +27,155 @@ from .outputable import Outputable BLAME_INFO_TEXT = N_( - "Below are the number of rows from each author that have survived and are still " "intact in the current revision" + "Below are the number of rows from each author that have survived and are still " "intact in the current revision" ) class BlameOutput(Outputable): - def __init__(self, changes, blame): - if format.is_interactive_format(): - print("") - - self.changes = changes - self.blame = blame - Outputable.__init__(self) - - def output_html(self): - blame_xml = '
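optval.gnu_getopt() above is what makes the "name:default" entries in main()'s option table work ("hard:true", "metrics:true", and so on): each such long option is registered as value-taking (name=) and a bare --name on the command line is rewritten to --name=default before standard getopt runs. A compact sketch of the same rewrite; unlike __find_arg_in_options__, which accepts unambiguous prefixes via str.find, this version only matches exact option names:

    import getopt

    def gnu_getopt_with_defaults(args, shortopts, long_options):
        # "name:default" registers "name=" and rewrites a bare "--name"
        # to "--name=default" before delegating to the standard parser.
        defaults = {}
        for num, spec in enumerate(long_options):
            parts = spec.split(":")
            if len(parts) == 2:
                long_options[num] = parts[0] + "="
                defaults["--" + parts[0]] = parts[1]
        args = [arg + "=" + defaults[arg] if arg in defaults else arg for arg in args]
        return getopt.gnu_getopt(args, shortopts, long_options)

    opts, rest = gnu_getopt_with_defaults(["--hard", "--timeline=false", "repo"], "", ["hard:true", "timeline:true"])
    print(opts, rest)   # [('--hard', 'true'), ('--timeline', 'false')] ['repo']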
' - blame_xml += "

" + _(BLAME_INFO_TEXT) + '.

' - blame_xml += "".format( - _("Author"), _("Rows"), _("Stability"), _("Age"), _("% in comments") - ) - blame_xml += "" - chart_data = "" - blames = sorted(self.blame.get_summed_blames().items()) - total_blames = 0 - - for i in blames: - total_blames += i[1].rows - - for i, entry in enumerate(blames): - work_percentage = str("{0:.2f}".format(100.0 * entry[1].rows / total_blames)) - blame_xml += "' if i % 2 == 1 else ">") - - if format.get_selected() == "html": - author_email = self.changes.get_latest_email_by_author(entry[0]) - blame_xml += ''.format(gravatar.get_url(author_email), entry[0]) - else: - blame_xml += "" - - blame_xml += "" - blame_xml += "") - blame_xml += "" - blame_xml += "" - blame_xml += '" - blame_xml += "" - chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry[0]), work_percentage) - - if blames[-1] != entry: - chart_data += ", " - - blame_xml += '
{0} {1} {2} {3} {4}
{1}" + entry[0] + "" + str(entry[1].rows) + "" + ("{0:.1f}".format(Blame.get_stability(entry[0], entry[1].rows, self.changes)) + "" + "{0:.1f}".format(float(entry[1].skew) / entry[1].rows) + "" + "{0:.2f}".format(100.0 * entry[1].comments / entry[1].rows) + "' + work_percentage + "
 
' - blame_xml += '
' - blame_xml += '
" - - print(blame_xml) - - def output_json(self): - message_json = '\t\t\t"message": "' + _(BLAME_INFO_TEXT) + '",\n' - blame_json = "" - - for i in sorted(self.blame.get_summed_blames().items()): - author_email = self.changes.get_latest_email_by_author(i[0]) - - name_json = '\t\t\t\t"name": "' + i[0] + '",\n' - email_json = '\t\t\t\t"email": "' + author_email + '",\n' - gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' - rows_json = '\t\t\t\t"rows": ' + str(i[1].rows) + ",\n" - stability_json = ( - '\t\t\t\t"stability": ' + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + ",\n" - ) - age_json = '\t\t\t\t"age": ' + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + ",\n" - percentage_in_comments_json = ( - '\t\t\t\t"percentage_in_comments": ' + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + "\n" - ) - blame_json += ( - "{\n" - + name_json - + email_json - + gravatar_json - + rows_json - + stability_json - + age_json - + percentage_in_comments_json - + "\t\t\t}," - ) - else: - blame_json = blame_json[:-1] - - print(',\n\t\t"blame": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + blame_json + "]\n\t\t}", end="") - - def output_text(self): - if sys.stdout.isatty() and format.is_interactive_format(): - terminal.clear_row() - - print(textwrap.fill(_(BLAME_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") - terminal.printb( - terminal.ljust(_("Author"), 21) - + terminal.rjust(_("Rows"), 10) - + terminal.rjust(_("Stability"), 15) - + terminal.rjust(_("Age"), 13) - + terminal.rjust(_("% in comments"), 20) - ) - - for i in sorted(self.blame.get_summed_blames().items()): - print(terminal.ljust(i[0], 20)[0 : 20 - terminal.get_excess_column_count(i[0])], end=" ") - print(str(i[1].rows).rjust(10), end=" ") - print("{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)).rjust(14), end=" ") - print("{0:.1f}".format(float(i[1].skew) / i[1].rows).rjust(12), end=" ") - print("{0:.2f}".format(100.0 * i[1].comments / i[1].rows).rjust(19)) - - def output_xml(self): - message_xml = "\t\t" + _(BLAME_INFO_TEXT) + "\n" - blame_xml = "" - - for i in sorted(self.blame.get_summed_blames().items()): - author_email = self.changes.get_latest_email_by_author(i[0]) - - name_xml = "\t\t\t\t" + i[0] + "\n" - email_xml = "\t\t\t\t" + author_email + "\n" - gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - rows_xml = "\t\t\t\t" + str(i[1].rows) + "\n" - stability_xml = ( - "\t\t\t\t" + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + "\n" - ) - age_xml = "\t\t\t\t" + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + "\n" - percentage_in_comments_xml = ( - "\t\t\t\t" - + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) - + "\n" - ) - blame_xml += ( - "\t\t\t\n" - + name_xml - + email_xml - + gravatar_xml - + rows_xml - + stability_xml - + age_xml - + percentage_in_comments_xml - + "\t\t\t\n" - ) - - print("\t\n" + message_xml + "\t\t\n" + blame_xml + "\t\t\n\t") + def __init__(self, changes, blame): + if format.is_interactive_format(): + print("") + + self.changes = changes + self.blame = blame + Outputable.__init__(self) + + def output_html(self): + blame_xml = '
' + blame_xml += "

" + _(BLAME_INFO_TEXT) + '.

' + blame_xml += "".format( + _("Author"), _("Rows"), _("Stability"), _("Age"), _("% in comments") + ) + blame_xml += "" + chart_data = "" + blames = sorted(self.blame.get_summed_blames().items()) + total_blames = 0 + + for i in blames: + total_blames += i[1].rows + + for i, entry in enumerate(blames): + work_percentage = str("{0:.2f}".format(100.0 * entry[1].rows / total_blames)) + blame_xml += "' if i % 2 == 1 else ">") + + if format.get_selected() == "html": + author_email = self.changes.get_latest_email_by_author(entry[0]) + blame_xml += ''.format(gravatar.get_url(author_email), entry[0]) + else: + blame_xml += "" + + blame_xml += "" + blame_xml += "") + blame_xml += "" + blame_xml += "" + blame_xml += '" + blame_xml += "" + chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry[0]), work_percentage) + + if blames[-1] != entry: + chart_data += ", " + + blame_xml += '
{0} {1} {2} {3} {4}
{1}" + entry[0] + "" + str(entry[1].rows) + "" + ("{0:.1f}".format(Blame.get_stability(entry[0], entry[1].rows, self.changes)) + "" + "{0:.1f}".format(float(entry[1].skew) / entry[1].rows) + "" + "{0:.2f}".format(100.0 * entry[1].comments / entry[1].rows) + "' + work_percentage + "
 
' + blame_xml += '
' + blame_xml += '
" + + print(blame_xml) + + def output_json(self): + message_json = '\t\t\t"message": "' + _(BLAME_INFO_TEXT) + '",\n' + blame_json = "" + + for i in sorted(self.blame.get_summed_blames().items()): + author_email = self.changes.get_latest_email_by_author(i[0]) + + name_json = '\t\t\t\t"name": "' + i[0] + '",\n' + email_json = '\t\t\t\t"email": "' + author_email + '",\n' + gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + rows_json = '\t\t\t\t"rows": ' + str(i[1].rows) + ",\n" + stability_json = '\t\t\t\t"stability": ' + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + ",\n" + age_json = '\t\t\t\t"age": ' + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + ",\n" + percentage_in_comments_json = ( + '\t\t\t\t"percentage_in_comments": ' + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + "\n" + ) + blame_json += ( + "{\n" + + name_json + + email_json + + gravatar_json + + rows_json + + stability_json + + age_json + + percentage_in_comments_json + + "\t\t\t}," + ) + else: + blame_json = blame_json[:-1] + + print(',\n\t\t"blame": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + blame_json + "]\n\t\t}", end="") + + def output_text(self): + if sys.stdout.isatty() and format.is_interactive_format(): + terminal.clear_row() + + print(textwrap.fill(_(BLAME_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") + terminal.printb( + terminal.ljust(_("Author"), 21) + + terminal.rjust(_("Rows"), 10) + + terminal.rjust(_("Stability"), 15) + + terminal.rjust(_("Age"), 13) + + terminal.rjust(_("% in comments"), 20) + ) + + for i in sorted(self.blame.get_summed_blames().items()): + print(terminal.ljust(i[0], 20)[0 : 20 - terminal.get_excess_column_count(i[0])], end=" ") + print(str(i[1].rows).rjust(10), end=" ") + print("{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)).rjust(14), end=" ") + print("{0:.1f}".format(float(i[1].skew) / i[1].rows).rjust(12), end=" ") + print("{0:.2f}".format(100.0 * i[1].comments / i[1].rows).rjust(19)) + + def output_xml(self): + message_xml = "\t\t" + _(BLAME_INFO_TEXT) + "\n" + blame_xml = "" + + for i in sorted(self.blame.get_summed_blames().items()): + author_email = self.changes.get_latest_email_by_author(i[0]) + + name_xml = "\t\t\t\t" + i[0] + "\n" + email_xml = "\t\t\t\t" + author_email + "\n" + gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + rows_xml = "\t\t\t\t" + str(i[1].rows) + "\n" + stability_xml = ( + "\t\t\t\t" + "{0:.1f}".format(Blame.get_stability(i[0], i[1].rows, self.changes)) + "\n" + ) + age_xml = "\t\t\t\t" + "{0:.1f}".format(float(i[1].skew) / i[1].rows) + "\n" + percentage_in_comments_xml = ( + "\t\t\t\t" + "{0:.2f}".format(100.0 * i[1].comments / i[1].rows) + "\n" + ) + blame_xml += ( + "\t\t\t\n" + + name_xml + + email_xml + + gravatar_xml + + rows_xml + + stability_xml + + age_xml + + percentage_in_comments_xml + + "\t\t\t\n" + ) + + print("\t\n" + message_xml + "\t\t\n" + blame_xml + "\t\t\n\t") diff --git a/gitinspector/output/changesoutput.py b/gitinspector/output/changesoutput.py index f11a28a7..02eada07 100644 --- a/gitinspector/output/changesoutput.py +++ b/gitinspector/output/changesoutput.py @@ -29,187 +29,185 @@ class ChangesOutput(Outputable): - def __init__(self, changes): - self.changes = changes - Outputable.__init__(self) - - def output_html(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - changes_xml = '
' - chart_data = "" - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - changes_xml += "

" + _(HISTORICAL_INFO_TEXT) + '.

' - changes_xml += "".format( - _("Author"), _("Commits"), _("Insertions"), _("Deletions"), _("% of changes") - ) - changes_xml += "" - - for i, entry in enumerate(sorted(authorinfo_list)): - authorinfo = authorinfo_list.get(entry) - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - - changes_xml += "' if i % 2 == 1 else ">") - - if format.get_selected() == "html": - changes_xml += ''.format( - gravatar.get_url(self.changes.get_latest_email_by_author(entry)), entry - ) - else: - changes_xml += "" - - changes_xml += "" - changes_xml += "" - changes_xml += "" - changes_xml += "" - changes_xml += "" - chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry), "{0:.2f}".format(percentage)) - - if sorted(authorinfo_list)[-1] != entry: - chart_data += ", " - - changes_xml += '
{0} {1} {2} {3} {4}
{1}" + entry + "" + str(authorinfo.commits) + "" + str(authorinfo.insertions) + "" + str(authorinfo.deletions) + "" + "{0:.2f}".format(percentage) + "
 
' - changes_xml += '
' - changes_xml += '" - else: - changes_xml += "

" + _(NO_COMMITED_FILES_TEXT) + ".

" - - changes_xml += "
" - print(changes_xml) - - def output_json(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - message_json = '\t\t\t"message": "' + _(HISTORICAL_INFO_TEXT) + '",\n' - changes_json = "" - - for i in sorted(authorinfo_list): - author_email = self.changes.get_latest_email_by_author(i) - authorinfo = authorinfo_list.get(i) - - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - name_json = '\t\t\t\t"name": "' + i + '",\n' - email_json = '\t\t\t\t"email": "' + author_email + '",\n' - gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' - commits_json = '\t\t\t\t"commits": ' + str(authorinfo.commits) + ",\n" - insertions_json = '\t\t\t\t"insertions": ' + str(authorinfo.insertions) + ",\n" - deletions_json = '\t\t\t\t"deletions": ' + str(authorinfo.deletions) + ",\n" - percentage_json = '\t\t\t\t"percentage_of_changes": ' + "{0:.2f}".format(percentage) + "\n" - - changes_json += ( - "{\n" - + name_json - + email_json - + gravatar_json - + commits_json - + insertions_json - + deletions_json - + percentage_json - + "\t\t\t}" - ) - changes_json += "," - else: - changes_json = changes_json[:-1] - - print('\t\t"changes": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + changes_json + "]\n\t\t}", end="") - else: - print('\t\t"exception": "' + _(NO_COMMITED_FILES_TEXT) + '"') - - def output_text(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - print(textwrap.fill(_(HISTORICAL_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") - terminal.printb( - terminal.ljust(_("Author"), 21) - + terminal.rjust(_("Commits"), 13) - + terminal.rjust(_("Insertions"), 14) - + terminal.rjust(_("Deletions"), 15) - + terminal.rjust(_("% of changes"), 16) - ) - - for i in sorted(authorinfo_list): - authorinfo = authorinfo_list.get(i) - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - - print(terminal.ljust(i, 20)[0 : 20 - terminal.get_excess_column_count(i)], end=" ") - print(str(authorinfo.commits).rjust(13), end=" ") - print(str(authorinfo.insertions).rjust(13), end=" ") - print(str(authorinfo.deletions).rjust(14), end=" ") - print("{0:.2f}".format(percentage).rjust(15)) - else: - print(_(NO_COMMITED_FILES_TEXT) + ".") - - def output_xml(self): - authorinfo_list = self.changes.get_authorinfo_list() - total_changes = 0.0 - - for i in authorinfo_list: - total_changes += authorinfo_list.get(i).insertions - total_changes += authorinfo_list.get(i).deletions - - if authorinfo_list: - message_xml = "\t\t" + _(HISTORICAL_INFO_TEXT) + "\n" - changes_xml = "" - - for i in sorted(authorinfo_list): - author_email = self.changes.get_latest_email_by_author(i) - authorinfo = authorinfo_list.get(i) - - percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 - name_xml = "\t\t\t\t" + i + "\n" - email_xml = "\t\t\t\t" + author_email + "\n" - gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - commits_xml = "\t\t\t\t" + str(authorinfo.commits) + "\n" - insertions_xml = "\t\t\t\t" + str(authorinfo.insertions) + "\n" - deletions_xml 
= "\t\t\t\t" + str(authorinfo.deletions) + "\n" - percentage_xml = ( - "\t\t\t\t" + "{0:.2f}".format(percentage) + "\n" - ) - - changes_xml += ( - "\t\t\t\n" - + name_xml - + email_xml - + gravatar_xml - + commits_xml - + insertions_xml - + deletions_xml - + percentage_xml - + "\t\t\t\n" - ) - - print("\t\n" + message_xml + "\t\t\n" + changes_xml + "\t\t\n\t") - else: - print("\t\n\t\t" + _(NO_COMMITED_FILES_TEXT) + "\n\t") + def __init__(self, changes): + self.changes = changes + Outputable.__init__(self) + + def output_html(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + changes_xml = '
' + chart_data = "" + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + changes_xml += "

" + _(HISTORICAL_INFO_TEXT) + '.

' + changes_xml += "".format( + _("Author"), _("Commits"), _("Insertions"), _("Deletions"), _("% of changes") + ) + changes_xml += "" + + for i, entry in enumerate(sorted(authorinfo_list)): + authorinfo = authorinfo_list.get(entry) + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + + changes_xml += "' if i % 2 == 1 else ">") + + if format.get_selected() == "html": + changes_xml += ''.format( + gravatar.get_url(self.changes.get_latest_email_by_author(entry)), entry + ) + else: + changes_xml += "" + + changes_xml += "" + changes_xml += "" + changes_xml += "" + changes_xml += "" + changes_xml += "" + chart_data += "{{label: {0}, data: {1}}}".format(json.dumps(entry), "{0:.2f}".format(percentage)) + + if sorted(authorinfo_list)[-1] != entry: + chart_data += ", " + + changes_xml += '
{0} {1} {2} {3} {4}
{1}" + entry + "" + str(authorinfo.commits) + "" + str(authorinfo.insertions) + "" + str(authorinfo.deletions) + "" + "{0:.2f}".format(percentage) + "
 
' + changes_xml += '
' + changes_xml += '" + else: + changes_xml += "

" + _(NO_COMMITED_FILES_TEXT) + ".

" + + changes_xml += "
" + print(changes_xml) + + def output_json(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + message_json = '\t\t\t"message": "' + _(HISTORICAL_INFO_TEXT) + '",\n' + changes_json = "" + + for i in sorted(authorinfo_list): + author_email = self.changes.get_latest_email_by_author(i) + authorinfo = authorinfo_list.get(i) + + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + name_json = '\t\t\t\t"name": "' + i + '",\n' + email_json = '\t\t\t\t"email": "' + author_email + '",\n' + gravatar_json = '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + commits_json = '\t\t\t\t"commits": ' + str(authorinfo.commits) + ",\n" + insertions_json = '\t\t\t\t"insertions": ' + str(authorinfo.insertions) + ",\n" + deletions_json = '\t\t\t\t"deletions": ' + str(authorinfo.deletions) + ",\n" + percentage_json = '\t\t\t\t"percentage_of_changes": ' + "{0:.2f}".format(percentage) + "\n" + + changes_json += ( + "{\n" + + name_json + + email_json + + gravatar_json + + commits_json + + insertions_json + + deletions_json + + percentage_json + + "\t\t\t}" + ) + changes_json += "," + else: + changes_json = changes_json[:-1] + + print('\t\t"changes": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + changes_json + "]\n\t\t}", end="") + else: + print('\t\t"exception": "' + _(NO_COMMITED_FILES_TEXT) + '"') + + def output_text(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + print(textwrap.fill(_(HISTORICAL_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n") + terminal.printb( + terminal.ljust(_("Author"), 21) + + terminal.rjust(_("Commits"), 13) + + terminal.rjust(_("Insertions"), 14) + + terminal.rjust(_("Deletions"), 15) + + terminal.rjust(_("% of changes"), 16) + ) + + for i in sorted(authorinfo_list): + authorinfo = authorinfo_list.get(i) + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + + print(terminal.ljust(i, 20)[0 : 20 - terminal.get_excess_column_count(i)], end=" ") + print(str(authorinfo.commits).rjust(13), end=" ") + print(str(authorinfo.insertions).rjust(13), end=" ") + print(str(authorinfo.deletions).rjust(14), end=" ") + print("{0:.2f}".format(percentage).rjust(15)) + else: + print(_(NO_COMMITED_FILES_TEXT) + ".") + + def output_xml(self): + authorinfo_list = self.changes.get_authorinfo_list() + total_changes = 0.0 + + for i in authorinfo_list: + total_changes += authorinfo_list.get(i).insertions + total_changes += authorinfo_list.get(i).deletions + + if authorinfo_list: + message_xml = "\t\t" + _(HISTORICAL_INFO_TEXT) + "\n" + changes_xml = "" + + for i in sorted(authorinfo_list): + author_email = self.changes.get_latest_email_by_author(i) + authorinfo = authorinfo_list.get(i) + + percentage = 0 if total_changes == 0 else (authorinfo.insertions + authorinfo.deletions) / total_changes * 100 + name_xml = "\t\t\t\t" + i + "\n" + email_xml = "\t\t\t\t" + author_email + "\n" + gravatar_xml = "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + commits_xml = "\t\t\t\t" + str(authorinfo.commits) + "\n" + insertions_xml = "\t\t\t\t" + str(authorinfo.insertions) + "\n" + deletions_xml 
= "\t\t\t\t" + str(authorinfo.deletions) + "\n" + percentage_xml = "\t\t\t\t" + "{0:.2f}".format(percentage) + "\n" + + changes_xml += ( + "\t\t\t\n" + + name_xml + + email_xml + + gravatar_xml + + commits_xml + + insertions_xml + + deletions_xml + + percentage_xml + + "\t\t\t\n" + ) + + print("\t\n" + message_xml + "\t\t\n" + changes_xml + "\t\t\n\t") + else: + print("\t\n\t\t" + _(NO_COMMITED_FILES_TEXT) + "\n\t") diff --git a/gitinspector/output/extensionsoutput.py b/gitinspector/output/extensionsoutput.py index f1ae6124..d243edbe 100644 --- a/gitinspector/output/extensionsoutput.py +++ b/gitinspector/output/extensionsoutput.py @@ -29,91 +29,88 @@ class ExtensionsOutput(Outputable): - @staticmethod - def is_marked(extension): - if extension in extensions.__extensions__ or "**" in extensions.__extensions__: - return True - - return False - - def output_html(self): - if extensions.__located_extensions__: - extensions_xml = '
' - extensions_xml += "

{0} {1}.

".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)) - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - extensions_xml += "" + i + "" - else: - extensions_xml += i - extensions_xml += " " - - extensions_xml += "

" - print(extensions_xml) - - def output_json(self): - if extensions.__located_extensions__: - message_json = '\t\t\t"message": "' + _(EXTENSIONS_INFO_TEXT) + '",\n' - used_extensions_json = "" - unused_extensions_json = "" - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - used_extensions_json += '"' + i + '", ' - else: - unused_extensions_json += '"' + i + '", ' - - used_extensions_json = used_extensions_json[:-2] - unused_extensions_json = unused_extensions_json[:-2] - - print( - ',\n\t\t"extensions": {\n' - + message_json - + '\t\t\t"used": [ ' - + used_extensions_json - + ' ],\n\t\t\t"unused": [ ' - + unused_extensions_json - + " ]\n" - + "\t\t}", - end="", - ) - - def output_text(self): - if extensions.__located_extensions__: - print( - "\n" - + textwrap.fill( - "{0} {1}:".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)), width=terminal.get_size()[0] - ) - ) - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - print("[" + terminal.__bold__ + i + terminal.__normal__ + "]", end=" ") - else: - print(i, end=" ") - print("") - - def output_xml(self): - if extensions.__located_extensions__: - message_xml = "\t\t" + _(EXTENSIONS_INFO_TEXT) + "\n" - used_extensions_xml = "" - unused_extensions_xml = "" - - for i in sorted(extensions.__located_extensions__): - if ExtensionsOutput.is_marked(i): - used_extensions_xml += "\t\t\t" + i + "\n" - else: - unused_extensions_xml += "\t\t\t" + i + "\n" - - print( - "\t\n" - + message_xml - + "\t\t\n" - + used_extensions_xml - + "\t\t\n" - + "\t\t\n" - + unused_extensions_xml - + "\t\t\n" - + "\t" - ) + @staticmethod + def is_marked(extension): + if extension in extensions.__extensions__ or "**" in extensions.__extensions__: + return True + + return False + + def output_html(self): + if extensions.__located_extensions__: + extensions_xml = '
' + extensions_xml += "

{0} {1}.

".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)) + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + extensions_xml += "" + i + "" + else: + extensions_xml += i + extensions_xml += " " + + extensions_xml += "

" + print(extensions_xml) + + def output_json(self): + if extensions.__located_extensions__: + message_json = '\t\t\t"message": "' + _(EXTENSIONS_INFO_TEXT) + '",\n' + used_extensions_json = "" + unused_extensions_json = "" + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + used_extensions_json += '"' + i + '", ' + else: + unused_extensions_json += '"' + i + '", ' + + used_extensions_json = used_extensions_json[:-2] + unused_extensions_json = unused_extensions_json[:-2] + + print( + ',\n\t\t"extensions": {\n' + + message_json + + '\t\t\t"used": [ ' + + used_extensions_json + + ' ],\n\t\t\t"unused": [ ' + + unused_extensions_json + + " ]\n" + + "\t\t}", + end="", + ) + + def output_text(self): + if extensions.__located_extensions__: + print( + "\n" + textwrap.fill("{0} {1}:".format(_(EXTENSIONS_INFO_TEXT), _(EXTENSIONS_MARKED_TEXT)), width=terminal.get_size()[0]) + ) + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + print("[" + terminal.__bold__ + i + terminal.__normal__ + "]", end=" ") + else: + print(i, end=" ") + print("") + + def output_xml(self): + if extensions.__located_extensions__: + message_xml = "\t\t" + _(EXTENSIONS_INFO_TEXT) + "\n" + used_extensions_xml = "" + unused_extensions_xml = "" + + for i in sorted(extensions.__located_extensions__): + if ExtensionsOutput.is_marked(i): + used_extensions_xml += "\t\t\t" + i + "\n" + else: + unused_extensions_xml += "\t\t\t" + i + "\n" + + print( + "\t\n" + + message_xml + + "\t\t\n" + + used_extensions_xml + + "\t\t\n" + + "\t\t\n" + + unused_extensions_xml + + "\t\t\n" + + "\t" + ) diff --git a/gitinspector/output/filteringoutput.py b/gitinspector/output/filteringoutput.py index 00b50135..2784bd0e 100644 --- a/gitinspector/output/filteringoutput.py +++ b/gitinspector/output/filteringoutput.py @@ -26,109 +26,105 @@ FILTERING_INFO_TEXT = N_("The following files were excluded from the statistics due to the specified exclusion patterns") FILTERING_AUTHOR_INFO_TEXT = N_( - "The following authors were excluded from the statistics due to the specified exclusion patterns" + "The following authors were excluded from the statistics due to the specified exclusion patterns" ) FILTERING_EMAIL_INFO_TEXT = N_( - "The authors with the following emails were excluded from the statistics due to the specified " "exclusion patterns" + "The authors with the following emails were excluded from the statistics due to the specified " "exclusion patterns" ) FILTERING_COMMIT_INFO_TEXT = N_( - "The following commit revisions were excluded from the statistics due to the specified " "exclusion patterns" + "The following commit revisions were excluded from the statistics due to the specified " "exclusion patterns" ) class FilteringOutput(Outputable): - @staticmethod - def __output_html_section__(info_string, filtered): - filtering_xml = "" - - if filtered: - filtering_xml += "

" + info_string + "." + "

" - - for i in filtered: - filtering_xml += "

" + i + "

" - - return filtering_xml - - def output_html(self): - if has_filtered(): - filtering_xml = '
' - FilteringOutput.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) - FilteringOutput.__output_html_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) - filtering_xml += "
" - - print(filtering_xml) - - @staticmethod - def __output_json_section__(info_string, filtered, container_tagname): - if filtered: - message_json = '\t\t\t\t"message": "' + info_string + '",\n' - filtering_json = "" - - for i in filtered: - filtering_json += '\t\t\t\t\t"' + i + '",\n' - else: - filtering_json = filtering_json[:-3] - - return ( - '\n\t\t\t"{0}": {{\n'.format(container_tagname) - + message_json - + '\t\t\t\t"entries": [\n' - + filtering_json - + '"\n\t\t\t\t]\n\t\t\t},' - ) - - return "" - - def output_json(self): - if has_filtered(): - output = ',\n\t\t"filtering": {' - output += FilteringOutput.__output_json_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") - output += FilteringOutput.__output_json_section__( - _(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors" - ) - output += FilteringOutput.__output_json_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") - output += FilteringOutput.__output_json_section__( - _(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision" - ) - output = output[:-1] - output += "\n\t\t}" - print(output, end="") - - @staticmethod - def __output_text_section__(info_string, filtered): - if filtered: - print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0])) - - for i in filtered: - (width, _unused) = terminal.get_size() - print("...%s" % i[-width + 3 :] if len(i) > width else i) - - def output_text(self): - FilteringOutput.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) - FilteringOutput.__output_text_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) - - @staticmethod - def __output_xml_section__(info_string, filtered, container_tagname): - if filtered: - message_xml = "\t\t\t" + info_string + "\n" - filtering_xml = "" - - for i in filtered: - filtering_xml += "\t\t\t\t" + i + "\n" - - print("\t\t<{0}>".format(container_tagname)) - print(message_xml + "\t\t\t\n" + filtering_xml + "\t\t\t\n") - print("\t\t".format(container_tagname)) - - def output_xml(self): - if has_filtered(): - print("\t") - FilteringOutput.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") - FilteringOutput.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") - FilteringOutput.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") - FilteringOutput.__output_xml_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") - print("\t") + @staticmethod + def __output_html_section__(info_string, filtered): + filtering_xml = "" + + if filtered: + filtering_xml += "

" + info_string + "." + "

" + + for i in filtered: + filtering_xml += "

" + i + "

" + + return filtering_xml + + def output_html(self): + if has_filtered(): + filtering_xml = '
' + FilteringOutput.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) + FilteringOutput.__output_html_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) + filtering_xml += "
" + + print(filtering_xml) + + @staticmethod + def __output_json_section__(info_string, filtered, container_tagname): + if filtered: + message_json = '\t\t\t\t"message": "' + info_string + '",\n' + filtering_json = "" + + for i in filtered: + filtering_json += '\t\t\t\t\t"' + i + '",\n' + else: + filtering_json = filtering_json[:-3] + + return ( + '\n\t\t\t"{0}": {{\n'.format(container_tagname) + + message_json + + '\t\t\t\t"entries": [\n' + + filtering_json + + '"\n\t\t\t\t]\n\t\t\t},' + ) + + return "" + + def output_json(self): + if has_filtered(): + output = ',\n\t\t"filtering": {' + output += FilteringOutput.__output_json_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") + output += FilteringOutput.__output_json_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") + output += FilteringOutput.__output_json_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") + output += FilteringOutput.__output_json_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") + output = output[:-1] + output += "\n\t\t}" + print(output, end="") + + @staticmethod + def __output_text_section__(info_string, filtered): + if filtered: + print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0])) + + for i in filtered: + (width, _unused) = terminal.get_size() + print("...%s" % i[-width + 3 :] if len(i) > width else i) + + def output_text(self): + FilteringOutput.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1]) + FilteringOutput.__output_text_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1]) + + @staticmethod + def __output_xml_section__(info_string, filtered, container_tagname): + if filtered: + message_xml = "\t\t\t" + info_string + "\n" + filtering_xml = "" + + for i in filtered: + filtering_xml += "\t\t\t\t" + i + "\n" + + print("\t\t<{0}>".format(container_tagname)) + print(message_xml + "\t\t\t\n" + filtering_xml + "\t\t\t\n") + print("\t\t".format(container_tagname)) + + def output_xml(self): + if has_filtered(): + print("\t") + FilteringOutput.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files") + FilteringOutput.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors") + FilteringOutput.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails") + FilteringOutput.__output_xml_section__(_(FILTERING_COMMIT_INFO_TEXT), __filters__["revision"][1], "revision") + print("\t") diff --git a/gitinspector/output/metricsoutput.py b/gitinspector/output/metricsoutput.py index befe5aea..182cb63f 100644 --- a/gitinspector/output/metricsoutput.py +++ b/gitinspector/output/metricsoutput.py @@ -26,7 +26,7 @@ ELOC_INFO_TEXT = N_("The following files are suspiciously big (in order of severity)") CYCLOMATIC_COMPLEXITY_TEXT = N_("The following files have an elevated cyclomatic complexity (in order of severity)") CYCLOMATIC_COMPLEXITY_DENSITY_TEXT = N_( - "The following files have an elevated cyclomatic complexity density " "(in order of severity)" + "The following files have an elevated cyclomatic complexity density " "(in order of severity)" ) METRICS_MISSING_INFO_TEXT = N_("No metrics violations were found in the repository") @@ -34,152 +34,144 @@ def __get_metrics_score__(ceiling, value): - for i in 
reversed(METRICS_VIOLATION_SCORES): - if value > ceiling * i[0]: - return i[1] + for i in reversed(METRICS_VIOLATION_SCORES): + if value > ceiling * i[0]: + return i[1] class MetricsOutput(Outputable): - def __init__(self, metrics): - self.metrics = metrics - Outputable.__init__(self) - - def output_text(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print("\n" + _(METRICS_MISSING_INFO_TEXT) + ".") - - if self.metrics.eloc: - print("\n" + _(ELOC_INFO_TEXT) + ":") - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - print(_("{0} ({1} estimated lines of code)").format(i[1], str(i[0]))) - - if self.metrics.cyclomatic_complexity: - print("\n" + _(CYCLOMATIC_COMPLEXITY_TEXT) + ":") - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - print(_("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0]))) - - if self.metrics.cyclomatic_complexity_density: - print("\n" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + ":") - for i in sorted( - set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True - ): - print(_("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0])) - - def output_html(self): - metrics_xml = '
' - - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - metrics_xml += "

" + _(METRICS_MISSING_INFO_TEXT) + ".

" - - if self.metrics.eloc: - metrics_xml += "

" + _(ELOC_INFO_TEXT) + ".

" - for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True)): - metrics_xml += ( - '
' if num % 2 == 1 else '">') - + _("{0} ({1} estimated lines of code)").format(i[1], str(i[0])) - + "
" - ) - metrics_xml += "
" - - if self.metrics.cyclomatic_complexity: - metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_TEXT) + "

" - for num, i in enumerate( - sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True) - ): - metrics_xml += ( - '
' if num % 2 == 1 else '">') - + _("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0])) - + "
" - ) - metrics_xml += "
" - - if self.metrics.cyclomatic_complexity_density: - metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + "

" - for num, i in enumerate( - sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True) - ): - metrics_xml += ( - '
' if num % 2 == 1 else '">') - + _("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0]) - + "
" - ) - metrics_xml += "
" - - metrics_xml += "
" - print(metrics_xml) - - def output_json(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print(',\n\t\t"metrics": {\n\t\t\t"message": "' + _(METRICS_MISSING_INFO_TEXT) + '"\n\t\t}', end="") - else: - eloc_json = "" - - if self.metrics.eloc: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - eloc_json += '{\n\t\t\t\t"type": "estimated-lines-of-code",\n' - eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' - eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" - eloc_json += "\t\t\t}," - else: - if not self.metrics.cyclomatic_complexity: - eloc_json = eloc_json[:-1] - - if self.metrics.cyclomatic_complexity: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity",\n' - eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' - eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" - eloc_json += "\t\t\t}," - else: - if not self.metrics.cyclomatic_complexity_density: - eloc_json = eloc_json[:-1] - - if self.metrics.cyclomatic_complexity_density: - for i in sorted( - set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True - ): - eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity-density",\n' - eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' - eloc_json += '\t\t\t\t"value": {0:.3f}\n'.format(i[0]) - eloc_json += "\t\t\t}," - else: - eloc_json = eloc_json[:-1] - - print(',\n\t\t"metrics": {\n\t\t\t"violations": [\n\t\t\t' + eloc_json + "]\n\t\t}", end="") - - def output_xml(self): - if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: - print("\t\n\t\t" + _(METRICS_MISSING_INFO_TEXT) + "\n\t") - else: - eloc_xml = "" - - if self.metrics.eloc: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" - eloc_xml += "\t\t\t\n" - - if self.metrics.cyclomatic_complexity: - for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" - eloc_xml += "\t\t\t\n" - - if self.metrics.cyclomatic_complexity_density: - for i in sorted( - set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True - ): - eloc_xml += "\t\t\t\n" - eloc_xml += "\t\t\t\t" + i[1] + "\n" - eloc_xml += "\t\t\t\t{0:.3f}\n".format(i[0]) - eloc_xml += "\t\t\t\n" - - print("\t\n\t\t\n" + eloc_xml + "\t\t\n\t") + def __init__(self, metrics): + self.metrics = metrics + Outputable.__init__(self) + + def output_text(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print("\n" + _(METRICS_MISSING_INFO_TEXT) + ".") + + if self.metrics.eloc: + print("\n" + _(ELOC_INFO_TEXT) + ":") + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + print(_("{0} ({1} estimated lines of code)").format(i[1], str(i[0]))) + + if self.metrics.cyclomatic_complexity: + print("\n" + _(CYCLOMATIC_COMPLEXITY_TEXT) + ":") + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + print(_("{0} ({1} in cyclomatic 
complexity)").format(i[1], str(i[0]))) + + if self.metrics.cyclomatic_complexity_density: + print("\n" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + ":") + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): + print(_("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0])) + + def output_html(self): + metrics_xml = '
' + + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + metrics_xml += "

" + _(METRICS_MISSING_INFO_TEXT) + ".

" + + if self.metrics.eloc: + metrics_xml += "

" + _(ELOC_INFO_TEXT) + ".

" + for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True)): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1} estimated lines of code)").format(i[1], str(i[0])) + + "
" + ) + metrics_xml += "
" + + if self.metrics.cyclomatic_complexity: + metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_TEXT) + "

" + for num, i in enumerate(sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True)): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0])) + + "
" + ) + metrics_xml += "
" + + if self.metrics.cyclomatic_complexity_density: + metrics_xml += "

" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + "

" + for num, i in enumerate( + sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True) + ): + metrics_xml += ( + '
' if num % 2 == 1 else '">') + + _("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0]) + + "
" + ) + metrics_xml += "
" + + metrics_xml += "
" + print(metrics_xml) + + def output_json(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print(',\n\t\t"metrics": {\n\t\t\t"message": "' + _(METRICS_MISSING_INFO_TEXT) + '"\n\t\t}', end="") + else: + eloc_json = "" + + if self.metrics.eloc: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + eloc_json += '{\n\t\t\t\t"type": "estimated-lines-of-code",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" + eloc_json += "\t\t\t}," + else: + if not self.metrics.cyclomatic_complexity: + eloc_json = eloc_json[:-1] + + if self.metrics.cyclomatic_complexity: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": ' + str(i[0]) + "\n" + eloc_json += "\t\t\t}," + else: + if not self.metrics.cyclomatic_complexity_density: + eloc_json = eloc_json[:-1] + + if self.metrics.cyclomatic_complexity_density: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): + eloc_json += '{\n\t\t\t\t"type": "cyclomatic-complexity-density",\n' + eloc_json += '\t\t\t\t"file_name": "' + i[1] + '",\n' + eloc_json += '\t\t\t\t"value": {0:.3f}\n'.format(i[0]) + eloc_json += "\t\t\t}," + else: + eloc_json = eloc_json[:-1] + + print(',\n\t\t"metrics": {\n\t\t\t"violations": [\n\t\t\t' + eloc_json + "]\n\t\t}", end="") + + def output_xml(self): + if not self.metrics.eloc and not self.metrics.cyclomatic_complexity and not self.metrics.cyclomatic_complexity_density: + print("\t\n\t\t" + _(METRICS_MISSING_INFO_TEXT) + "\n\t") + else: + eloc_xml = "" + + if self.metrics.eloc: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.eloc.items())]), reverse=True): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" + eloc_xml += "\t\t\t\n" + + if self.metrics.cyclomatic_complexity: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity.items())]), reverse=True): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t" + str(i[0]) + "\n" + eloc_xml += "\t\t\t\n" + + if self.metrics.cyclomatic_complexity_density: + for i in sorted(set([(j, i) for (i, j) in list(self.metrics.cyclomatic_complexity_density.items())]), reverse=True): + eloc_xml += "\t\t\t\n" + eloc_xml += "\t\t\t\t" + i[1] + "\n" + eloc_xml += "\t\t\t\t{0:.3f}\n".format(i[0]) + eloc_xml += "\t\t\t\n" + + print("\t\n\t\t\n" + eloc_xml + "\t\t\n\t") diff --git a/gitinspector/output/outputable.py b/gitinspector/output/outputable.py index 2d49d182..2be7df33 100644 --- a/gitinspector/output/outputable.py +++ b/gitinspector/output/outputable.py @@ -22,25 +22,25 @@ class Outputable(object): - def output_html(self): - raise NotImplementedError(_("HTML output not yet supported in") + ' "' + self.__class__.__name__ + '".') + def output_html(self): + raise NotImplementedError(_("HTML output not yet supported in") + ' "' + self.__class__.__name__ + '".') - def output_json(self): - raise NotImplementedError(_("JSON output not yet supported in") + ' "' + self.__class__.__name__ + '".') + def output_json(self): + raise NotImplementedError(_("JSON output not yet supported in") + ' "' + self.__class__.__name__ + '".') 
- def output_text(self): - raise NotImplementedError(_("Text output not yet supported in") + ' "' + self.__class__.__name__ + '".') + def output_text(self): + raise NotImplementedError(_("Text output not yet supported in") + ' "' + self.__class__.__name__ + '".') - def output_xml(self): - raise NotImplementedError(_("XML output not yet supported in") + ' "' + self.__class__.__name__ + '".') + def output_xml(self): + raise NotImplementedError(_("XML output not yet supported in") + ' "' + self.__class__.__name__ + '".') def output(outputable): - if format.get_selected() == "html" or format.get_selected() == "htmlembedded": - outputable.output_html() - elif format.get_selected() == "json": - outputable.output_json() - elif format.get_selected() == "text": - outputable.output_text() - else: - outputable.output_xml() + if format.get_selected() == "html" or format.get_selected() == "htmlembedded": + outputable.output_html() + elif format.get_selected() == "json": + outputable.output_json() + elif format.get_selected() == "text": + outputable.output_text() + else: + outputable.output_xml() diff --git a/gitinspector/output/responsibilitiesoutput.py b/gitinspector/output/responsibilitiesoutput.py index a084beb6..5cfd6b37 100644 --- a/gitinspector/output/responsibilitiesoutput.py +++ b/gitinspector/output/responsibilitiesoutput.py @@ -25,123 +25,121 @@ from .outputable import Outputable RESPONSIBILITIES_INFO_TEXT = N_( - "The following responsibilities, by author, were found in the current " - "revision of the repository (comments are excluded from the line count, " - "if possible)" + "The following responsibilities, by author, were found in the current " + "revision of the repository (comments are excluded from the line count, " + "if possible)" ) MOSTLY_RESPONSIBLE_FOR_TEXT = N_("is mostly responsible for") class ResponsibilitiesOutput(Outputable): - def __init__(self, changes, blame): - self.changes = changes - self.blame = blame - Outputable.__init__(self) - - def output_text(self): - print("\n" + textwrap.fill(_(RESPONSIBILITIES_INFO_TEXT) + ":", width=terminal.get_size()[0])) - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + def __init__(self, changes, blame): + self.changes = changes + self.blame = blame + Outputable.__init__(self) + + def output_text(self): + print("\n" + textwrap.fill(_(RESPONSIBILITIES_INFO_TEXT) + ":", width=terminal.get_size()[0])) + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - if responsibilities: - print("\n" + i, _(MOSTLY_RESPONSIBLE_FOR_TEXT) + ":") - - for j, entry in enumerate(responsibilities): - (width, _unused) = terminal.get_size() - width -= 7 - - print(str(entry[0]).rjust(6), end=" ") - print("...%s" % entry[1][-width + 3 :] if len(entry[1]) > width else entry[1]) - - if j >= 9: - break - - def output_html(self): - resp_xml = '
' - resp_xml += "

" + _(RESPONSIBILITIES_INFO_TEXT) + ".

" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - - if responsibilities: - resp_xml += "
" - - if format.get_selected() == "html": - author_email = self.changes.get_latest_email_by_author(i) - resp_xml += '

{1} {2}

'.format(
-						gravatar.get_url(author_email, size=32), i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)
-					)
-				else:
-					resp_xml += "

{0} {1}

".format(i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)) - - for j, entry in enumerate(responsibilities): - resp_xml += ( - "' if j % 2 == 1 else ">") + entry[1] + " (" + str(entry[0]) + " eloc)
" - ) - if j >= 9: - break - - resp_xml += "
" - resp_xml += "
" - print(resp_xml) - - def output_json(self): - message_json = '\t\t\t"message": "' + _(RESPONSIBILITIES_INFO_TEXT) + '",\n' - resp_json = "" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - - if responsibilities: - author_email = self.changes.get_latest_email_by_author(i) - - resp_json += "{\n" - resp_json += '\t\t\t\t"name": "' + i + '",\n' - resp_json += '\t\t\t\t"email": "' + author_email + '",\n' - resp_json += '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' - resp_json += '\t\t\t\t"files": [\n\t\t\t\t' - - for j, entry in enumerate(responsibilities): - resp_json += "{\n" - resp_json += '\t\t\t\t\t"name": "' + entry[1] + '",\n' - resp_json += '\t\t\t\t\t"rows": ' + str(entry[0]) + "\n" - resp_json += "\t\t\t\t}," - - if j >= 9: - break - - resp_json = resp_json[:-1] - resp_json += "]\n\t\t\t}," - - resp_json = resp_json[:-1] - print(',\n\t\t"responsibilities": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + resp_json + "]\n\t\t}", end="") - - def output_xml(self): - message_xml = "\t\t" + _(RESPONSIBILITIES_INFO_TEXT) + "\n" - resp_xml = "" - - for i in sorted(set(i[0] for i in self.blame.blames)): - responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) - if responsibilities: - author_email = self.changes.get_latest_email_by_author(i) - - resp_xml += "\t\t\t\n" - resp_xml += "\t\t\t\t" + i + "\n" - resp_xml += "\t\t\t\t" + author_email + "\n" - resp_xml += "\t\t\t\t" + gravatar.get_url(author_email) + "\n" - resp_xml += "\t\t\t\t\n" - - for j, entry in enumerate(responsibilities): - resp_xml += "\t\t\t\t\t\n" - resp_xml += "\t\t\t\t\t\t" + entry[1] + "\n" - resp_xml += "\t\t\t\t\t\t" + str(entry[0]) + "\n" - resp_xml += "\t\t\t\t\t\n" + if responsibilities: + print("\n" + i, _(MOSTLY_RESPONSIBLE_FOR_TEXT) + ":") - if j >= 9: - break + for j, entry in enumerate(responsibilities): + (width, _unused) = terminal.get_size() + width -= 7 - resp_xml += "\t\t\t\t\n" - resp_xml += "\t\t\t\n" + print(str(entry[0]).rjust(6), end=" ") + print("...%s" % entry[1][-width + 3 :] if len(entry[1]) > width else entry[1]) - print("\t\n" + message_xml + "\t\t\n" + resp_xml + "\t\t\n\t") + if j >= 9: + break + + def output_html(self): + resp_xml = '
' + resp_xml += "

" + _(RESPONSIBILITIES_INFO_TEXT) + ".

" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + + if responsibilities: + resp_xml += "
" + + if format.get_selected() == "html": + author_email = self.changes.get_latest_email_by_author(i) + resp_xml += '

{1} {2}

'.format(
+						gravatar.get_url(author_email, size=32), i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)
+					)
+				else:
+					resp_xml += "

{0} {1}

".format(i, _(MOSTLY_RESPONSIBLE_FOR_TEXT)) + + for j, entry in enumerate(responsibilities): + resp_xml += "' if j % 2 == 1 else ">") + entry[1] + " (" + str(entry[0]) + " eloc)
" + if j >= 9: + break + + resp_xml += "
" + resp_xml += "
" + print(resp_xml) + + def output_json(self): + message_json = '\t\t\t"message": "' + _(RESPONSIBILITIES_INFO_TEXT) + '",\n' + resp_json = "" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + + if responsibilities: + author_email = self.changes.get_latest_email_by_author(i) + + resp_json += "{\n" + resp_json += '\t\t\t\t"name": "' + i + '",\n' + resp_json += '\t\t\t\t"email": "' + author_email + '",\n' + resp_json += '\t\t\t\t"gravatar": "' + gravatar.get_url(author_email) + '",\n' + resp_json += '\t\t\t\t"files": [\n\t\t\t\t' + + for j, entry in enumerate(responsibilities): + resp_json += "{\n" + resp_json += '\t\t\t\t\t"name": "' + entry[1] + '",\n' + resp_json += '\t\t\t\t\t"rows": ' + str(entry[0]) + "\n" + resp_json += "\t\t\t\t}," + + if j >= 9: + break + + resp_json = resp_json[:-1] + resp_json += "]\n\t\t\t}," + + resp_json = resp_json[:-1] + print(',\n\t\t"responsibilities": {\n' + message_json + '\t\t\t"authors": [\n\t\t\t' + resp_json + "]\n\t\t}", end="") + + def output_xml(self): + message_xml = "\t\t" + _(RESPONSIBILITIES_INFO_TEXT) + "\n" + resp_xml = "" + + for i in sorted(set(i[0] for i in self.blame.blames)): + responsibilities = sorted(((i[1], i[0]) for i in resp.Responsibilities.get(self.blame, i)), reverse=True) + if responsibilities: + author_email = self.changes.get_latest_email_by_author(i) + + resp_xml += "\t\t\t\n" + resp_xml += "\t\t\t\t" + i + "\n" + resp_xml += "\t\t\t\t" + author_email + "\n" + resp_xml += "\t\t\t\t" + gravatar.get_url(author_email) + "\n" + resp_xml += "\t\t\t\t\n" + + for j, entry in enumerate(responsibilities): + resp_xml += "\t\t\t\t\t\n" + resp_xml += "\t\t\t\t\t\t" + entry[1] + "\n" + resp_xml += "\t\t\t\t\t\t" + str(entry[0]) + "\n" + resp_xml += "\t\t\t\t\t\n" + + if j >= 9: + break + + resp_xml += "\t\t\t\t\n" + resp_xml += "\t\t\t\n" + + print("\t\n" + message_xml + "\t\t\n" + resp_xml + "\t\t\n\t") diff --git a/gitinspector/output/timelineoutput.py b/gitinspector/output/timelineoutput.py index 29c97ae8..9aa6f919 100644 --- a/gitinspector/output/timelineoutput.py +++ b/gitinspector/output/timelineoutput.py @@ -28,193 +28,184 @@ def __output_row__text__(timeline_data, periods, names): - print("\n" + terminal.__bold__ + terminal.ljust(_("Author"), 20), end=" ") + print("\n" + terminal.__bold__ + terminal.ljust(_("Author"), 20), end=" ") - for period in periods: - print(terminal.rjust(period, 10), end=" ") + for period in periods: + print(terminal.rjust(period, 10), end=" ") - print(terminal.__normal__) + print(terminal.__normal__) - for name in names: - if timeline_data.is_author_in_periods(periods, name[0]): - print(terminal.ljust(name[0], 20)[0 : 20 - terminal.get_excess_column_count(name[0])], end=" ") + for name in names: + if timeline_data.is_author_in_periods(periods, name[0]): + print(terminal.ljust(name[0], 20)[0 : 20 - terminal.get_excess_column_count(name[0])], end=" ") - for period in periods: - multiplier = timeline_data.get_multiplier(period, 9) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = signs[1] * "-" + signs[0] * "+" - print( - ("." 
if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str).rjust( - 10 - ), - end=" ", - ) - print("") + for period in periods: + multiplier = timeline_data.get_multiplier(period, 9) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + print( + ("." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str).rjust(10), end=" ", + ) + print("") - print(terminal.__bold__ + terminal.ljust(_(MODIFIED_ROWS_TEXT), 20) + terminal.__normal__, end=" ") + print(terminal.__bold__ + terminal.ljust(_(MODIFIED_ROWS_TEXT), 20) + terminal.__normal__, end=" ") - for period in periods: - total_changes = str(timeline_data.get_total_changes_in_period(period)[2]) + for period in periods: + total_changes = str(timeline_data.get_total_changes_in_period(period)[2]) - if hasattr(total_changes, "decode"): - total_changes = total_changes.decode("utf-8", "replace") + if hasattr(total_changes, "decode"): + total_changes = total_changes.decode("utf-8", "replace") - print(terminal.rjust(total_changes, 10), end=" ") + print(terminal.rjust(total_changes, 10), end=" ") - print("") + print("") def __output_row__html__(timeline_data, periods, names): - timeline_xml = '" + timeline_xml = '
' + _("Author") + "
" - for period in periods: - timeline_xml += "" + for period in periods: + timeline_xml += "" - timeline_xml += "" - i = 0 + timeline_xml += "" + i = 0 - for name in names: - if timeline_data.is_author_in_periods(periods, name[0]): - timeline_xml += "' if i % 2 == 1 else ">") + for name in names: + if timeline_data.is_author_in_periods(periods, name[0]): + timeline_xml += "' if i % 2 == 1 else ">") - if format.get_selected() == "html": - timeline_xml += ''.format(gravatar.get_url(name[1]), name[0]) - else: - timeline_xml += "" + if format.get_selected() == "html": + timeline_xml += ''.format(gravatar.get_url(name[1]), name[0]) + else: + timeline_xml += "" - for period in periods: - multiplier = timeline_data.get_multiplier(period, 18) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = signs[1] * '
 
' + signs[0] * '
 
' + for period in periods: + multiplier = timeline_data.get_multiplier(period, 18) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * '
 
' + signs[0] * '
 
' - timeline_xml += "" - timeline_xml += "" - i = i + 1 + timeline_xml += "" + timeline_xml += "" + i = i + 1 - timeline_xml += "" + timeline_xml += "" - for period in periods: - total_changes = timeline_data.get_total_changes_in_period(period) - timeline_xml += "" + for period in periods: + total_changes = timeline_data.get_total_changes_in_period(period) + timeline_xml += "" - timeline_xml += "
' + _("Author") + "" + str(period) + "" + str(period) + "
{1}" + name[0] + "{1}" + name[0] + "" + ( - "." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str - ) - timeline_xml += "
" + ("." if timeline_data.is_author_in_period(period, name[0]) and len(signs_str) == 0 else signs_str) + timeline_xml += "
" + _(MODIFIED_ROWS_TEXT) + "
" + _(MODIFIED_ROWS_TEXT) + "" + str(total_changes[2]) + "" + str(total_changes[2]) + "
" - print(timeline_xml) + timeline_xml += "" + print(timeline_xml) class TimelineOutput(Outputable): - def __init__(self, changes, useweeks): - self.changes = changes - self.useweeks = useweeks - Outputable.__init__(self) - - def output_text(self): - if self.changes.get_commits(): - print("\n" + textwrap.fill(_(TIMELINE_INFO_TEXT) + ":", width=terminal.get_size()[0])) - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - (width, _unused) = terminal.get_size() - max_periods_per_row = int((width - 21) / 11) - - for i in range(0, len(periods), max_periods_per_row): - __output_row__text__(timeline_data, periods[i : i + max_periods_per_row], names) - - def output_html(self): - if self.changes.get_commits(): - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - max_periods_per_row = 8 - - timeline_xml = '
' - timeline_xml += "

" + _(TIMELINE_INFO_TEXT) + ".

" - print(timeline_xml) - - for i in range(0, len(periods), max_periods_per_row): - __output_row__html__(timeline_data, periods[i : i + max_periods_per_row], names) - - timeline_xml = "
" - print(timeline_xml) - - def output_json(self): - if self.changes.get_commits(): - message_json = '\t\t\t"message": "' + _(TIMELINE_INFO_TEXT) + '",\n' - timeline_json = "" - periods_json = '\t\t\t"period_length": "{0}",\n'.format("week" if self.useweeks else "month") - periods_json += '\t\t\t"periods": [\n\t\t\t' - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - - for period in periods: - name_json = '\t\t\t\t"name": "' + str(period) + '",\n' - authors_json = '\t\t\t\t"authors": [\n\t\t\t\t' - - for name in names: - if timeline_data.is_author_in_period(period, name[0]): - multiplier = timeline_data.get_multiplier(period, 24) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = signs[1] * "-" + signs[0] * "+" - - if len(signs_str) == 0: - signs_str = "." - - authors_json += '{\n\t\t\t\t\t"name": "' + name[0] + '",\n' - authors_json += '\t\t\t\t\t"email": "' + name[1] + '",\n' - authors_json += '\t\t\t\t\t"gravatar": "' + gravatar.get_url(name[1]) + '",\n' - authors_json += '\t\t\t\t\t"work": "' + signs_str + '"\n\t\t\t\t},' - else: - authors_json = authors_json[:-1] - - authors_json += "],\n" - modified_rows_json = ( - '\t\t\t\t"modified_rows": ' + str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" - ) - timeline_json += "{\n" + name_json + authors_json + modified_rows_json + "\t\t\t}," - else: - timeline_json = timeline_json[:-1] - - print(',\n\t\t"timeline": {\n' + message_json + periods_json + timeline_json + "]\n\t\t}", end="") - - def output_xml(self): - if self.changes.get_commits(): - message_xml = "\t\t" + _(TIMELINE_INFO_TEXT) + "\n" - timeline_xml = "" - periods_xml = '\t\t\n'.format("week" if self.useweeks else "month") - - timeline_data = timeline.TimelineData(self.changes, self.useweeks) - periods = timeline_data.get_periods() - names = timeline_data.get_authors() - - for period in periods: - name_xml = "\t\t\t\t" + str(period) + "\n" - authors_xml = "\t\t\t\t\n" - - for name in names: - if timeline_data.is_author_in_period(period, name[0]): - multiplier = timeline_data.get_multiplier(period, 24) - signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) - signs_str = signs[1] * "-" + signs[0] * "+" - - if len(signs_str) == 0: - signs_str = "." 
- - authors_xml += "\t\t\t\t\t\n\t\t\t\t\t\t" + name[0] + "\n" - authors_xml += "\t\t\t\t\t\t" + name[1] + "\n" - authors_xml += "\t\t\t\t\t\t" + gravatar.get_url(name[1]) + "\n" - authors_xml += "\t\t\t\t\t\t" + signs_str + "\n\t\t\t\t\t\n" - - authors_xml += "\t\t\t\t\n" - modified_rows_xml = ( - "\t\t\t\t" - + str(timeline_data.get_total_changes_in_period(period)[2]) - + "\n" - ) - timeline_xml += "\t\t\t\n" + name_xml + authors_xml + modified_rows_xml + "\t\t\t\n" - - print("\t\n" + message_xml + periods_xml + timeline_xml + "\t\t\n\t") + def __init__(self, changes, useweeks): + self.changes = changes + self.useweeks = useweeks + Outputable.__init__(self) + + def output_text(self): + if self.changes.get_commits(): + print("\n" + textwrap.fill(_(TIMELINE_INFO_TEXT) + ":", width=terminal.get_size()[0])) + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + (width, _unused) = terminal.get_size() + max_periods_per_row = int((width - 21) / 11) + + for i in range(0, len(periods), max_periods_per_row): + __output_row__text__(timeline_data, periods[i : i + max_periods_per_row], names) + + def output_html(self): + if self.changes.get_commits(): + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + max_periods_per_row = 8 + + timeline_xml = '
' + timeline_xml += "

" + _(TIMELINE_INFO_TEXT) + ".

" + print(timeline_xml) + + for i in range(0, len(periods), max_periods_per_row): + __output_row__html__(timeline_data, periods[i : i + max_periods_per_row], names) + + timeline_xml = "
" + print(timeline_xml) + + def output_json(self): + if self.changes.get_commits(): + message_json = '\t\t\t"message": "' + _(TIMELINE_INFO_TEXT) + '",\n' + timeline_json = "" + periods_json = '\t\t\t"period_length": "{0}",\n'.format("week" if self.useweeks else "month") + periods_json += '\t\t\t"periods": [\n\t\t\t' + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + + for period in periods: + name_json = '\t\t\t\t"name": "' + str(period) + '",\n' + authors_json = '\t\t\t\t"authors": [\n\t\t\t\t' + + for name in names: + if timeline_data.is_author_in_period(period, name[0]): + multiplier = timeline_data.get_multiplier(period, 24) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + + if len(signs_str) == 0: + signs_str = "." + + authors_json += '{\n\t\t\t\t\t"name": "' + name[0] + '",\n' + authors_json += '\t\t\t\t\t"email": "' + name[1] + '",\n' + authors_json += '\t\t\t\t\t"gravatar": "' + gravatar.get_url(name[1]) + '",\n' + authors_json += '\t\t\t\t\t"work": "' + signs_str + '"\n\t\t\t\t},' + else: + authors_json = authors_json[:-1] + + authors_json += "],\n" + modified_rows_json = '\t\t\t\t"modified_rows": ' + str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" + timeline_json += "{\n" + name_json + authors_json + modified_rows_json + "\t\t\t}," + else: + timeline_json = timeline_json[:-1] + + print(',\n\t\t"timeline": {\n' + message_json + periods_json + timeline_json + "]\n\t\t}", end="") + + def output_xml(self): + if self.changes.get_commits(): + message_xml = "\t\t" + _(TIMELINE_INFO_TEXT) + "\n" + timeline_xml = "" + periods_xml = '\t\t\n'.format("week" if self.useweeks else "month") + + timeline_data = timeline.TimelineData(self.changes, self.useweeks) + periods = timeline_data.get_periods() + names = timeline_data.get_authors() + + for period in periods: + name_xml = "\t\t\t\t" + str(period) + "\n" + authors_xml = "\t\t\t\t\n" + + for name in names: + if timeline_data.is_author_in_period(period, name[0]): + multiplier = timeline_data.get_multiplier(period, 24) + signs = timeline_data.get_author_signs_in_period(name[0], period, multiplier) + signs_str = signs[1] * "-" + signs[0] * "+" + + if len(signs_str) == 0: + signs_str = "." 
+ + authors_xml += "\t\t\t\t\t\n\t\t\t\t\t\t" + name[0] + "\n" + authors_xml += "\t\t\t\t\t\t" + name[1] + "\n" + authors_xml += "\t\t\t\t\t\t" + gravatar.get_url(name[1]) + "\n" + authors_xml += "\t\t\t\t\t\t" + signs_str + "\n\t\t\t\t\t\n" + + authors_xml += "\t\t\t\t\n" + modified_rows_xml = ( + "\t\t\t\t" + str(timeline_data.get_total_changes_in_period(period)[2]) + "\n" + ) + timeline_xml += "\t\t\t\n" + name_xml + authors_xml + modified_rows_xml + "\t\t\t\n" + + print("\t\n" + message_xml + periods_xml + timeline_xml + "\t\t\n\t") diff --git a/gitinspector/responsibilities.py b/gitinspector/responsibilities.py index 6a3a0c95..22c28aab 100644 --- a/gitinspector/responsibilities.py +++ b/gitinspector/responsibilities.py @@ -19,18 +19,18 @@ class ResponsibiltyEntry(object): - blames = {} + blames = {} class Responsibilities(object): - @staticmethod - def get(blame, author_name): - author_blames = {} + @staticmethod + def get(blame, author_name): + author_blames = {} - for i in list(blame.blames.items()): - if author_name == i[0][0]: - total_rows = i[1].rows - i[1].comments - if total_rows > 0: - author_blames[i[0][1]] = total_rows + for i in list(blame.blames.items()): + if author_name == i[0][0]: + total_rows = i[1].rows - i[1].comments + if total_rows > 0: + author_blames[i[0][1]] = total_rows - return sorted(author_blames.items()) + return sorted(author_blames.items()) diff --git a/gitinspector/terminal.py b/gitinspector/terminal.py index 4c6ba997..f05e7376 100644 --- a/gitinspector/terminal.py +++ b/gitinspector/terminal.py @@ -31,149 +31,149 @@ def __get_size_windows__(): - res = None - try: - from ctypes import windll, create_string_buffer + res = None + try: + from ctypes import windll, create_string_buffer - handler = windll.kernel32.GetStdHandle(-12) # stderr - csbi = create_string_buffer(22) - res = windll.kernel32.GetConsoleScreenBufferInfo(handler, csbi) - except: - return DEFAULT_TERMINAL_SIZE + handler = windll.kernel32.GetStdHandle(-12) # stderr + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(handler, csbi) + except: + return DEFAULT_TERMINAL_SIZE - if res: - import struct + if res: + import struct - (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack("hhhhHhhhhhh", csbi.raw) - sizex = right - left + 1 - sizey = bottom - top + 1 - return sizex, sizey - else: - return DEFAULT_TERMINAL_SIZE + (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack("hhhhHhhhhhh", csbi.raw) + sizex = right - left + 1 + sizey = bottom - top + 1 + return sizex, sizey + else: + return DEFAULT_TERMINAL_SIZE def __get_size_linux__(): - def ioctl_get_window_size(file_descriptor): - try: - import fcntl, termios, struct + def ioctl_get_window_size(file_descriptor): + try: + import fcntl, termios, struct - size = struct.unpack("hh", fcntl.ioctl(file_descriptor, termios.TIOCGWINSZ, "1234")) - except: - return DEFAULT_TERMINAL_SIZE + size = struct.unpack("hh", fcntl.ioctl(file_descriptor, termios.TIOCGWINSZ, "1234")) + except: + return DEFAULT_TERMINAL_SIZE - return size + return size - size = ioctl_get_window_size(0) or ioctl_get_window_size(1) or ioctl_get_window_size(2) + size = ioctl_get_window_size(0) or ioctl_get_window_size(1) or ioctl_get_window_size(2) - if not size: - try: - file_descriptor = os.open(os.ctermid(), os.O_RDONLY) - size = ioctl_get_window_size(file_descriptor) - os.close(file_descriptor) - except: - pass - if not size: - try: - size = (os.environ["LINES"], os.environ["COLUMNS"]) - except: - return DEFAULT_TERMINAL_SIZE + if 
not size: + try: + file_descriptor = os.open(os.ctermid(), os.O_RDONLY) + size = ioctl_get_window_size(file_descriptor) + os.close(file_descriptor) + except: + pass + if not size: + try: + size = (os.environ["LINES"], os.environ["COLUMNS"]) + except: + return DEFAULT_TERMINAL_SIZE - return int(size[1]), int(size[0]) + return int(size[1]), int(size[0]) def clear_row(): - print("\r", end="") + print("\r", end="") def skip_escapes(skip): - if skip: - global __bold__ - global __normal__ - __bold__ = "" - __normal__ = "" + if skip: + global __bold__ + global __normal__ + __bold__ = "" + __normal__ = "" def printb(string): - print(__bold__ + string + __normal__) + print(__bold__ + string + __normal__) def get_size(): - width = 0 - height = 0 + width = 0 + height = 0 - if sys.stdout.isatty(): - current_os = platform.system() + if sys.stdout.isatty(): + current_os = platform.system() - if current_os == "Windows": - (width, height) = __get_size_windows__() - elif current_os == "Linux" or current_os == "Darwin" or current_os.startswith("CYGWIN"): - (width, height) = __get_size_linux__() + if current_os == "Windows": + (width, height) = __get_size_windows__() + elif current_os == "Linux" or current_os == "Darwin" or current_os.startswith("CYGWIN"): + (width, height) = __get_size_linux__() - if width > 0: - return (width, height) + if width > 0: + return (width, height) - return DEFAULT_TERMINAL_SIZE + return DEFAULT_TERMINAL_SIZE def set_stdout_encoding(): - if not sys.stdout.isatty() and sys.version_info < (3,): - sys.stdout = codecs.getwriter("utf-8")(sys.stdout) + if not sys.stdout.isatty() and sys.version_info < (3,): + sys.stdout = codecs.getwriter("utf-8")(sys.stdout) def set_stdin_encoding(): - if not sys.stdin.isatty() and sys.version_info < (3,): - sys.stdin = codecs.getreader("utf-8")(sys.stdin) + if not sys.stdin.isatty() and sys.version_info < (3,): + sys.stdin = codecs.getreader("utf-8")(sys.stdin) def convert_command_line_to_utf8(): - try: - argv = [] + try: + argv = [] - for arg in sys.argv: - argv.append(arg.decode(sys.stdin.encoding, "replace")) + for arg in sys.argv: + argv.append(arg.decode(sys.stdin.encoding, "replace")) - return argv - except AttributeError: - return sys.argv + return argv + except AttributeError: + return sys.argv def check_terminal_encoding(): - if sys.stdout.isatty() and (sys.stdout.encoding is None or sys.stdin.encoding is None): - print( - _( - "WARNING: The terminal encoding is not correctly configured. gitinspector might malfunction. " - "The encoding can be configured with the environment variable 'PYTHONIOENCODING'." - ), - file=sys.stderr, - ) + if sys.stdout.isatty() and (sys.stdout.encoding is None or sys.stdin.encoding is None): + print( + _( + "WARNING: The terminal encoding is not correctly configured. gitinspector might malfunction. " + "The encoding can be configured with the environment variable 'PYTHONIOENCODING'." 
+ ), + file=sys.stderr, + ) def get_excess_column_count(string): - width_mapping = {"F": 2, "H": 1, "W": 2, "Na": 1, "N": 1, "A": 1} - result = 0 + width_mapping = {"F": 2, "H": 1, "W": 2, "Na": 1, "N": 1, "A": 1} + result = 0 - for i in string: - width = unicodedata.east_asian_width(i) - result += width_mapping[width] + for i in string: + width = unicodedata.east_asian_width(i) + result += width_mapping[width] - return result - len(string) + return result - len(string) def ljust(string, pad): - return string.ljust(pad - get_excess_column_count(string)) + return string.ljust(pad - get_excess_column_count(string)) def rjust(string, pad): - return string.rjust(pad - get_excess_column_count(string)) + return string.rjust(pad - get_excess_column_count(string)) def output_progress(text, pos, length): - if sys.stdout.isatty(): - (width, _unused) = get_size() - progress_text = text.format(100 * pos / length) + if sys.stdout.isatty(): + (width, _unused) = get_size() + progress_text = text.format(100 * pos / length) - if len(progress_text) > width: - progress_text = "...%s" % progress_text[-width + 3 :] + if len(progress_text) > width: + progress_text = "...%s" % progress_text[-width + 3 :] - print("\r{0}\r{1}".format(" " * width, progress_text), end="") - sys.stdout.flush() + print("\r{0}\r{1}".format(" " * width, progress_text), end="") + sys.stdout.flush() diff --git a/gitinspector/timeline.py b/gitinspector/timeline.py index f3f9dedf..e3438ed2 100644 --- a/gitinspector/timeline.py +++ b/gitinspector/timeline.py @@ -22,79 +22,79 @@ class TimelineData(object): - def __init__(self, changes, useweeks): - authordateinfo_list = sorted(changes.get_authordateinfo_list().items()) - self.changes = changes - self.entries = {} - self.total_changes_by_period = {} - self.useweeks = useweeks - - for i in authordateinfo_list: - key = None - - if useweeks: - yearweek = datetime.date(int(i[0][0][0:4]), int(i[0][0][5:7]), int(i[0][0][8:10])).isocalendar() - key = (i[0][1], str(yearweek[0]) + "W" + "{0:02d}".format(yearweek[1])) - else: - key = (i[0][1], i[0][0][0:7]) - - if self.entries.get(key, None) is None: - self.entries[key] = i[1] - else: - self.entries[key].insertions += i[1].insertions - self.entries[key].deletions += i[1].deletions - - for period in self.get_periods(): - total_insertions = 0 - total_deletions = 0 - - for author in self.get_authors(): - entry = self.entries.get((author[0], period), None) - if entry is not None: - total_insertions += entry.insertions - total_deletions += entry.deletions - - self.total_changes_by_period[period] = (total_insertions, total_deletions, total_insertions + total_deletions) - - def get_periods(self): - return sorted(set([i[1] for i in self.entries])) - - def get_total_changes_in_period(self, period): - return self.total_changes_by_period[period] - - def get_authors(self): - return sorted(set([(i[0][0], self.changes.get_latest_email_by_author(i[0][0])) for i in list(self.entries.items())])) - - def get_author_signs_in_period(self, author, period, multiplier): - authorinfo = self.entries.get((author, period), None) - total = float(self.total_changes_by_period[period][2]) - - if authorinfo: - i = multiplier * (self.entries[(author, period)].insertions / total) - j = multiplier * (self.entries[(author, period)].deletions / total) - return (int(i), int(j)) - else: - return (0, 0) - - def get_multiplier(self, period, max_width): - multiplier = 0 - - while True: - for i in self.entries: - entry = self.entries.get(i) - - if period == i[1]: - changes_in_period = 
float(self.total_changes_by_period[i[1]][2]) - if multiplier * (entry.insertions + entry.deletions) / changes_in_period > max_width: - return multiplier - - multiplier += 0.25 - - def is_author_in_period(self, period, author): - return self.entries.get((author, period), None) is not None - - def is_author_in_periods(self, periods, author): - for period in periods: - if self.is_author_in_period(period, author): - return True - return False + def __init__(self, changes, useweeks): + authordateinfo_list = sorted(changes.get_authordateinfo_list().items()) + self.changes = changes + self.entries = {} + self.total_changes_by_period = {} + self.useweeks = useweeks + + for i in authordateinfo_list: + key = None + + if useweeks: + yearweek = datetime.date(int(i[0][0][0:4]), int(i[0][0][5:7]), int(i[0][0][8:10])).isocalendar() + key = (i[0][1], str(yearweek[0]) + "W" + "{0:02d}".format(yearweek[1])) + else: + key = (i[0][1], i[0][0][0:7]) + + if self.entries.get(key, None) is None: + self.entries[key] = i[1] + else: + self.entries[key].insertions += i[1].insertions + self.entries[key].deletions += i[1].deletions + + for period in self.get_periods(): + total_insertions = 0 + total_deletions = 0 + + for author in self.get_authors(): + entry = self.entries.get((author[0], period), None) + if entry is not None: + total_insertions += entry.insertions + total_deletions += entry.deletions + + self.total_changes_by_period[period] = (total_insertions, total_deletions, total_insertions + total_deletions) + + def get_periods(self): + return sorted(set([i[1] for i in self.entries])) + + def get_total_changes_in_period(self, period): + return self.total_changes_by_period[period] + + def get_authors(self): + return sorted(set([(i[0][0], self.changes.get_latest_email_by_author(i[0][0])) for i in list(self.entries.items())])) + + def get_author_signs_in_period(self, author, period, multiplier): + authorinfo = self.entries.get((author, period), None) + total = float(self.total_changes_by_period[period][2]) + + if authorinfo: + i = multiplier * (self.entries[(author, period)].insertions / total) + j = multiplier * (self.entries[(author, period)].deletions / total) + return (int(i), int(j)) + else: + return (0, 0) + + def get_multiplier(self, period, max_width): + multiplier = 0 + + while True: + for i in self.entries: + entry = self.entries.get(i) + + if period == i[1]: + changes_in_period = float(self.total_changes_by_period[i[1]][2]) + if multiplier * (entry.insertions + entry.deletions) / changes_in_period > max_width: + return multiplier + + multiplier += 0.25 + + def is_author_in_period(self, period, author): + return self.entries.get((author, period), None) is not None + + def is_author_in_periods(self, periods, author): + for period in periods: + if self.is_author_in_period(period, author): + return True + return False diff --git a/gitinspector/version.py b/gitinspector/version.py index ef0c1034..2ebfb49b 100644 --- a/gitinspector/version.py +++ b/gitinspector/version.py @@ -25,7 +25,7 @@ __version__ = "0.5.0dev" __doc__ = _( - """Copyright © 2012-2015 Ejwa Software. All rights reserved. + """Copyright © 2012-2015 Ejwa Software. All rights reserved. License GPLv3+: GNU GPL version 3 or later . This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. 
@@ -35,4 +35,4 @@ def output(): - print("gitinspector {0}\n".format(__version__) + __doc__) + print("gitinspector {0}\n".format(__version__) + __doc__) From e89631028e43c7e6361e29495e4de0b8631e3313 Mon Sep 17 00:00:00 2001 From: JP White Date: Wed, 23 Jun 2021 11:29:24 -0400 Subject: [PATCH 42/46] Resolving pylint R0205 warnings --- Makefile | 3 ++- gitinspector/blame.py | 4 ++-- gitinspector/changes.py | 8 ++++---- gitinspector/clone.py | 2 +- gitinspector/config.py | 2 +- gitinspector/gitinspector.py | 2 +- gitinspector/metrics.py | 2 +- gitinspector/output/outputable.py | 2 +- gitinspector/responsibilities.py | 4 ++-- gitinspector/timeline.py | 2 +- 10 files changed, 16 insertions(+), 15 deletions(-) diff --git a/Makefile b/Makefile index 7a48fe1d..6b80f37f 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,8 @@ clean-test: ## remove test and coverage artifacts rm -f .coverage rm -fr .pytest_cache -lint: ## check style with flake8 +lint: ## check style with flake8 and pylint + pylint --rcfile=.pylintrc gitinspector # stop the build if there are Python syntax errors or undefined names flake8 gitinspector tests --count --select=E9,F63,F7,F82 --show-source --statistics --builtins="_" # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide diff --git a/gitinspector/blame.py b/gitinspector/blame.py index f4d7b317..24e85f52 100644 --- a/gitinspector/blame.py +++ b/gitinspector/blame.py @@ -30,7 +30,7 @@ NUM_THREADS = multiprocessing.cpu_count() -class BlameEntry(object): +class BlameEntry(): rows = 0 skew = 0 # Used when calculating average code age. comments = 0 @@ -127,7 +127,7 @@ def run(self): PROGRESS_TEXT = N_("Checking how many rows belong to each author (2 of 2): {0:.0f}%") -class Blame(object): +class Blame(): def __init__(self, repo, hard, useweeks, changes): self.blames = {} ls_tree_p = subprocess.Popen( diff --git a/gitinspector/changes.py b/gitinspector/changes.py index 640d617d..cad9cee0 100644 --- a/gitinspector/changes.py +++ b/gitinspector/changes.py @@ -34,7 +34,7 @@ __changes_lock__ = threading.Lock() -class FileDiff(object): +class FileDiff(): def __init__(self, string): commit_line = string.split("|") @@ -67,7 +67,7 @@ def is_valid_extension(string): return False -class Commit(object): +class Commit(): def __init__(self, string): self.filediffs = [] commit_line = string.split("|") @@ -100,7 +100,7 @@ def is_commit_line(string): return string.split("|").__len__() == 5 -class AuthorInfo(object): +class AuthorInfo(): email = None insertions = 0 deletions = 0 @@ -198,7 +198,7 @@ def run(self): PROGRESS_TEXT = N_("Fetching and calculating primary statistics (1 of 2): {0:.0f}%") -class Changes(object): +class Changes(): authors = {} authors_dateinfo = {} authors_by_email = {} diff --git a/gitinspector/clone.py b/gitinspector/clone.py index fc78e833..2b199a77 100644 --- a/gitinspector/clone.py +++ b/gitinspector/clone.py @@ -33,7 +33,7 @@ def create(url): - class Repository(object): + class Repository(): def __init__(self, name, location): self.name = name self.location = location diff --git a/gitinspector/config.py b/gitinspector/config.py index ee446999..6fc1a7a1 100644 --- a/gitinspector/config.py +++ b/gitinspector/config.py @@ -23,7 +23,7 @@ from . 
import extensions, filtering, format, interval, optval -class GitConfig(object): +class GitConfig(): def __init__(self, run, repo, global_only=False): self.run = run self.repo = repo diff --git a/gitinspector/gitinspector.py b/gitinspector/gitinspector.py index 0de6a412..ce3ee07d 100644 --- a/gitinspector/gitinspector.py +++ b/gitinspector/gitinspector.py @@ -39,7 +39,7 @@ localization.init() -class Runner(object): +class Runner(): def __init__(self): self.hard = False self.include_metrics = False diff --git a/gitinspector/metrics.py b/gitinspector/metrics.py index 079874b0..875882e9 100644 --- a/gitinspector/metrics.py +++ b/gitinspector/metrics.py @@ -70,7 +70,7 @@ METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD = 0.75 -class MetricsLogic(object): +class MetricsLogic(): def __init__(self): self.eloc = {} self.cyclomatic_complexity = {} diff --git a/gitinspector/output/outputable.py b/gitinspector/output/outputable.py index 2be7df33..1addf39b 100644 --- a/gitinspector/output/outputable.py +++ b/gitinspector/output/outputable.py @@ -21,7 +21,7 @@ from .. import format -class Outputable(object): +class Outputable(): def output_html(self): raise NotImplementedError(_("HTML output not yet supported in") + ' "' + self.__class__.__name__ + '".') diff --git a/gitinspector/responsibilities.py b/gitinspector/responsibilities.py index 22c28aab..73590c6c 100644 --- a/gitinspector/responsibilities.py +++ b/gitinspector/responsibilities.py @@ -18,11 +18,11 @@ # along with gitinspector. If not, see . -class ResponsibiltyEntry(object): +class ResponsibiltyEntry(): blames = {} -class Responsibilities(object): +class Responsibilities(): @staticmethod def get(blame, author_name): author_blames = {} diff --git a/gitinspector/timeline.py b/gitinspector/timeline.py index e3438ed2..3e5b76e3 100644 --- a/gitinspector/timeline.py +++ b/gitinspector/timeline.py @@ -21,7 +21,7 @@ import datetime -class TimelineData(object): +class TimelineData(): def __init__(self, changes, useweeks): authordateinfo_list = sorted(changes.get_authordateinfo_list().items()) self.changes = changes From 271fc04a3f53776f3b0d0c85026980573e8361ed Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 1 Mar 2022 09:25:57 -0500 Subject: [PATCH 43/46] Removing support for Python 3.6 as it is end of life --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 605d1590..7ef5a8bf 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: [3.7, 3.8, 3.9, 3.10] steps: - name: Checkout From 28e155b0c57d3a77b8200bfcefaaf38251b7786f Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 1 Mar 2022 13:18:18 -0500 Subject: [PATCH 44/46] Removing linting and coverage from default CI action --- .github/workflows/python-package.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 7ef5a8bf..3bd1c16a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -5,7 +5,7 @@ name: Python package on: push: - branches: [ master ] + branches: [ master, develop, feature/** ] pull_request: branches: [ master ] @@ -31,10 +31,7 @@ jobs: python -m pip install --upgrade pip if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: 
Lint with flake8 - run: make lint - - name: Test with pytest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: make test-coverage-report + run: make test From f4460302b0220ed8213027f2bd1118d9cbe854cd Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 1 Mar 2022 13:19:54 -0500 Subject: [PATCH 45/46] Fixing Python 3.10 version to 3.10.2 --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 3bd1c16a..d9809260 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8, 3.9, 3.10] + python-version: [3.7, 3.8, 3.9, 3.10.2] steps: - name: Checkout From 639fdea2b49017d641cbb827112cf1db96d2ce13 Mon Sep 17 00:00:00 2001 From: JP White Date: Tue, 1 Mar 2022 13:28:30 -0500 Subject: [PATCH 46/46] Upgrading pytest to 7.0.1 for compatability with Python 3.10 --- Pipfile | 1 + Pipfile.lock | 550 +++++++++++++++++++++++++++-------------------- requirements.txt | 2 +- 3 files changed, 314 insertions(+), 239 deletions(-) diff --git a/Pipfile b/Pipfile index 74e63d59..3b04aa39 100644 --- a/Pipfile +++ b/Pipfile @@ -5,6 +5,7 @@ name = "pypi" [packages] black-but-with-tabs-instead-of-spaces = "*" +pytest = "*" [dev-packages] pytest = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 630f3e54..2f719d93 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "87e9949234210245765703c4d654f0f7205eec399e5d2acba50242218b858a45" + "sha256": "ab3a082c5a80fa8d8b3d938bec3237bd3f5ba9e400869f61cef243ec1c2cfdfc" }, "pipfile-spec": 6, "requires": {}, @@ -23,11 +23,11 @@ }, "attrs": { "hashes": [ - "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", - "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", + "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.2.0" + "version": "==21.4.0" }, "black-but-with-tabs-instead-of-spaces": { "hashes": [ @@ -39,11 +39,18 @@ }, "click": { "hashes": [ - "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", - "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" + "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1", + "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb" ], "markers": "python_version >= '3.6'", - "version": "==8.0.1" + "version": "==8.0.4" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" }, "mypy-extensions": { "hashes": [ @@ -52,58 +59,131 @@ ], "version": "==0.4.3" }, + "packaging": { + "hashes": [ + "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", + "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" + ], + "markers": "python_version >= '3.6'", + "version": "==21.3" + }, "pathspec": { "hashes": [ - "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", - "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a", + 
"sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1" + ], + "version": "==0.9.0" + }, + "pluggy": { + "hashes": [ + "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", + "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" + ], + "markers": "python_version >= '3.6'", + "version": "==1.0.0" + }, + "py": { + "hashes": [ + "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", + "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.11.0" + }, + "pyparsing": { + "hashes": [ + "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea", + "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484" + ], + "markers": "python_version >= '3.6'", + "version": "==3.0.7" + }, + "pytest": { + "hashes": [ + "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db", + "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171" ], - "version": "==0.8.1" + "index": "pypi", + "version": "==7.0.1" }, "regex": { "hashes": [ - "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", - "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", - "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", - "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", - "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", - "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", - "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", - "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", - "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", - "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", - "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", - "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", - "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", - "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", - "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", - "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", - "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", - "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", - "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", - "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", - "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", - "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", - "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", - "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", - "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", - "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", - "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", - "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", - "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", - "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", - 
"sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", - "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", - "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", - "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", - "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", - "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", - "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", - "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", - "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", - "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", - "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" - ], - "version": "==2021.4.4" + "sha256:04611cc0f627fc4a50bc4a9a2e6178a974c6a6a4aa9c1cca921635d2c47b9c87", + "sha256:0b5d6f9aed3153487252d00a18e53f19b7f52a1651bc1d0c4b5844bc286dfa52", + "sha256:0d2f5c3f7057530afd7b739ed42eb04f1011203bc5e4663e1e1d01bb50f813e3", + "sha256:11772be1eb1748e0e197a40ffb82fb8fd0d6914cd147d841d9703e2bef24d288", + "sha256:1333b3ce73269f986b1fa4d5d395643810074dc2de5b9d262eb258daf37dc98f", + "sha256:16f81025bb3556eccb0681d7946e2b35ff254f9f888cff7d2120e8826330315c", + "sha256:1a171eaac36a08964d023eeff740b18a415f79aeb212169080c170ec42dd5184", + "sha256:1d6301f5288e9bdca65fab3de6b7de17362c5016d6bf8ee4ba4cbe833b2eda0f", + "sha256:1e031899cb2bc92c0cf4d45389eff5b078d1936860a1be3aa8c94fa25fb46ed8", + "sha256:1f8c0ae0a0de4e19fddaaff036f508db175f6f03db318c80bbc239a1def62d02", + "sha256:2245441445099411b528379dee83e56eadf449db924648e5feb9b747473f42e3", + "sha256:22709d701e7037e64dae2a04855021b62efd64a66c3ceed99dfd684bfef09e38", + "sha256:24c89346734a4e4d60ecf9b27cac4c1fee3431a413f7aa00be7c4d7bbacc2c4d", + "sha256:25716aa70a0d153cd844fe861d4f3315a6ccafce22b39d8aadbf7fcadff2b633", + "sha256:2dacb3dae6b8cc579637a7b72f008bff50a94cde5e36e432352f4ca57b9e54c4", + "sha256:34316bf693b1d2d29c087ee7e4bb10cdfa39da5f9c50fa15b07489b4ab93a1b5", + "sha256:36b2d700a27e168fa96272b42d28c7ac3ff72030c67b32f37c05616ebd22a202", + "sha256:37978254d9d00cda01acc1997513f786b6b971e57b778fbe7c20e30ae81a97f3", + "sha256:38289f1690a7e27aacd049e420769b996826f3728756859420eeee21cc857118", + "sha256:385ccf6d011b97768a640e9d4de25412204fbe8d6b9ae39ff115d4ff03f6fe5d", + "sha256:3c7ea86b9ca83e30fa4d4cd0eaf01db3ebcc7b2726a25990966627e39577d729", + "sha256:49810f907dfe6de8da5da7d2b238d343e6add62f01a15d03e2195afc180059ed", + "sha256:519c0b3a6fbb68afaa0febf0d28f6c4b0a1074aefc484802ecb9709faf181607", + "sha256:51f02ca184518702975b56affde6c573ebad4e411599005ce4468b1014b4786c", + "sha256:552a39987ac6655dad4bf6f17dd2b55c7b0c6e949d933b8846d2e312ee80005a", + "sha256:596f5ae2eeddb79b595583c2e0285312b2783b0ec759930c272dbf02f851ff75", + "sha256:6014038f52b4b2ac1fa41a58d439a8a00f015b5c0735a0cd4b09afe344c94899", + "sha256:61ebbcd208d78658b09e19c78920f1ad38936a0aa0f9c459c46c197d11c580a0", + "sha256:6213713ac743b190ecbf3f316d6e41d099e774812d470422b3a0f137ea635832", + "sha256:637e27ea1ebe4a561db75a880ac659ff439dec7f55588212e71700bb1ddd5af9", + "sha256:6aa427c55a0abec450bca10b64446331b5ca8f79b648531138f357569705bc4a", + "sha256:6ca45359d7a21644793de0e29de497ef7f1ae7268e346c4faf87b421fea364e6", + "sha256:6db1b52c6f2c04fafc8da17ea506608e6be7086715dab498570c3e55e4f8fbd1", + "sha256:752e7ddfb743344d447367baa85bccd3629c2c3940f70506eb5f01abce98ee68", + 
"sha256:760c54ad1b8a9b81951030a7e8e7c3ec0964c1cb9fee585a03ff53d9e531bb8e", + "sha256:768632fd8172ae03852e3245f11c8a425d95f65ff444ce46b3e673ae5b057b74", + "sha256:7a0b9f6a1a15d494b35f25ed07abda03209fa76c33564c09c9e81d34f4b919d7", + "sha256:7e070d3aef50ac3856f2ef5ec7214798453da878bb5e5a16c16a61edf1817cc3", + "sha256:7e12949e5071c20ec49ef00c75121ed2b076972132fc1913ddf5f76cae8d10b4", + "sha256:7e26eac9e52e8ce86f915fd33380f1b6896a2b51994e40bb094841e5003429b4", + "sha256:85ffd6b1cb0dfb037ede50ff3bef80d9bf7fa60515d192403af6745524524f3b", + "sha256:8618d9213a863c468a865e9d2ec50221015f7abf52221bc927152ef26c484b4c", + "sha256:8acef4d8a4353f6678fd1035422a937c2170de58a2b29f7da045d5249e934101", + "sha256:8d2f355a951f60f0843f2368b39970e4667517e54e86b1508e76f92b44811a8a", + "sha256:90b6840b6448203228a9d8464a7a0d99aa8fa9f027ef95fe230579abaf8a6ee1", + "sha256:9187500d83fd0cef4669385cbb0961e227a41c0c9bc39219044e35810793edf7", + "sha256:93c20777a72cae8620203ac11c4010365706062aa13aaedd1a21bb07adbb9d5d", + "sha256:93cce7d422a0093cfb3606beae38a8e47a25232eea0f292c878af580a9dc7605", + "sha256:94c623c331a48a5ccc7d25271399aff29729fa202c737ae3b4b28b89d2b0976d", + "sha256:97f32dc03a8054a4c4a5ab5d761ed4861e828b2c200febd4e46857069a483916", + "sha256:9a2bf98ac92f58777c0fafc772bf0493e67fcf677302e0c0a630ee517a43b949", + "sha256:a602bdc8607c99eb5b391592d58c92618dcd1537fdd87df1813f03fed49957a6", + "sha256:a9d24b03daf7415f78abc2d25a208f234e2c585e5e6f92f0204d2ab7b9ab48e3", + "sha256:abfcb0ef78df0ee9df4ea81f03beea41849340ce33a4c4bd4dbb99e23ec781b6", + "sha256:b013f759cd69cb0a62de954d6d2096d648bc210034b79b1881406b07ed0a83f9", + "sha256:b02e3e72665cd02afafb933453b0c9f6c59ff6e3708bd28d0d8580450e7e88af", + "sha256:b52cc45e71657bc4743a5606d9023459de929b2a198d545868e11898ba1c3f59", + "sha256:ba37f11e1d020969e8a779c06b4af866ffb6b854d7229db63c5fdddfceaa917f", + "sha256:bb804c7d0bfbd7e3f33924ff49757de9106c44e27979e2492819c16972ec0da2", + "sha256:bf594cc7cc9d528338d66674c10a5b25e3cde7dd75c3e96784df8f371d77a298", + "sha256:c38baee6bdb7fe1b110b6b3aaa555e6e872d322206b7245aa39572d3fc991ee4", + "sha256:c73d2166e4b210b73d1429c4f1ca97cea9cc090e5302df2a7a0a96ce55373f1c", + "sha256:c9099bf89078675c372339011ccfc9ec310310bf6c292b413c013eb90ffdcafc", + "sha256:cf0db26a1f76aa6b3aa314a74b8facd586b7a5457d05b64f8082a62c9c49582a", + "sha256:d19a34f8a3429bd536996ad53597b805c10352a8561d8382e05830df389d2b43", + "sha256:da80047524eac2acf7c04c18ac7a7da05a9136241f642dd2ed94269ef0d0a45a", + "sha256:de2923886b5d3214be951bc2ce3f6b8ac0d6dfd4a0d0e2a4d2e5523d8046fdfb", + "sha256:defa0652696ff0ba48c8aff5a1fac1eef1ca6ac9c660b047fc8e7623c4eb5093", + "sha256:e54a1eb9fd38f2779e973d2f8958fd575b532fe26013405d1afb9ee2374e7ab8", + "sha256:e5c31d70a478b0ca22a9d2d76d520ae996214019d39ed7dd93af872c7f301e52", + "sha256:ebaeb93f90c0903233b11ce913a7cb8f6ee069158406e056f884854c737d2442", + "sha256:ecfe51abf7f045e0b9cdde71ca9e153d11238679ef7b5da6c82093874adf3338", + "sha256:f99112aed4fb7cee00c7f77e8b964a9b10f69488cdff626ffd797d02e2e4484f", + "sha256:fd914db437ec25bfa410f8aa0aa2f3ba87cdfc04d9919d608d02330947afaeab" + ], + "version": "==2022.1.18" }, "toml": { "hashes": [ @@ -113,81 +193,84 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.1" + }, "typed-ast": { "hashes": [ - 
"sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", - "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", - "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", - "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", - "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", - "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", - "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", - "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", - "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", - "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", - "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", - "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", - "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", - "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", - "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", - "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", - "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", - "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", - "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", - "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", - "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", - "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", - "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", - "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", - "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", - "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", - "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", - "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", - "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", - "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" - ], - "version": "==1.4.3" + "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e", + "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344", + "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266", + "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a", + "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd", + "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d", + "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837", + "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098", + "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e", + "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27", + "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b", + "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596", + "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76", + "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30", + "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4", + 
"sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78", + "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca", + "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985", + "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb", + "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88", + "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7", + "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5", + "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e", + "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7" + ], + "markers": "python_version >= '3.6'", + "version": "==1.5.2" }, "typing-extensions": { "hashes": [ - "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", - "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", - "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42", + "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2" ], - "version": "==3.10.0.0" + "markers": "python_version >= '3.6'", + "version": "==4.1.1" } }, "develop": { "attrs": { "hashes": [ - "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", - "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", + "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.2.0" + "version": "==21.4.0" }, "bleach": { "hashes": [ - "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", - "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" + "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da", + "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==3.3.0" + "markers": "python_version >= '3.6'", + "version": "==4.1.0" }, "certifi": { "hashes": [ - "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", - "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" + "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", + "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" ], - "version": "==2021.5.30" + "version": "==2021.10.8" }, - "chardet": { + "charset-normalizer": { "hashes": [ - "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", - "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", + "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==4.0.0" + "markers": "python_version >= '3'", + "version": "==2.0.12" }, "colorama": { "hashes": [ @@ -199,69 +282,58 @@ }, "coverage": { "hashes": [ - "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", - "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", - "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", - 
"sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", - "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", - "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", - "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", - "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", - "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", - "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", - "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", - "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", - "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", - "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", - "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", - "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", - "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", - "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", - "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", - "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", - "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", - "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", - "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", - "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", - "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", - "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", - "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", - "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", - "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", - "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", - "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", - "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", - "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", - "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", - "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", - "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", - "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", - "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", - "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", - "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", - "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", - "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", - "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", - "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", - "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", - "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", - "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", - "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", - "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", - 
"sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", - "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", - "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" + "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9", + "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d", + "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf", + "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7", + "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6", + "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4", + "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059", + "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39", + "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536", + "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac", + "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c", + "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903", + "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d", + "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05", + "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684", + "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1", + "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f", + "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7", + "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca", + "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad", + "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca", + "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d", + "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92", + "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4", + "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf", + "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6", + "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1", + "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4", + "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359", + "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3", + "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620", + "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512", + "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69", + "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2", + "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518", + "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0", + "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa", + "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4", + "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e", + "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1", + "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2" ], "index": "pypi", - "version": "==5.5" + "version": "==6.3.2" }, "coveralls": { "hashes": [ - "sha256:172fb79c5f61c6ede60554f2cac46deff6d64ee735991fb2124fb414e188bdb4", 
- "sha256:9b3236e086627340bf2c95f89f757d093cbed43d17179d3f4fb568c347e7d29a" + "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea", + "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026" ], "index": "pypi", - "version": "==3.1.0" + "version": "==3.3.1" }, "docopt": { "hashes": [ @@ -271,35 +343,35 @@ }, "docutils": { "hashes": [ - "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", - "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61" + "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c", + "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.17.1" + "version": "==0.18.1" }, "flake8": { "hashes": [ - "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", - "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907" + "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d", + "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d" ], "index": "pypi", - "version": "==3.9.2" + "version": "==4.0.1" }, "idna": { "hashes": [ - "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", - "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", + "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.10" + "markers": "python_version >= '3'", + "version": "==3.3" }, "importlib-metadata": { "hashes": [ - "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00", - "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139" + "sha256:b36ffa925fe3139b2f6ff11d6925ffd4fa7bc47870165e3ac260ac7b4f91e6ac", + "sha256:d16e8c1deb60de41b8e8ed21c1a7b947b0bc62fab7e1d470bcdf331cea2e6735" ], - "markers": "python_version >= '3.6'", - "version": "==4.5.0" + "markers": "python_version >= '3.7'", + "version": "==4.11.2" }, "iniconfig": { "hashes": [ @@ -310,11 +382,11 @@ }, "keyring": { "hashes": [ - "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", - "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48" + "sha256:9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9", + "sha256:b0d28928ac3ec8e42ef4cc227822647a19f1d544f21f96457965dc01cf555261" ], - "markers": "python_version >= '3.6'", - "version": "==23.0.1" + "markers": "python_version >= '3.7'", + "version": "==23.5.0" }, "mccabe": { "hashes": [ @@ -325,89 +397,90 @@ }, "packaging": { "hashes": [ - "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", - "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", + "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.9" + "markers": "python_version >= '3.6'", + "version": "==21.3" }, "pkginfo": { "hashes": [ - "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", - "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75" + "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff", + 
"sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc" ], - "version": "==1.7.0" + "version": "==1.8.2" }, "pluggy": { "hashes": [ - "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", - "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", + "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.13.1" + "markers": "python_version >= '3.6'", + "version": "==1.0.0" }, "py": { "hashes": [ - "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", - "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", + "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.10.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.11.0" }, "pycodestyle": { "hashes": [ - "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", - "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" + "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20", + "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.7.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==2.8.0" }, "pyflakes": { "hashes": [ - "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", - "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db" + "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c", + "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.3.1" + "version": "==2.4.0" }, "pygments": { "hashes": [ - "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", - "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" + "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65", + "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a" ], "markers": "python_version >= '3.5'", - "version": "==2.9.0" + "version": "==2.11.2" }, "pyparsing": { "hashes": [ - "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", - "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea", + "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.4.7" + "markers": "python_version >= '3.6'", + "version": "==3.0.7" }, "pytest": { "hashes": [ - "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b", - "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890" + "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db", + "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171" ], "index": "pypi", - "version": "==6.2.4" + "version": 
"==7.0.1" }, "readme-renderer": { "hashes": [ - "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c", - "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db" + "sha256:a50a0f2123a4c1145ac6f420e1a348aafefcc9211c846e3d51df05fe3d865b7d", + "sha256:b512beafa6798260c7d5af3e1b1f097e58bfcd9a575da7c4ddd5e037490a5b85" ], - "version": "==29.0" + "markers": "python_version >= '3.6'", + "version": "==32.0" }, "requests": { "hashes": [ - "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", - "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", + "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.25.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==2.27.1" }, "requests-toolbelt": { "hashes": [ @@ -418,10 +491,11 @@ }, "rfc3986": { "hashes": [ - "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", - "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" + "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", + "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c" ], - "version": "==1.5.0" + "markers": "python_version >= '3.7'", + "version": "==2.0.0" }, "six": { "hashes": [ @@ -431,37 +505,37 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.16.0" }, - "toml": { + "tomli": { "hashes": [ - "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", - "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.2" + "markers": "python_version >= '3.7'", + "version": "==2.0.1" }, "tqdm": { "hashes": [ - "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd", - "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2" + "sha256:1d9835ede8e394bb8c9dcbffbca02d717217113adc679236873eeaac5bc0b3cd", + "sha256:e643e071046f17139dea55b880dc9b33822ce21613b4a4f5ea57f202833dbc29" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==4.61.1" + "version": "==4.63.0" }, "twine": { "hashes": [ - "sha256:16f706f2f1687d7ce30e7effceee40ed0a09b7c33b9abb5ef6434e5551565d83", - "sha256:a56c985264b991dc8a8f4234eb80c5af87fa8080d0c224ad8f2cd05a2c22e83b" + "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19", + "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8" ], "index": "pypi", - "version": "==3.4.1" + "version": "==3.8.0" }, "urllib3": { "hashes": [ - "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", - "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" + "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed", + "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.5" + "version": "==1.26.8" }, 
"webencodings": { "hashes": [ @@ -472,11 +546,11 @@ }, "zipp": { "hashes": [ - "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", - "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" + "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d", + "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375" ], - "markers": "python_version >= '3.6'", - "version": "==3.4.1" + "markers": "python_version >= '3.7'", + "version": "==3.7.0" } } } diff --git a/requirements.txt b/requirements.txt index 3cb32dbd..ac615d63 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,7 +31,7 @@ pycparser==2.20 pyflakes==2.2.0 pygments==2.9.0 pyparsing==3.0.0b2 -pytest==6.2.2 +pytest==7.0.1 readme-renderer==29.0 regex==2021.4.4 requests-toolbelt==0.9.1