From d3dc6d073a5b256d8948b691156f6814e7012c2c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 19 Feb 2022 17:15:42 -0500 Subject: [PATCH] feat: initial contents --- .gitignore | 7 + LICENSE.txt | 180 +++++++++++++++ MANIFEST.in | 4 + Makefile | 42 ++++ README.rst | 5 + dev-requirements.txt | 6 + setup.cfg | 43 ++++ setup.py | 6 + src/dinghy/__init__.py | 5 + src/dinghy/__main__.py | 5 + src/dinghy/digest.py | 207 ++++++++++++++++++ src/dinghy/graphql/author_frag.graphql | 4 + src/dinghy/graphql/comment_frag.graphql | 16 ++ src/dinghy/graphql/issue_comments.graphql | 20 ++ src/dinghy/graphql/issue_frag.graphql | 44 ++++ src/dinghy/graphql/project_issues.graphql | 28 +++ src/dinghy/graphql/repo_frag.graphql | 8 + src/dinghy/graphql/repo_issues.graphql | 19 ++ src/dinghy/graphql/repo_pull_requests.graphql | 99 +++++++++ src/dinghy/graphql_helpers.py | 113 ++++++++++ src/dinghy/helpers.py | 46 ++++ src/dinghy/jinja_helpers.py | 34 +++ src/dinghy/templates/digest.html.j2 | 135 ++++++++++++ tests/test_helpers.py | 40 ++++ 24 files changed, 1116 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE.txt create mode 100644 MANIFEST.in create mode 100644 Makefile create mode 100644 README.rst create mode 100644 dev-requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 src/dinghy/__init__.py create mode 100644 src/dinghy/__main__.py create mode 100644 src/dinghy/digest.py create mode 100644 src/dinghy/graphql/author_frag.graphql create mode 100644 src/dinghy/graphql/comment_frag.graphql create mode 100644 src/dinghy/graphql/issue_comments.graphql create mode 100644 src/dinghy/graphql/issue_frag.graphql create mode 100644 src/dinghy/graphql/project_issues.graphql create mode 100644 src/dinghy/graphql/repo_frag.graphql create mode 100644 src/dinghy/graphql/repo_issues.graphql create mode 100644 src/dinghy/graphql/repo_pull_requests.graphql create mode 100644 src/dinghy/graphql_helpers.py create mode 100644 src/dinghy/helpers.py create mode 100644 src/dinghy/jinja_helpers.py create mode 100644 src/dinghy/templates/digest.html.j2 create mode 100644 tests/test_helpers.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..37e57d6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +out*.json +dinghy.yaml +token.sh + +*.egg-info/ +build/ +dist/ diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..28ba471 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,180 @@ + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..fca9661
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include Makefile
+include dev-requirements.txt
+
+recursive-include tests *
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..ede3cdd
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,42 @@
+.PHONY: clean help quality requirements test validate
+
+.DEFAULT_GOAL := help
+
+help: ## display this help message
+	@echo "Please use \`make <target>' where <target> is one of"
+	@awk -F ':.*?## ' '/^[a-zA-Z]/ && NF==2 {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort
+
+clean: ## remove generated byte code, coverage reports, and build artifacts
+	find . -name '__pycache__' -exec rm -rf {} +
+	find . -name '*.pyc' -exec rm -f {} +
+	find . -name '*.pyo' -exec rm -f {} +
+	find . -name '*~' -exec rm -f {} +
+	rm -fr build/
+	rm -fr dist/
+	rm -fr src/*.egg-info
+	rm -fr .*_cache/
+
+requirements: ## install development environment requirements
+	pip install -r dev-requirements.txt
+	pip-sync requirements/dev.txt requirements/private.*
+
+test: ## run tests in the current virtualenv
+	pytest tests
+
+black: ## run black to format source
+	black src tests
+
+pylint: ## run pylint to find code smells
+	pylint src tests
+
+
+.PHONY: dist pypi testpypi
+
+dist: ## build the distributions
+	python -m build --sdist --wheel
+
+pypi: ## upload the built distributions to PyPI.
+	python -m twine upload --verbose dist/*
+
+testpypi: ## upload the distributions to PyPI's testing server.
+ python -m twine upload --verbose --repository testpypi dist/* diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..4965afa --- /dev/null +++ b/README.rst @@ -0,0 +1,5 @@ +###### +Dinghy +###### + +Dinghy daily digest tool. diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 0000000..e0e2313 --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,6 @@ +-e . +black +build +pylint +pytest +twine diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..04b339d --- /dev/null +++ b/setup.cfg @@ -0,0 +1,43 @@ +# Dinghy setup.cfg + +[metadata] +name = dinghy +version = attr: dinghy.__version__ +description = Dinghy daily digest tool +long_description = file: README.rst +long_description_content_type = text/x-rst +url = https://github.com/nedbat/dinghy +author = Ned Batchelder +author_email = ned@nedbatchelder.com +license = Apache +zip_safe = False +classifiers = + Development Status :: 4 - Beta + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Natural Language :: English + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + +[options] +packages = + dinghy +package_dir = + = src +install_requires = + aiofiles + aiohttp + glom + jinja2 + pyyaml + wcag-contrast-ratio + +[options.package_data] +dinghy = + graphql/*.* + templates/*.* + +[wheel] +universal = 1 diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..e1db044 --- /dev/null +++ b/setup.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +"""Install dinghy.""" + +import setuptools + +setuptools.setup() diff --git a/src/dinghy/__init__.py b/src/dinghy/__init__.py new file mode 100644 index 0000000..86791b4 --- /dev/null +++ b/src/dinghy/__init__.py @@ -0,0 +1,5 @@ +""" +Dinghy daily digest tool. +""" + +__version__ = "0.1.0" diff --git a/src/dinghy/__main__.py b/src/dinghy/__main__.py new file mode 100644 index 0000000..ab46877 --- /dev/null +++ b/src/dinghy/__main__.py @@ -0,0 +1,5 @@ +"""Enable 'python -m dinghy'.""" + +from dinghy.digest import main + +main() diff --git a/src/dinghy/digest.py b/src/dinghy/digest.py new file mode 100644 index 0000000..bd0a003 --- /dev/null +++ b/src/dinghy/digest.py @@ -0,0 +1,207 @@ +""" +Summarize issue activity in GitHub repos and projects. +""" + +import asyncio +import datetime +import itertools +import operator +import os +import re + +import aiofiles +import yaml +from glom import glom as g + +from .graphql_helpers import build_query, GraphqlHelper +from .helpers import json_save, parse_timedelta +from .jinja_helpers import render_jinja + + +class Summarizer: + """ + Use GitHub GraphQL to get data about recent changes. + """ + + def __init__(self, since): + self.since = since.strftime("%Y-%m-%dT%H:%M:%S") + token = os.environ.get("GITHUB_TOKEN", "") + self.gql = GraphqlHelper("https://api.github.com/graphql", token) + + async def get_repo_issues(self, owner, name): + """ + Get issues from a repo updated since a date, with comments since that date. + + Args: + owner (str): the owner of the repo. + name (str): the name of the repo. 
+ + """ + repo, issues = await self.gql.nodes( + query=build_query("repo_issues.graphql"), + path="repository.issues", + variables=dict(owner=owner, name=name, since=self.since), + ) + issues = await self._populate_issue_comments(issues) + self._add_reasons(issues) + for iss in issues: + iss["comments_to_show"] = iss["comments"]["nodes"] + repo = g(repo, "data.repository") + repo["container_kind"] = "repo" + repo["kind"] = "issues" + return repo, issues + + async def get_project_issues(self, org, number, home_repo=""): + """ + Get issues from a project. + + Args: + org (str): the organization owner of the repo. + number (int): the project number. + home_repo (str): the owner/name of a repo that most issues are in. + """ + project, project_data = await self.gql.nodes( + query=build_query("project_issues.graphql"), + path="organization.project.items", + variables=dict(org=org, projectNumber=number), + ) + issues = [content for data in project_data if (content := data["content"])] + issues = self._trim_since(issues) + issues = await self._populate_issue_comments(issues) + self._add_reasons(issues) + for iss in issues: + iss["other_repo"] = iss["repository"]["nameWithOwner"] != home_repo + iss["comments_to_show"] = iss["comments"]["nodes"] + project = g(project, "data.organization.project") + project["container_kind"] = "project" + project["kind"] = "issues" + return project, issues + + async def get_pull_requests(self, owner, name): + """ + Get pull requests from a repo updated since a date, with comments since that date. + + Args: + owner (str): the owner of the repo. + name (str): the name of the repo. + """ + repo, pulls = await self.gql.nodes( + query=build_query("repo_pull_requests.graphql"), + path="repository.pullRequests", + variables=dict(owner=owner, name=name), + donefn=(lambda nodes: nodes[-1]["updatedAt"] < self.since), + ) + pulls = self._trim_since(pulls) + for pull in pulls: + # Pull requests have complex trees of data, with comments in + # multiple places, and duplications. Reviews can also be finished + # with no comment, but we want them to appear in the digest. + comments = {} + reviews = itertools.chain( + pull["latestReviews"]["nodes"], + pull["latestOpinionatedReviews"]["nodes"], + ) + for rev in reviews: + ncom = 0 + for com in rev["comments"]["nodes"]: + com = comments.setdefault(com["id"], dict(com)) + com["review_state"] = rev["state"] + ncom += 1 + if ncom == 0: + # A completed review with no comment, make it into a comment. 
+ com = comments.setdefault(rev["id"], dict(rev)) + com["review_state"] = rev["state"] + for thread in pull["reviewThreads"]["nodes"]: + for com in thread["comments"]["nodes"]: + comments.setdefault(com["id"], com) + for com in pull["comments"]["nodes"]: + comments.setdefault(com["id"], com) + + pull["comments_to_show"] = self._trim_since(comments.values()) + + self._add_reasons(pulls) + repo = g(repo, "data.repository") + repo["container_kind"] = "repo" + repo["kind"] = "pull requests" + return repo, pulls + + def methods_from_url(self, url): + """Dispatch to a get_* method from a GitHub url.""" + if m := re.fullmatch(r"https://github.com/(.*?)/(.*?)/issues", url): + return self.get_repo_issues, m[1], m[2] + elif m := re.fullmatch(r"https://github.com/(.*?)/(.*?)/pulls", url): + return self.get_pull_requests, m[1], m[2] + elif m := re.fullmatch(r"https://github.com/orgs/(.*?)/projects/(\d+)", url): + return self.get_project_issues, m[1], int(m[2]) + else: + raise Exception(f"Can't understand URL {url!r}") + + def _trim_since(self, nodes): + nodes = [n for n in nodes if n["updatedAt"] > self.since] + nodes.sort(key=operator.itemgetter("updatedAt")) + return nodes + + async def _populate_issue_comments(self, issues): + # Need to get full comments. + queried_issues = [] + issue_queries = [] + for iss in issues: + if iss["comments"]["totalCount"] > len(iss["comments"]["nodes"]): + queried_issues.append(iss) + comments = self.gql.nodes( + query=build_query("issue_comments.graphql"), + path="repository.issue.comments", + variables=dict( + owner=iss["repository"]["owner"]["login"], + name=iss["repository"]["name"], + number=iss["number"], + ), + ) + issue_queries.append(comments) + commentss = await asyncio.gather(*issue_queries) + for iss, (_, comments) in zip(queried_issues, commentss): + iss["comments"]["nodes"] = comments + + # Trim comments to those since our since date. + for iss in issues: + comments = iss["comments"] + comments["nodes"] = self._trim_since(comments["nodes"]) + + return issues + + def _add_reasons(self, issues): + # Why were these issues in the list? + for iss in issues: + iss["reasonCreated"] = iss["createdAt"] > self.since + iss["reasonClosed"] = bool( + iss["closedAt"] and (iss["closedAt"] > self.since) + ) + iss["reasonMerged"] = bool( + iss.get("mergedAt") and (iss["mergedAt"] > self.since) + ) + + +async def make_digest(since, items, digest): + since_date = datetime.datetime.now() - parse_timedelta(since) + summarizer = Summarizer(since=since_date) + tasks = [fn(*args) for fn, *args in map(summarizer.methods_from_url, items)] + results = await asyncio.gather(*tasks) + # $set_env.py: DIGEST_SAVE_RESULT - save digest data in a JSON file. + if int(os.environ.get("DIGEST_SAVE_RESULT", 0)): + await json_save(results, "out_digest.json") + html = render_jinja("digest.html.j2", results=results, since=since_date) + async with aiofiles.open(digest, "w", encoding="utf-8") as html_out: + await html_out.write(html) + + +async def make_digests(conf_file): + with open(conf_file, encoding="utf-8") as y: + config = yaml.safe_load(y) + await asyncio.gather(*(make_digest(**spec) for spec in config)) + + +def main(conf_file="dinghy.yaml"): + """ + Digest all the things! 
+ """ + asyncio.run(make_digests(conf_file)) diff --git a/src/dinghy/graphql/author_frag.graphql b/src/dinghy/graphql/author_frag.graphql new file mode 100644 index 0000000..afe6b70 --- /dev/null +++ b/src/dinghy/graphql/author_frag.graphql @@ -0,0 +1,4 @@ +fragment authorData on Actor { + login + url +} diff --git a/src/dinghy/graphql/comment_frag.graphql b/src/dinghy/graphql/comment_frag.graphql new file mode 100644 index 0000000..2e9bed3 --- /dev/null +++ b/src/dinghy/graphql/comment_frag.graphql @@ -0,0 +1,16 @@ +# Comment is an interface for a number of kinds of comments, not all of which +# have a url. +fragment commentData on Comment { + id + body + updatedAt + author { + ...authorData # fragment: author_frag.graphql + } + ... on IssueComment { + url + } + ... on PullRequestReviewComment { + url + } +} diff --git a/src/dinghy/graphql/issue_comments.graphql b/src/dinghy/graphql/issue_comments.graphql new file mode 100644 index 0000000..fdae516 --- /dev/null +++ b/src/dinghy/graphql/issue_comments.graphql @@ -0,0 +1,20 @@ +query getIssueComments( + $owner: String! + $name: String! + $number: Int! + $after: String +) { + repository(owner: $owner, name: $name) { + issue(number: $number) { + comments(first: 100, after: $after) { + pageInfo { + hasNextPage + endCursor + } + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + } + } +} diff --git a/src/dinghy/graphql/issue_frag.graphql b/src/dinghy/graphql/issue_frag.graphql new file mode 100644 index 0000000..96b50a5 --- /dev/null +++ b/src/dinghy/graphql/issue_frag.graphql @@ -0,0 +1,44 @@ +fragment issueData on Issue { + repository { + ...repoData # fragment: repo_frag.graphql + } + number + url + title + state + createdAt + updatedAt + closedAt + author { + ...authorData # fragment: author_frag.graphql + } + body + comments(last: 100) { + totalCount + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + projectNextItems(first: 100) { + nodes { + project { + owner { + ... on User { + login + } + ... on Organization { + login + } + } + number + } + } + } + labels(first:10) { + nodes { + color + name + } + } + # Issues have timelineItems, but added or removed from projectNext isn't listed. +} diff --git a/src/dinghy/graphql/project_issues.graphql b/src/dinghy/graphql/project_issues.graphql new file mode 100644 index 0000000..8319009 --- /dev/null +++ b/src/dinghy/graphql/project_issues.graphql @@ -0,0 +1,28 @@ +query getProjectIssues( + $org: String! + $projectNumber: Int! + $after: String +) { + organization(login: $org) { + project: projectNext(number: $projectNumber) { + title + url + items(first: 100, after: $after) { + pageInfo { + hasNextPage + endCursor + } + nodes { + content { + ... on Issue { + ...issueData # fragment: issue_frag.graphql + } + # ... on PullRequest { + # number + # } + } + } + } + } + } +} diff --git a/src/dinghy/graphql/repo_frag.graphql b/src/dinghy/graphql/repo_frag.graphql new file mode 100644 index 0000000..1f8101a --- /dev/null +++ b/src/dinghy/graphql/repo_frag.graphql @@ -0,0 +1,8 @@ +fragment repoData on Repository { + owner { + login + } + name + nameWithOwner + url +} diff --git a/src/dinghy/graphql/repo_issues.graphql b/src/dinghy/graphql/repo_issues.graphql new file mode 100644 index 0000000..b5d6c2d --- /dev/null +++ b/src/dinghy/graphql/repo_issues.graphql @@ -0,0 +1,19 @@ +query getRepoIssues( + $owner: String! + $name: String! + $since: String! 
+ $after: String +) { + repository(owner: $owner, name: $name) { + ...repoData + issues(first: 100, filterBy: {since: $since}, after: $after) { + pageInfo { + hasNextPage + endCursor + } + nodes { + ...issueData # fragment: issue_frag.graphql + } + } + } +} diff --git a/src/dinghy/graphql/repo_pull_requests.graphql b/src/dinghy/graphql/repo_pull_requests.graphql new file mode 100644 index 0000000..80d6cc9 --- /dev/null +++ b/src/dinghy/graphql/repo_pull_requests.graphql @@ -0,0 +1,99 @@ +query getPullRequests( + $owner: String! + $name: String! + $after: String +) { + repository(owner: $owner, name: $name) { + ...repoData + pullRequests ( + first: 10 + orderBy: { + field: UPDATED_AT + direction: DESC + } + after: $after + ) { + pageInfo { + hasNextPage + endCursor + } + nodes { + repository { + ...repoData # fragment: repo_frag.graphql + } + author { + ...authorData # fragment: author_frag.graphql + } + number + title + url + createdAt + updatedAt + closedAt + merged + mergedAt + labels(first:10) { + nodes { + color + name + } + } + comments(first: 100) { + totalCount + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + latestOpinionatedReviews(first: 100) { + totalCount + nodes { + id + url + state + author { + ...authorData # fragment: author_frag.graphql + } + body + updatedAt + comments(first: 100) { + totalCount + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + } + } + latestReviews(first: 100) { + totalCount + nodes { + id + url + state + author { + ...authorData # fragment: author_frag.graphql + } + body + updatedAt + comments(first: 100) { + totalCount + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + } + } + reviewThreads(first: 100) { + totalCount + nodes { + comments(first: 100) { + totalCount + nodes { + ...commentData # fragment: comment_frag.graphql + } + } + } + } + } + } + } +} diff --git a/src/dinghy/graphql_helpers.py b/src/dinghy/graphql_helpers.py new file mode 100644 index 0000000..c1f66ee --- /dev/null +++ b/src/dinghy/graphql_helpers.py @@ -0,0 +1,113 @@ +""" +GraphQL helpers. +""" + +import itertools +import os +import pkgutil +import re + +import aiohttp +from glom import glom as g + +from .helpers import json_save + + +JSON_NAMES = (f"out_{i:02}.json" for i in itertools.count()) + + +class GraphqlHelper: + """ + A helper for GraphQL, including error handling and pagination. + """ + + def __init__(self, endpoint, token): + self.endpoint = endpoint + self.headers = {"Authorization": f"Bearer {token}"} + + async def raw_execute(self, query, variables=None): + """ + Execute one GraphQL query, and return the JSON data. + """ + jbody = {"query": query} + if variables: + jbody["variables"] = variables + async with aiohttp.ClientSession( + headers=self.headers, raise_for_status=True + ) as session: + async with session.post(self.endpoint, json=jbody) as response: + return await response.json() + + async def execute(self, query, variables=None): + """ + Execute one GraphQL query, with logging and error handling. + """ + args = ", ".join(f"{k}: {v!r}" for k, v in variables.items()) + print(query.splitlines()[0] + args + ")") + + data = await self.raw_execute(query=query, variables=variables) + + # $set_env.py: DIGEST_SAVE_RESPONSES - save every query response in a JSON file. 
+        if int(os.environ.get("DIGEST_SAVE_RESPONSES", 0)):
+            await json_save(data, next(JSON_NAMES))
+
+        if "message" in data:
+            raise Exception(data["message"])
+        if "errors" in data:
+            err = data["errors"][0]
+            msg = f"GraphQL error: {err['message']}"
+            if "path" in err:
+                msg += f" @{'.'.join(err['path'])}"
+            if "locations" in err:
+                loc = err["locations"][0]
+                msg += f", line {loc['line']} column {loc['column']}"
+            raise Exception(msg)
+        if "data" in data and data["data"] is None:
+            # Another kind of failure response?
+            raise Exception("GraphQL query returned null")
+
+        return data
+
+    async def nodes(self, query, path, variables=None, donefn=None):
+        """
+        Execute a GraphQL query, and follow the pagination to get all the nodes.
+
+        Returns the last query result (for the information outside the pagination),
+        and the list of all paginated nodes.
+        """
+        nodes = []
+        variables = dict(variables)
+        while True:
+            data = await self.execute(query, variables)
+            fetched = g(data, f"data.{path}")
+            nodes.extend(fetched["nodes"])
+            if not fetched["pageInfo"]["hasNextPage"]:
+                break
+            if donefn is not None and donefn(fetched["nodes"]):
+                break
+            variables["after"] = fetched["pageInfo"]["endCursor"]
+        # Remove the nodes from the top-level data we return, to keep things clean.
+        fetched["nodes"] = []
+        return data, nodes
+
+
+def build_query(gql_filename):
+    """Read a GraphQL file, and complete it with requested fragments."""
+    filenames = [gql_filename]
+    query = []
+
+    seen_filenames = set()
+    while filenames:
+        next_filenames = []
+        for filename in filenames:
+            gtext = pkgutil.get_data("dinghy", f"graphql/{filename}").decode("utf-8")
+            query.append(gtext)
+
+            for match in re.finditer(r"#\s*fragment: ([.\w]+)", gtext):
+                frag_name = match[1]
+                if frag_name not in seen_filenames:
+                    next_filenames.append(frag_name)
+                    seen_filenames.add(frag_name)
+        filenames = next_filenames
+
+    return "\n".join(query)
diff --git a/src/dinghy/helpers.py b/src/dinghy/helpers.py
new file mode 100644
index 0000000..b5da8ca
--- /dev/null
+++ b/src/dinghy/helpers.py
@@ -0,0 +1,46 @@
+"""
+Misc helpers.
+"""
+
+import datetime
+import json
+import re
+
+import aiofiles
+
+
+async def json_save(data, filename):
+    """Write `data` to `filename` as JSON."""
+    async with aiofiles.open(filename, "w", encoding="utf-8") as json_out:
+        await json_out.write(json.dumps(data, indent=4))
+
+
+def parse_timedelta(timedelta_str):
+    """
+    Parse a timedelta string ("2h13m") into a timedelta object.
+
+    From https://stackoverflow.com/a/51916936/14343
+
+    Args:
+        timedelta_str (str): A string identifying a duration, like "2h13m".
+
+    Returns:
+        A datetime.timedelta object.
+
+    """
+    parts = re.match(
+        r"""(?x)
+        ^
+        ((?P<weeks>[.\d]+)w[a-z]*)?
+        ((?P<days>[.\d]+)d[a-z]*)?
+        ((?P<hours>[.\d]+)h[a-z]*)?
+        ((?P<minutes>[.\d]+)m[a-z]*)?
+        ((?P<seconds>[.\d]+)s[a-z]*)?
+        $
+        """,
+        timedelta_str.replace(" ", ""),
+    )
+    if not timedelta_str or parts is None:
+        raise ValueError(f"Couldn't parse time delta from {timedelta_str!r}")
+    kwargs = {name: float(val) for name, val in parts.groupdict().items() if val}
+    return datetime.timedelta(**kwargs)
diff --git a/src/dinghy/jinja_helpers.py b/src/dinghy/jinja_helpers.py
new file mode 100644
index 0000000..7a3e660
--- /dev/null
+++ b/src/dinghy/jinja_helpers.py
@@ -0,0 +1,34 @@
+"""
+Utilities for working with Jinja2 templates.
+""" + +import datetime +from pathlib import Path + +import wcag_contrast_ratio +import jinja2 + + +def datetime_format(value, fmt="%m-%d %H:%M"): + """Format a datetime or ISO datetime string, for Jinja filtering.""" + if isinstance(value, str): + value = datetime.datetime.fromisoformat(value.replace("Z", "+00:00")) + return value.strftime(fmt) + + +def textcolor(bg_color): + """Calculate a text color for a background color `bg_color`.""" + rgb = [int(bg_color[i : i + 2], 16) / 255 for i in [0, 2, 4]] + bcontrast = wcag_contrast_ratio.rgb(rgb, (0, 0, 0)) + wcontrast = wcag_contrast_ratio.rgb(rgb, (1, 1, 1)) + return "black" if bcontrast > wcontrast else "white" + + +def render_jinja(template_filename, **variables): + """Render a template file, with variables.""" + jenv = jinja2.Environment(loader=jinja2.FileSystemLoader(Path(__file__).parent)) + jenv.filters["datetime"] = datetime_format + jenv.filters["textcolor"] = textcolor + template = jenv.get_template(f"templates/{template_filename}") + html = template.render(**variables) + return html diff --git a/src/dinghy/templates/digest.html.j2 b/src/dinghy/templates/digest.html.j2 new file mode 100644 index 0000000..c5b0054 --- /dev/null +++ b/src/dinghy/templates/digest.html.j2 @@ -0,0 +1,135 @@ + + + {% set double_bubble = "🗪" %} + {% set empty_box = "☐" %} + {% set checked_box = "🗹" %} + {{ double_bubble }} Activity since {{ since|datetime("%Y-%m-%d") }} + + +

Activity since {{ since|datetime("%Y-%m-%d") }}

+ diff --git a/tests/test_helpers.py b/tests/test_helpers.py new file mode 100644 index 0000000..5a1d5db --- /dev/null +++ b/tests/test_helpers.py @@ -0,0 +1,40 @@ +""" +Test dinghy.helpers +""" + +import datetime + +import pytest + +from dinghy.helpers import parse_timedelta + + +@pytest.mark.parametrize( + "tds, kwargs", + [ + ("1d", dict(days=1)), + ("1day", dict(days=1)), + ("1d2h3m", dict(days=1, hours=2, minutes=3)), + ( + "6 day 7.5 hours 8 min .25 s", + dict(days=6, hours=7.5, minutes=8, seconds=0.25), + ), + ("10 weeks 2minutes", dict(weeks=10, minutes=2)), + ], +) +def test_parse_timedelta(tds, kwargs): + assert parse_timedelta(tds) == datetime.timedelta(**kwargs) + + +@pytest.mark.parametrize( + "tds", + [ + "", + "one", + "123", + "2 years", + ], +) +def test_bad_parse_timedelta(tds): + with pytest.raises(ValueError, match=f"Couldn't parse time delta from {tds!r}"): + parse_timedelta(tds)
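
A minimal usage sketch, not part of the patch above: it drives make_digest() directly, using the URL forms accepted by Summarizer.methods_from_url() and a duration string understood by helpers.parse_timedelta(). The repository URLs, project number, duration, and output filename below are illustrative assumptions, and Summarizer expects a GITHUB_TOKEN environment variable for API authentication.

    # Sketch only: URLs, project number, and duration are illustrative, not from the patch.
    import asyncio

    from dinghy.digest import make_digest

    asyncio.run(
        make_digest(
            since="1 week",  # parsed by helpers.parse_timedelta ("1d2h3m", "10 weeks", ...)
            items=[
                "https://github.com/nedbat/dinghy/issues",      # repo issues
                "https://github.com/nedbat/dinghy/pulls",       # repo pull requests
                "https://github.com/orgs/someorg/projects/17",  # org project (hypothetical)
            ],
            digest="digest.html",  # output HTML rendered from templates/digest.html.j2
        )
    )

The same keys (since, items, digest) are what make_digests() passes through from each entry in the dinghy.yaml config file that main() reads by default.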