From fa927f623edb1facbfbfb7a74ba4447a4a20e123 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 14:11:10 +0800 Subject: [PATCH 001/107] migration logic --- src/exceptions.py | 4 + src/migration.py | 75 +++++++ src/types_.py | 27 +++ tests/unit/helpers.py | 12 + tests/unit/test_migration.py | 412 +++++++++++++++++++++++++++++++++++ 5 files changed, 530 insertions(+) create mode 100644 src/migration.py create mode 100644 tests/unit/test_migration.py diff --git a/src/exceptions.py b/src/exceptions.py index b091de53..18f7bc77 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -30,3 +30,7 @@ class ReconcilliationError(BaseError): class ActionError(BaseError): """A problem with the taking an action occurred.""" + + +class InvalidTableRowLevelError(BaseError): + """A problematic table row is encountered.""" diff --git a/src/migration.py b/src/migration.py new file mode 100644 index 00000000..dc6cadf4 --- /dev/null +++ b/src/migration.py @@ -0,0 +1,75 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Module for transforming index table rows into local files.""" + +import typing +from pathlib import Path + +from . import exceptions, types_ + +GITKEEP_FILE = ".gitkeep" + + +def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): + """Check for invalid row levels. + + Args: + table_rows: Table rows from the index file. + + Raises: + InvalidRow exception if invalid row level is encountered. + """ + level = 0 + for row in table_rows: + if row.level <= 0: + raise exceptions.InvalidTableRowLevelError( + f"Invalid level {row.level} in {row!=row.level}" + ) + # Level increase of more than 1 is not possible. + if row.level > level and (difference := row.level - level) > 1: + raise exceptions.InvalidTableRowLevelError( + f"Level difference of {difference} encountered in {row=!r}" + ) + # Level decrease or same level is fine. + level = row.level + + +def migrate( + table_rows: typing.Iterable[types_.TableRow], +) -> typing.Iterable[types_.MigrationDocument]: + """Create migration documents to migrate from server. + + Args: + table_rows: Table rows from the index file in the order of directory hierarcy. + docs_path: Docs directory base path. + + Returns: + Migration documents with navlink to content.\ + .gitkeep file with no content if empty directory. + """ + _validate_row_levels(table_rows=table_rows) + + level = 0 + last_dir_has_file = True + cwd = Path() + for row in table_rows: + # Next set of hierarchies, change cwd path + if row.level <= level: + if not last_dir_has_file: + yield types_.GitkeepFile(path=cwd / GITKEEP_FILE) + while row.level <= level: + level -= 1 + cwd = cwd.parent + + # if row is directory, move cwd + if not row.navlink.link: + last_dir_has_file = False + cwd = cwd / row.path + level = row.level + else: + last_dir_has_file = True + yield types_.DocumentFile(path=cwd / f"{row.path}.md", link=row.navlink.link) + + if not last_dir_has_file: + yield types_.GitkeepFile(path=cwd / GITKEEP_FILE) diff --git a/src/types_.py b/src/types_.py index 48f59479..2886257a 100644 --- a/src/types_.py +++ b/src/types_.py @@ -313,3 +313,30 @@ class ActionReport(typing.NamedTuple): url: Url | None result: ActionResult reason: str | None + + +@dataclasses.dataclass +class MigrationDocument: + """Represents a document to be migrated. + + Attrs: + path: The full document path to be written to. 
+ """ + + path: Path + + +@dataclasses.dataclass +class GitkeepFile(MigrationDocument): + """Represents an empty directory from the index table.""" + + +@dataclasses.dataclass +class DocumentFile(MigrationDocument): + """Represents a document to be migrated from the index table. + + Attrs: + link: Link to content to read from. + """ + + link: str diff --git a/tests/unit/helpers.py b/tests/unit/helpers.py index 5d1fab51..0747a542 100644 --- a/tests/unit/helpers.py +++ b/tests/unit/helpers.py @@ -31,3 +31,15 @@ def assert_substrings_in_string(substrings: typing.Iterable[str], string: str) - """ for substring in substrings: assert substring in string # nosec + + +def path_to_markdown(path: Path) -> Path: + """Generate markdown file from path. + + Args: + path: The path to be converted into markdown path. + + Returns: + Path with last path being a markdown file. + """ + return Path(f"{path}.md") diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py new file mode 100644 index 00000000..46c1b6b7 --- /dev/null +++ b/tests/unit/test_migration.py @@ -0,0 +1,412 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Unit tests for migration module.""" + +# Need access to protected functions for testing +# pylint: disable=protected-access + +from pathlib import Path +from typing import Iterable, List + +import pytest + +from src import exceptions, migration, types_ + +from .helpers import path_to_markdown + + +# Pylint diesn't understand how the walrus operator works +# pylint: disable=undefined-variable,unused-variable +@pytest.mark.parametrize( + "table_rows, expected_error_msg_contents", + [ + pytest.param( + [ + types_.TableRow( + level=-1, + path=(test_path := "path 1"), + navlink=(test_navlink := types_.Navlink(title="title 1", link=None)), + ) + ], + (invalid_msg := "invalid level"), + id="negative table row level", + ), + pytest.param( + [ + types_.TableRow( + level=0, + path=(test_path), + navlink=(test_navlink), + ) + ], + invalid_msg, + id="zero table row level", + ), + pytest.param( + [ + types_.TableRow( + level=2, + path=(test_path), + navlink=(test_navlink), + ) + ], + (level_difference_msg := "level difference"), + id="invalid starting table row level", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(test_path), + navlink=(test_navlink), + ), + types_.TableRow( + level=3, + path=(test_path), + navlink=(test_navlink), + ), + ], + level_difference_msg, + id="invalid table row level change", + ), + ], +) +def test__validate_row_levels_invalid_rows( + table_rows: Iterable[types_.TableRow], expected_error_msg_contents: str +): + """ + arrange: given table rows with invalid levels + act: when _validate_row_levels is called + assert: InvalidRow exception is raised with excpected error message contents. 
+ """ + with pytest.raises(exceptions.InvalidTableRowLevelError) as exc_info: + migration._validate_row_levels(table_rows=table_rows) + + exc_str = str(exc_info.value).lower() + assert expected_error_msg_contents in exc_str + + +@pytest.mark.parametrize( + "table_rows", + [ + pytest.param( + [ + types_.TableRow( + level=1, + path=("path 1"), + navlink=(types_.Navlink(title="title 1", link=None)), + ), + ], + id="valid level", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=("path 1"), + navlink=(types_.Navlink(title="title 1", link=None)), + ), + types_.TableRow( + level=2, + path=("path 2"), + navlink=(types_.Navlink(title="title 2", link="link")), + ), + ], + id="increasing levels", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=("path 1"), + navlink=(types_.Navlink(title="title 1", link=None)), + ), + types_.TableRow( + level=2, + path=("path 2"), + navlink=(types_.Navlink(title="title 2", link="link 1")), + ), + types_.TableRow( + level=1, + path=("path 3"), + navlink=(types_.Navlink(title="title 3", link="link 2")), + ), + ], + id="descend one level", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=("path 1"), + navlink=(types_.Navlink(title="title 1", link=None)), + ), + types_.TableRow( + level=2, + path=("path 2"), + navlink=(types_.Navlink(title="title 2", link="link 1")), + ), + types_.TableRow( + level=3, + path=("path 3"), + navlink=(types_.Navlink(title="title 3", link="link 2")), + ), + types_.TableRow( + level=1, + path=("path 4"), + navlink=(types_.Navlink(title="title 4", link="link 3")), + ), + ], + id="descend multiple levels", + ), + ], +) +def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): + """ + arrange: given table rows with valid levels + act: when __validate_row_levels is called + assert: no exceptions are raised. + """ + migration._validate_row_levels(table_rows=table_rows) + + +@pytest.mark.parametrize( + "table_rows, expected_files", + [ + pytest.param( + [ + types_.TableRow( + level=1, + path=((path_str := "path 1")), + navlink=((dir_navlink := types_.Navlink(title="title 1", link=None))), + ), + ], + [types_.GitkeepFile(path=Path(path_str) / (gitkeep_file := Path(".gitkeep")))], + id="table row no navlink", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(path_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=1, + path=((path_str_2 := "path 2")), + navlink=(dir_navlink), + ), + ], + [ + types_.GitkeepFile(path=Path(path_str) / gitkeep_file), + types_.GitkeepFile(path=Path(path_str_2) / gitkeep_file), + ], + id="multiple empty directories", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(path_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str_2), + navlink=(dir_navlink), + ), + ], + [ + types_.GitkeepFile(path=Path(path_str) / Path(path_str_2) / gitkeep_file), + ], + id="nested empty directories", + ), + ], +) +def test_migrate_empty_directory( + table_rows: Iterable[types_.TableRow], + expected_files: List[types_.MigrationDocument], +): + """ + arrange: given valid table rows with no navlink(only directories) + act: when migrate is called + assert: gitkeep files with respective directories are returned. 
+ """ + files = [file for file in migration.migrate(table_rows=table_rows)] + assert files == expected_files + + +@pytest.mark.parametrize( + "table_rows, expected_files", + [ + pytest.param( + [ + types_.TableRow( + level=1, + path=(path_str), + navlink=( + (file_navlink := types_.Navlink(title="title 1", link=(link := "link 1"))) + ), + ), + ], + [types_.DocumentFile(path=path_to_markdown(Path(path_str)), link=link)], + id="single file", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(path_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str_2), + navlink=(file_navlink), + ), + ], + [ + types_.DocumentFile( + path=path_to_markdown(Path(path_str) / Path(path_str_2)), link=link + ) + ], + id="single file in directory", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(path_str), + navlink=(file_navlink), + ), + types_.TableRow( + level=1, + path=(path_str_2), + navlink=(file_navlink), + ), + ], + [ + types_.DocumentFile(path=path_to_markdown(Path(path_str)), link=link), + types_.DocumentFile(path=path_to_markdown(Path(path_str_2)), link=link), + ], + id="multiple files", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=((base_path_dir_str := "base")), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str), + navlink=(file_navlink), + ), + types_.TableRow( + level=2, + path=(path_str_2), + navlink=(file_navlink), + ), + ], + [ + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link + ), + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link + ), + ], + id="multiple files in directory", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(base_path_dir_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str), + navlink=(file_navlink), + ), + types_.TableRow( + level=2, + path=(path_str_2), + navlink=(file_navlink), + ), + types_.TableRow( + level=1, + path=((base_path_dir_str_2 := "base 2")), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str), + navlink=(file_navlink), + ), + types_.TableRow( + level=2, + path=(path_str_2), + navlink=(file_navlink), + ), + ], + [ + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link + ), + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link + ), + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str)), link=link + ), + types_.DocumentFile( + path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str_2)), link=link + ), + ], + id="multiple files in multiple directory", + ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(base_path_dir_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=2, + path=(path_str), + navlink=(dir_navlink), + ), + types_.TableRow( + level=3, + path=(path_str_2), + navlink=(file_navlink), + ), + ], + [ + types_.DocumentFile( + path=path_to_markdown( + Path(base_path_dir_str) / Path(path_str) / Path(path_str_2) + ), + link=link, + ), + ], + id="nested directory file", + ), + ], +) +def test_migrate_directory( + table_rows: Iterable[types_.TableRow], + expected_files: List[types_.MigrationDocument], +): + """ + arrange: given valid table rows + act: when migrate is called + assert: document file with correct paths are returned. 
+ """ + files = [file for file in migration.migrate(table_rows=table_rows)] + assert files == expected_files From 0c98f97defb101e901e8185279c50a6f5a581cae Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 14:49:34 +0800 Subject: [PATCH 002/107] add metadata arg docstring --- src/index.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/index.py b/src/index.py index 8e204b84..4fe31951 100644 --- a/src/index.py +++ b/src/index.py @@ -35,6 +35,7 @@ def get(metadata: Metadata, base_path: Path, server_client: Discourse) -> Index: """Retrieve the local and server index information. Args: + metadata: Parsed Metadata.yaml contents base_path: The base path to look for the metadata file in. server_client: A client to the documentation server. From df98fb19d3874464a9444563091c43cc7fa41eb0 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 14:52:04 +0800 Subject: [PATCH 003/107] rename migration types --- src/migration.py | 14 ++++++------- src/types_.py | 8 ++++---- tests/unit/test_migration.py | 38 ++++++++++++++++++------------------ 3 files changed, 30 insertions(+), 30 deletions(-) diff --git a/src/migration.py b/src/migration.py index dc6cadf4..cb707081 100644 --- a/src/migration.py +++ b/src/migration.py @@ -7,6 +7,7 @@ from pathlib import Path from . import exceptions, types_ +from .discourse import Discourse GITKEEP_FILE = ".gitkeep" @@ -35,14 +36,13 @@ def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): level = row.level -def migrate( +def extract_docs( table_rows: typing.Iterable[types_.TableRow], -) -> typing.Iterable[types_.MigrationDocument]: - """Create migration documents to migrate from server. +) -> typing.Iterable[types_.MigrationFileMeta]: + """Extract necessary migration documents to build docs directory from server. Args: table_rows: Table rows from the index file in the order of directory hierarcy. - docs_path: Docs directory base path. Returns: Migration documents with navlink to content.\ @@ -57,7 +57,7 @@ def migrate( # Next set of hierarchies, change cwd path if row.level <= level: if not last_dir_has_file: - yield types_.GitkeepFile(path=cwd / GITKEEP_FILE) + yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE) while row.level <= level: level -= 1 cwd = cwd.parent @@ -69,7 +69,7 @@ def migrate( level = row.level else: last_dir_has_file = True - yield types_.DocumentFile(path=cwd / f"{row.path}.md", link=row.navlink.link) + yield types_.DocumentMeta(path=cwd / f"{row.path}.md", link=row.navlink.link) if not last_dir_has_file: - yield types_.GitkeepFile(path=cwd / GITKEEP_FILE) + yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE) diff --git a/src/types_.py b/src/types_.py index 2886257a..b87668e4 100644 --- a/src/types_.py +++ b/src/types_.py @@ -316,8 +316,8 @@ class ActionReport(typing.NamedTuple): @dataclasses.dataclass -class MigrationDocument: - """Represents a document to be migrated. +class MigrationFileMeta: + """Metadata about a document to be migrated. Attrs: path: The full document path to be written to. @@ -327,12 +327,12 @@ class MigrationDocument: @dataclasses.dataclass -class GitkeepFile(MigrationDocument): +class GitkeepMeta(MigrationFileMeta): """Represents an empty directory from the index table.""" @dataclasses.dataclass -class DocumentFile(MigrationDocument): +class DocumentMeta(MigrationFileMeta): """Represents a document to be migrated from the index table. 
Attrs: diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 46c1b6b7..c8f81c18 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -182,7 +182,7 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): navlink=((dir_navlink := types_.Navlink(title="title 1", link=None))), ), ], - [types_.GitkeepFile(path=Path(path_str) / (gitkeep_file := Path(".gitkeep")))], + [types_.GitkeepMeta(path=Path(path_str) / (gitkeep_file := Path(".gitkeep")))], id="table row no navlink", ), pytest.param( @@ -199,8 +199,8 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): ), ], [ - types_.GitkeepFile(path=Path(path_str) / gitkeep_file), - types_.GitkeepFile(path=Path(path_str_2) / gitkeep_file), + types_.GitkeepMeta(path=Path(path_str) / gitkeep_file), + types_.GitkeepMeta(path=Path(path_str_2) / gitkeep_file), ], id="multiple empty directories", ), @@ -218,7 +218,7 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): ), ], [ - types_.GitkeepFile(path=Path(path_str) / Path(path_str_2) / gitkeep_file), + types_.GitkeepMeta(path=Path(path_str) / Path(path_str_2) / gitkeep_file), ], id="nested empty directories", ), @@ -226,14 +226,14 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): ) def test_migrate_empty_directory( table_rows: Iterable[types_.TableRow], - expected_files: List[types_.MigrationDocument], + expected_files: List[types_.MigrationFileMeta], ): """ arrange: given valid table rows with no navlink(only directories) act: when migrate is called assert: gitkeep files with respective directories are returned. """ - files = [file for file in migration.migrate(table_rows=table_rows)] + files = [file for file in migration.extract_docs(table_rows=table_rows)] assert files == expected_files @@ -250,7 +250,7 @@ def test_migrate_empty_directory( ), ), ], - [types_.DocumentFile(path=path_to_markdown(Path(path_str)), link=link)], + [types_.DocumentMeta(path=path_to_markdown(Path(path_str)), link=link)], id="single file", ), pytest.param( @@ -267,7 +267,7 @@ def test_migrate_empty_directory( ), ], [ - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(path_str) / Path(path_str_2)), link=link ) ], @@ -287,8 +287,8 @@ def test_migrate_empty_directory( ), ], [ - types_.DocumentFile(path=path_to_markdown(Path(path_str)), link=link), - types_.DocumentFile(path=path_to_markdown(Path(path_str_2)), link=link), + types_.DocumentMeta(path=path_to_markdown(Path(path_str)), link=link), + types_.DocumentMeta(path=path_to_markdown(Path(path_str_2)), link=link), ], id="multiple files", ), @@ -311,10 +311,10 @@ def test_migrate_empty_directory( ), ], [ - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link ), - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link ), ], @@ -354,16 +354,16 @@ def test_migrate_empty_directory( ), ], [ - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link ), - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link ), - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str)), link=link ), - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str_2)), link=link ), ], @@ -388,7 +388,7 @@ 
def test_migrate_empty_directory( ), ], [ - types_.DocumentFile( + types_.DocumentMeta( path=path_to_markdown( Path(base_path_dir_str) / Path(path_str) / Path(path_str_2) ), @@ -401,12 +401,12 @@ def test_migrate_empty_directory( ) def test_migrate_directory( table_rows: Iterable[types_.TableRow], - expected_files: List[types_.MigrationDocument], + expected_files: List[types_.MigrationFileMeta], ): """ arrange: given valid table rows act: when migrate is called assert: document file with correct paths are returned. """ - files = [file for file in migration.migrate(table_rows=table_rows)] + files = [file for file in migration.extract_docs(table_rows=table_rows)] assert files == expected_files From 4ca1d358e25c3f42f188004196cb06258d283514 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 17:59:36 +0800 Subject: [PATCH 004/107] document migration module --- src/exceptions.py | 6 +- src/migration.py | 131 +++++++++++++-- src/types_.py | 18 ++ tests/unit/test_migration.py | 318 ++++++++++++++++++++++------------- 4 files changed, 345 insertions(+), 128 deletions(-) diff --git a/src/exceptions.py b/src/exceptions.py index 18f7bc77..896c1f2b 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -32,5 +32,9 @@ class ActionError(BaseError): """A problem with the taking an action occurred.""" -class InvalidTableRowLevelError(BaseError): +class InvalidTableRowError(BaseError): """A problematic table row is encountered.""" + + +class MigrationError(BaseError): + """A problem with migration occurred.""" diff --git a/src/migration.py b/src/migration.py index cb707081..df22f07e 100644 --- a/src/migration.py +++ b/src/migration.py @@ -9,6 +9,7 @@ from . import exceptions, types_ from .discourse import Discourse +EMPTY_DIR_REASON = "" GITKEEP_FILE = ".gitkeep" @@ -24,23 +25,113 @@ def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): level = 0 for row in table_rows: if row.level <= 0: - raise exceptions.InvalidTableRowLevelError( - f"Invalid level {row.level} in {row!=row.level}" - ) + raise exceptions.InvalidTableRowError(f"Invalid level {row.level} in {row!=row.level}") # Level increase of more than 1 is not possible. if row.level > level and (difference := row.level - level) > 1: - raise exceptions.InvalidTableRowLevelError( + raise exceptions.InvalidTableRowError( f"Level difference of {difference} encountered in {row=!r}" ) # Level decrease or same level is fine. level = row.level +def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path): + """Write gitkeep file to docs directory. + + Args: + gitkeep_meta: Gitkeep metadata from empty directory table row. + docs_path: Documentation folder path. + + Returns: + Migration report for gitkeep file creation. + """ + path = docs_path / gitkeep_meta.path + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + return types_.MigrationReport( + table_row=gitkeep_meta.table_row, + result=types_.ActionResult.SUCCESS, + path=path, + reason=EMPTY_DIR_REASON, + ) + + +def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, docs_path: Path): + """Write document file with content to docs directory. + + Args: + document_meta: Document metadata from directory table row with link. + discourse: Client to the documentation server. + docs_path: The path to the docs directory to migrate all the documentation. + + Returns: + Migration report for document file creation. 
+ """ + try: + content = discourse.retrieve_topic(url=document_meta.link) + path = docs_path / document_meta.path + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(content, encoding="utf-8") + return types_.MigrationReport( + table_row=document_meta.table_row, + result=types_.ActionResult.SUCCESS, + path=path, + reason=None, + ) + except exceptions.DiscourseError as exc: + return types_.MigrationReport( + table_row=document_meta.table_row, + result=types_.ActionResult.FAIL, + path=None, + reason=str(exc), + ) + + +def _run_one( + file_meta: types_.MigrationFileMeta, discourse: Discourse, docs_path: Path +) -> types_.MigrationReport: + """Write document content relative to docs directory. + + Args: + file_meta: Migration file metadata corresponding to a row in index table. + discourse: Client to the documentation server. + docs_path: The path to the docs directory to migrate all the documentation. + + Returns: + Migration report containing migration result. + """ + match type(file_meta): + case types_.GitkeepMeta: + assert isinstance(file_meta, types_.GitkeepMeta) + return _migrate_gitkeep(gitkeep_meta=file_meta, docs_path=docs_path) + case types_.DocumentMeta: + assert isinstance(file_meta, types_.DocumentMeta) + return _migrate_document( + document_meta=file_meta, discourse=discourse, docs_path=docs_path + ) + # Edge case that should not be possible. + case _: # pragma: no cover + raise exceptions.MigrationError( + f"internal error, no implementation for migration file, {file_meta=!r}" + ) + + def extract_docs( table_rows: typing.Iterable[types_.TableRow], ) -> typing.Iterable[types_.MigrationFileMeta]: """Extract necessary migration documents to build docs directory from server. + Algorithm: + 1. For each table row: + 1.1. If row level is smaller than current working level: + 1.1.1. Yield GitkeepMeta if last working directory was empty. + 1.1.2. Navigate to parent directory based on current level and row level. + 1.2. If row is a directory: + 1.2.1. Create a virtual directory with given path + 1.2.2. Set created virtual directory as working directory. + 1.3. If row is a file: Yield DocumentMeta + 2. If last table row was a directory and yielded no DocumentMeta, yield GitkeepMeta. + Args: table_rows: Table rows from the index file in the order of directory hierarcy. @@ -51,13 +142,14 @@ def extract_docs( _validate_row_levels(table_rows=table_rows) level = 0 - last_dir_has_file = True + last_dir_has_file = True # Assume root dir is not empty. 
+ last_dir_row: types_.TableRow | None = None cwd = Path() for row in table_rows: # Next set of hierarchies, change cwd path if row.level <= level: - if not last_dir_has_file: - yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE) + if not last_dir_has_file and last_dir_row is not None: + yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE, table_row=last_dir_row) while row.level <= level: level -= 1 cwd = cwd.parent @@ -65,11 +157,30 @@ def extract_docs( # if row is directory, move cwd if not row.navlink.link: last_dir_has_file = False + last_dir_row = row cwd = cwd / row.path level = row.level else: last_dir_has_file = True - yield types_.DocumentMeta(path=cwd / f"{row.path}.md", link=row.navlink.link) + yield types_.DocumentMeta( + path=cwd / f"{row.path}.md", link=row.navlink.link, table_row=row + ) + + if not last_dir_has_file and last_dir_row: + yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE, table_row=last_dir_row) + - if not last_dir_has_file: - yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE) +def run( + documents: typing.Iterable[types_.MigrationFileMeta], discourse: Discourse, docs_path: Path +) -> typing.Iterable[types_.MigrationReport]: + """Write document content to docs_path. + + Args: + documents: metadata about a file to be migrated to local docs directory. + discourse: Client to the documentation server. + docs_path: The path to the docs directory containing all the documentation. + """ + return [ + _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) + for document in documents + ] diff --git a/src/types_.py b/src/types_.py index b87668e4..d3ce0d38 100644 --- a/src/types_.py +++ b/src/types_.py @@ -320,10 +320,12 @@ class MigrationFileMeta: """Metadata about a document to be migrated. Attrs: + table_row: The navigation table entry. path: The full document path to be written to. """ path: Path + table_row: TableRow @dataclasses.dataclass @@ -340,3 +342,19 @@ class DocumentMeta(MigrationFileMeta): """ link: str + + +class MigrationReport(typing.NamedTuple): + """Post execution report for an action. + + Attrs: + table_row: The navigation table entry. + path: Path the file was written to. None if failed. + result: The action execution result. + reason: The reason, None for success reports. + """ + + table_row: TableRow + path: Path | None + result: ActionResult + reason: str | None diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index c8f81c18..ceefe94d 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -8,10 +8,11 @@ from pathlib import Path from typing import Iterable, List +from unittest import mock import pytest -from src import exceptions, migration, types_ +from src import discourse, exceptions, migration, types_ from .helpers import path_to_markdown @@ -80,7 +81,7 @@ def test__validate_row_levels_invalid_rows( act: when _validate_row_levels is called assert: InvalidRow exception is raised with excpected error message contents. 
""" - with pytest.raises(exceptions.InvalidTableRowLevelError) as exc_info: + with pytest.raises(exceptions.InvalidTableRowError) as exc_info: migration._validate_row_levels(table_rows=table_rows) exc_str = str(exc_info.value).lower() @@ -176,62 +177,70 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): [ pytest.param( [ - types_.TableRow( - level=1, - path=((path_str := "path 1")), - navlink=((dir_navlink := types_.Navlink(title="title 1", link=None))), + ( + root_dir_row := types_.TableRow( + level=1, + path="root path 1", + navlink=(dir_navlink := types_.Navlink(title="title 1", link=None)), + ) ), ], - [types_.GitkeepMeta(path=Path(path_str) / (gitkeep_file := Path(".gitkeep")))], + [ + types_.GitkeepMeta( + path=Path(root_dir_row.path) / (gitkeep_file := Path(".gitkeep")), + table_row=root_dir_row, + ) + ], id="table row no navlink", ), pytest.param( [ - types_.TableRow( - level=1, - path=(path_str), - navlink=(dir_navlink), - ), - types_.TableRow( - level=1, - path=((path_str_2 := "path 2")), - navlink=(dir_navlink), + root_dir_row, + ( + root_dir_row_2 := types_.TableRow( + level=1, + path="root path 2", + navlink=dir_navlink, + ) ), ], [ - types_.GitkeepMeta(path=Path(path_str) / gitkeep_file), - types_.GitkeepMeta(path=Path(path_str_2) / gitkeep_file), + types_.GitkeepMeta( + path=Path(root_dir_row.path) / gitkeep_file, table_row=root_dir_row + ), + types_.GitkeepMeta( + path=Path(root_dir_row_2.path) / gitkeep_file, table_row=root_dir_row_2 + ), ], id="multiple empty directories", ), pytest.param( [ - types_.TableRow( - level=1, - path=(path_str), - navlink=(dir_navlink), - ), - types_.TableRow( + root_dir_row, + sub_dir_row := types_.TableRow( level=2, - path=(path_str_2), + path="sub path 1", navlink=(dir_navlink), ), ], [ - types_.GitkeepMeta(path=Path(path_str) / Path(path_str_2) / gitkeep_file), + types_.GitkeepMeta( + path=Path(root_dir_row.path) / Path(sub_dir_row.path) / gitkeep_file, + table_row=sub_dir_row, + ), ], id="nested empty directories", ), ], ) -def test_migrate_empty_directory( +def test_extract_docs_empty_directory_rows( table_rows: Iterable[types_.TableRow], expected_files: List[types_.MigrationFileMeta], ): """ arrange: given valid table rows with no navlink(only directories) act: when migrate is called - assert: gitkeep files with respective directories are returned. + assert: .gitkeep files with respective directories are returned. 
""" files = [file for file in migration.extract_docs(table_rows=table_rows)] assert files == expected_files @@ -242,164 +251,151 @@ def test_migrate_empty_directory( [ pytest.param( [ - types_.TableRow( + root_file_row := types_.TableRow( level=1, - path=(path_str), + path="root file 1", navlink=( - (file_navlink := types_.Navlink(title="title 1", link=(link := "link 1"))) + file_navlink := types_.Navlink( + title="title 1", link=(link_str := "link 1") + ) ), - ), + ) + ], + [ + types_.DocumentMeta( + path=path_to_markdown(Path(root_file_row.path)), + link=link_str, + table_row=root_file_row, + ) ], - [types_.DocumentMeta(path=path_to_markdown(Path(path_str)), link=link)], id="single file", ), pytest.param( [ - types_.TableRow( - level=1, - path=(path_str), - navlink=(dir_navlink), - ), - types_.TableRow( + root_dir_row, + sub_file_row := types_.TableRow( level=2, - path=(path_str_2), - navlink=(file_navlink), + path="sub file 1", + navlink=file_navlink, ), ], [ types_.DocumentMeta( - path=path_to_markdown(Path(path_str) / Path(path_str_2)), link=link + path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), + link=link_str, + table_row=sub_file_row, ) ], id="single file in directory", ), pytest.param( [ - types_.TableRow( + root_file_row, + root_file_row_2 := types_.TableRow( level=1, - path=(path_str), - navlink=(file_navlink), - ), - types_.TableRow( - level=1, - path=(path_str_2), - navlink=(file_navlink), + path="root file 2", + navlink=file_navlink, ), ], [ - types_.DocumentMeta(path=path_to_markdown(Path(path_str)), link=link), - types_.DocumentMeta(path=path_to_markdown(Path(path_str_2)), link=link), + types_.DocumentMeta( + path=path_to_markdown(Path(root_file_row.path)), + link=link_str, + table_row=root_file_row, + ), + types_.DocumentMeta( + path=path_to_markdown(Path(root_file_row_2.path)), + link=link_str, + table_row=root_file_row_2, + ), ], id="multiple files", ), pytest.param( [ - types_.TableRow( - level=1, - path=((base_path_dir_str := "base")), - navlink=(dir_navlink), - ), - types_.TableRow( - level=2, - path=(path_str), - navlink=(file_navlink), - ), - types_.TableRow( + root_dir_row, + sub_file_row, + sub_file_row_2 := types_.TableRow( level=2, - path=(path_str_2), + path="sub file 2", navlink=(file_navlink), ), ], [ types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link + path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), + link=link_str, + table_row=sub_file_row, ), types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link + path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row_2.path)), + link=link_str, + table_row=sub_file_row_2, ), ], id="multiple files in directory", ), pytest.param( [ - types_.TableRow( - level=1, - path=(base_path_dir_str), - navlink=(dir_navlink), - ), - types_.TableRow( - level=2, - path=(path_str), - navlink=(file_navlink), - ), - types_.TableRow( - level=2, - path=(path_str_2), - navlink=(file_navlink), - ), - types_.TableRow( - level=1, - path=((base_path_dir_str_2 := "base 2")), - navlink=(dir_navlink), - ), - types_.TableRow( - level=2, - path=(path_str), - navlink=(file_navlink), - ), - types_.TableRow( - level=2, - path=(path_str_2), - navlink=(file_navlink), - ), + root_dir_row, + sub_file_row, + sub_file_row_2, + root_dir_row_2, + sub_file_row, + sub_file_row_2, ], [ types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str) / Path(path_str)), link=link + 
path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), + link=link_str, + table_row=sub_file_row, ), types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str) / Path(path_str_2)), link=link + path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row_2.path)), + link=link_str, + table_row=sub_file_row_2, ), types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str)), link=link + path=path_to_markdown(Path(root_dir_row_2.path) / Path(sub_file_row.path)), + link=link_str, + table_row=sub_file_row, ), types_.DocumentMeta( - path=path_to_markdown(Path(base_path_dir_str_2) / Path(path_str_2)), link=link + path=path_to_markdown(Path(root_dir_row_2.path) / Path(sub_file_row_2.path)), + link=link_str, + table_row=sub_file_row_2, ), ], id="multiple files in multiple directory", ), pytest.param( [ - types_.TableRow( - level=1, - path=(base_path_dir_str), - navlink=(dir_navlink), - ), - types_.TableRow( - level=2, - path=(path_str), - navlink=(dir_navlink), - ), - types_.TableRow( - level=3, - path=(path_str_2), - navlink=(file_navlink), + root_dir_row, + sub_dir_row, + ( + nested_file_row := types_.TableRow( + level=3, + path="path 3", + navlink=(file_navlink), + ) ), ], [ types_.DocumentMeta( path=path_to_markdown( - Path(base_path_dir_str) / Path(path_str) / Path(path_str_2) + Path(root_dir_row.path) + / Path(sub_dir_row.path) + / Path(nested_file_row.path) ), - link=link, + link=link_str, + table_row=nested_file_row, ), ], id="nested directory file", ), ], ) -def test_migrate_directory( +def test_extract_docs( table_rows: Iterable[types_.TableRow], expected_files: List[types_.MigrationFileMeta], ): @@ -410,3 +406,91 @@ def test_migrate_directory( """ files = [file for file in migration.extract_docs(table_rows=table_rows)] assert files == expected_files + + +def test__migrate_gitkeep(tmp_path: Path): + """ + arrange: given valid gitkeep metadata + act: when _migrate_gitkeep is called + assert: migration report is created with responsible table row, written path \ + and reason. + """ + path = Path("empty/docs/dir/.gitkeep") + table_row = types_.TableRow( + level=1, path="empty-directory", navlink=types_.Navlink(title="title 1", link=None) + ) + gitkeep_meta = types_.GitkeepMeta(path=path, table_row=table_row) + + migration_report = migration._migrate_gitkeep(gitkeep_meta=gitkeep_meta, docs_path=tmp_path) + + assert (file_path := tmp_path / path).is_file() + assert file_path.read_text(encoding="utf-8") == "" + assert migration_report.table_row == table_row + assert migration_report.result == types_.ActionResult.SUCCESS + assert migration_report.reason is not None + assert "created due to empty directory" in migration_report.reason + + +def test__migrate_document_fail(tmp_path: Path): + """ + arrange: given valid document metadata and mocked discourse that raises an error + act: when _migrate_document is called + assert: failed migration report is returned. 
+ """ + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = (error := exceptions.DiscourseError("fail")) + table_row = types_.TableRow( + level=(level := 1), + path=(path_str := "empty-directory"), + navlink=types_.Navlink(title=(navlink_title := "title 1"), link=(link_str := "link 1")), + ) + document_meta = types_.DocumentMeta( + path=(path := Path(path_str)), table_row=table_row, link=link_str + ) + + returned_report = migration._migrate_document( + document_meta=document_meta, discourse=mocked_discourse, docs_path=tmp_path + ) + + assert not (tmp_path / path).exists() + mocked_discourse.retrieve_topic.assert_called_once_with(url=link_str) + assert returned_report.table_row is not None + assert returned_report.table_row.level == level + assert returned_report.table_row.path == path_str + assert returned_report.table_row.navlink.title == navlink_title + assert returned_report.table_row.navlink.link == link_str + assert returned_report.result == types_.ActionResult.FAIL + assert returned_report.reason == str(error) + + +def test__migrate_document(tmp_path: Path): + """ + arrange: given valid document metadata + act: when _migrate_document is called + assert: migration report is created with responsible table row, written path \ + and reason. + """ + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.return_value = (content := "content") + table_row = types_.TableRow( + level=(level := 1), + path=(path_str := "empty-directory"), + navlink=types_.Navlink(title=(navlink_title := "title 1"), link=(link_str := "link 1")), + ) + document_meta = types_.DocumentMeta( + path=(path := Path(path_str)), table_row=table_row, link=link_str + ) + + returned_report = migration._migrate_document( + document_meta=document_meta, discourse=mocked_discourse, docs_path=tmp_path + ) + + assert (file_path := (tmp_path / path)).is_file() + assert file_path.read_text(encoding="utf-8") == content + mocked_discourse.retrieve_topic.assert_called_once_with(url=link_str) + assert returned_report.table_row is not None + assert returned_report.table_row.level == level + assert returned_report.table_row.path == path_str + assert returned_report.table_row.navlink.title == navlink_title + assert returned_report.table_row.navlink.link == link_str + assert returned_report.result == types_.ActionResult.SUCCESS From f2750ab996841d047ac40ec6ef5bf4ea07e68c6f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 18:11:55 +0800 Subject: [PATCH 005/107] move retrieve topic to separate try except --- src/migration.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/migration.py b/src/migration.py index df22f07e..4182ec07 100644 --- a/src/migration.py +++ b/src/migration.py @@ -69,15 +69,6 @@ def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, """ try: content = discourse.retrieve_topic(url=document_meta.link) - path = docs_path / document_meta.path - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(content, encoding="utf-8") - return types_.MigrationReport( - table_row=document_meta.table_row, - result=types_.ActionResult.SUCCESS, - path=path, - reason=None, - ) except exceptions.DiscourseError as exc: return types_.MigrationReport( table_row=document_meta.table_row, @@ -85,6 +76,15 @@ def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, path=None, reason=str(exc), ) + path = docs_path / document_meta.path + 
path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(content, encoding="utf-8") + return types_.MigrationReport( + table_row=document_meta.table_row, + result=types_.ActionResult.SUCCESS, + path=path, + reason=None, + ) def _run_one( From 1e175767737062b40fab07b24861d27acf39850a Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 18:25:00 +0800 Subject: [PATCH 006/107] switch docstring attr order --- src/types_.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/types_.py b/src/types_.py index d3ce0d38..c17f1c7f 100644 --- a/src/types_.py +++ b/src/types_.py @@ -320,8 +320,8 @@ class MigrationFileMeta: """Metadata about a document to be migrated. Attrs: - table_row: The navigation table entry. path: The full document path to be written to. + table_row: The navigation table entry. """ path: Path From e1dc927b70e04d31c5c100f843f5bf8fafdecc7e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 14 Dec 2022 20:30:19 +0800 Subject: [PATCH 007/107] separate migrate/reconcile flow --- src/__init__.py | 64 ++++++++++++++++++++++++++++++++++++- src/docs_directory.py | 13 ++++++++ tests/unit/test___init__.py | 46 ++++++-------------------- 3 files changed, 86 insertions(+), 37 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index b5d2f9a7..673990a7 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -8,15 +8,24 @@ from .action import DRY_RUN_NAVLINK_LINK, FAIL_NAVLINK_LINK from .action import run_all as run_all_actions from .discourse import Discourse +from .docs_directory import has_docs_directory from .docs_directory import read as read_docs_directory +from .exceptions import InputError from .index import DOCUMENTATION_FOLDER_NAME from .index import get as get_index from .metadata import get as get_metadata +from .migration import extract_docs +from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page from .reconcile import run as run_reconcile +GETTING_STARTED = ( + "To get started with upload-charm-docs, " + "please refer to https://github.com/canonical/upload-charm-docs#getting-started" +) -def run( + +def _run_reconcile( base_path: Path, discourse: Discourse, dry_run: bool, @@ -56,3 +65,56 @@ def run( and report.url != DRY_RUN_NAVLINK_LINK and report.url != FAIL_NAVLINK_LINK } + + +def _run_migrate( + base_path: Path, + discourse: Discourse, +) -> dict[str, str]: + """Migrate existing docs from charmhub to local repository. + + Returns: + All the filepaths that were created with the result of that action. + """ + metadata = get_metadata(base_path) + index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) + server_content = ( + index.server.content if index.server is not None and index.server.content else "" + ) + table_rows = navigation_table_from_page(page=server_content) + file_metadata = extract_docs(table_rows=table_rows) + reports = run_migrate( + documents=file_metadata, + discourse=discourse, + docs_path=base_path / DOCUMENTATION_FOLDER_NAME, + ) + + return {str(report.path): report.result for report in reports if report.path is not None} + + +def run( + base_path: Path, + discourse: Discourse, + dry_run: bool, + delete_pages: bool, +) -> dict[str, str]: + """Interact with charmhub to upload documentation or migrate to local repository. + + Args: + base_path: The base path to look for the metadata file in. + discourse: A client to the documentation server. + dry_run: If enabled, only log the action that would be taken. 
+ delete_pages: Whether to delete pages that are no longer needed. + + Returns: + All the URLs that had an action with the result of that action. + """ + metadata = get_metadata(base_path) + has_docs_dir = has_docs_directory(base_path=base_path) + if metadata.docs and not has_docs_dir: + return _run_migrate(base_path=base_path, discourse=discourse) + elif has_docs_dir: + return _run_reconcile( + base_path=base_path, discourse=discourse, dry_run=dry_run, delete_pages=delete_pages + ) + raise InputError(GETTING_STARTED) diff --git a/src/docs_directory.py b/src/docs_directory.py index 3f925d64..3b29ae69 100644 --- a/src/docs_directory.py +++ b/src/docs_directory.py @@ -8,6 +8,7 @@ from pathlib import Path from . import types_ +from .index import DOCUMENTATION_FOLDER_NAME def _get_directories_files(docs_path: Path) -> list[Path]: @@ -129,3 +130,15 @@ def read(docs_path: Path) -> typing.Iterator[types_.PathInfo]: return map( partial(_get_path_info, docs_path=docs_path), _get_directories_files(docs_path=docs_path) ) + + +def has_docs_directory(base_path: Path): + """Return existence of docs directory from base path. + + Args: + base_path: Base path of the repository to search the docs directory from + + Returns: + True if documentation folder exists, False otherwise + """ + return (base_path / DOCUMENTATION_FOLDER_NAME).is_dir() diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 3dd54beb..1e8cb2e9 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -6,33 +6,29 @@ from pathlib import Path from unittest import mock -from src import discourse, exceptions, metadata, reconcile, run, types_ +import pytest + +from src import GETTING_STARTED, discourse, exceptions, metadata, reconcile, run, types_ from .helpers import create_metadata_yaml -def test_run_empty_local_server(tmp_path: Path): +def test__run_reconcile_empty_local_server(tmp_path: Path): """ arrange: given metadata with name but not docs and empty docs folder and mocked discourse act: when run is called - assert: then an index page is created with empty navigation table. + assert: then InputError is raised with a link to getting started guide. 
""" create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_discourse.create_topic.return_value = (url := "url 1") - returned_page_interactions = run( - base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=True - ) + with pytest.raises(exceptions.InputError) as exc: + run(base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=True) - mocked_discourse.create_topic.assert_called_once_with( - title="Name 1 Documentation Overview", - content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", - ) - assert returned_page_interactions == {url: types_.ActionResult.SUCCESS} + assert GETTING_STARTED == str(exc.value) -def test_run_local_empty_server(tmp_path: Path): +def test__run_reconcile_local_empty_server(tmp_path: Path): """ arrange: given metadata with name but not docs and docs folder with a file and mocked discourse act: when run is called @@ -71,7 +67,7 @@ def test_run_local_empty_server(tmp_path: Path): } -def test_run_local_empty_server_dry_run(tmp_path: Path): +def test__run_reconcile_local_empty_server_dry_run(tmp_path: Path): """ arrange: given metadata with name but not docs and docs folder with a file and mocked discourse act: when run is called with dry run mode enabled @@ -89,25 +85,3 @@ def test_run_local_empty_server_dry_run(tmp_path: Path): mocked_discourse.create_topic.assert_not_called() assert not returned_page_interactions - - -def test_run_local_empty_server_error(tmp_path: Path): - """ - arrange: given metadata with name but not docs and empty docs directory and mocked discourse - that raises an exception - act: when run is called - assert: no pages are created. - """ - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) - mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_discourse.create_topic.side_effect = exceptions.DiscourseError - - returned_page_interactions = run( - base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=True - ) - - mocked_discourse.create_topic.assert_called_once_with( - title="Name 1 Documentation Overview", - content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", - ) - assert not returned_page_interactions From 174c4749db83e31d9ddbfb62f0a0d2808d0e26dc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 15 Dec 2022 10:16:31 +0800 Subject: [PATCH 008/107] change run tests to run_reconcile tests --- tests/unit/test___init__.py | 80 +++++++++++++++++++++++++++++-------- 1 file changed, 63 insertions(+), 17 deletions(-) diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 1e8cb2e9..3d3b5893 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -3,12 +3,13 @@ """Unit tests for execution.""" +# Need access to protected functions for testing +# pylint: disable=protected-access + from pathlib import Path from unittest import mock -import pytest - -from src import GETTING_STARTED, discourse, exceptions, metadata, reconcile, run, types_ +from src import _run_reconcile, discourse, exceptions, metadata, reconcile, types_ from .helpers import create_metadata_yaml @@ -16,27 +17,38 @@ def test__run_reconcile_empty_local_server(tmp_path: Path): """ arrange: given metadata with name but not docs and empty docs folder and mocked discourse - act: when run is called - assert: then InputError is raised with a link to getting started guide. 
+ act: when _run_reconcile is called + assert: then an index page is created with empty navigation table. """ create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + meta = types_.Metadata(name="name 1", docs=None) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.create_topic.return_value = (url := "url 1") + + returned_page_interactions = _run_reconcile( + base_path=tmp_path, + metadata=meta, + discourse=mocked_discourse, + dry_run=False, + delete_pages=True, + ) - with pytest.raises(exceptions.InputError) as exc: - run(base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=True) - - assert GETTING_STARTED == str(exc.value) + mocked_discourse.create_topic.assert_called_once_with( + title="Name 1 Documentation Overview", + content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", + ) + assert returned_page_interactions == {url: types_.ActionResult.SUCCESS} def test__run_reconcile_local_empty_server(tmp_path: Path): """ arrange: given metadata with name but not docs and docs folder with a file and mocked discourse - act: when run is called + act: when _run_reconcile is called assert: then a documentation page is created and an index page is created with a navigation page with a reference to the documentation page. """ name = "name 1" - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {name}", path=tmp_path) + meta = types_.Metadata(name=name, docs=None) (docs_folder := tmp_path / "docs").mkdir() (docs_folder / "index.md").write_text(index_content := "index content") (docs_folder / "page.md").write_text(page_content := "page content") @@ -46,8 +58,12 @@ def test__run_reconcile_local_empty_server(tmp_path: Path): (index_url := "url 2"), ] - returned_page_interactions = run( - base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=True + returned_page_interactions = _run_reconcile( + base_path=tmp_path, + metadata=meta, + discourse=mocked_discourse, + dry_run=False, + delete_pages=True, ) assert mocked_discourse.create_topic.call_count == 2 @@ -70,18 +86,48 @@ def test__run_reconcile_local_empty_server(tmp_path: Path): def test__run_reconcile_local_empty_server_dry_run(tmp_path: Path): """ arrange: given metadata with name but not docs and docs folder with a file and mocked discourse - act: when run is called with dry run mode enabled + act: when _run_reconcile is called with dry run mode enabled assert: no pages are created. """ - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + meta = types_.Metadata(name="name 1", docs=None) (docs_folder := tmp_path / "docs").mkdir() (docs_folder / "index.md").write_text("index content") (docs_folder / "page.md").write_text("page content") mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - returned_page_interactions = run( - base_path=tmp_path, discourse=mocked_discourse, dry_run=True, delete_pages=True + returned_page_interactions = _run_reconcile( + base_path=tmp_path, + metadata=meta, + discourse=mocked_discourse, + dry_run=True, + delete_pages=True, ) mocked_discourse.create_topic.assert_not_called() assert not returned_page_interactions + + +def test__run_reconcile_local_empty_server_error(tmp_path: Path): + """ + arrange: given metadata with name but not docs and empty docs directory and mocked discourse + that raises an exception + act: when _run_reconcile is called + assert: no pages are created. 
+ """ + meta = types_.Metadata(name="name 1", docs=None) + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.create_topic.side_effect = exceptions.DiscourseError + + returned_page_interactions = _run_reconcile( + base_path=tmp_path, + metadata=meta, + discourse=mocked_discourse, + dry_run=False, + delete_pages=True, + ) + + mocked_discourse.create_topic.assert_called_once_with( + title="Name 1 Documentation Overview", + content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", + ) + assert not returned_page_interactions From e0acbced0ec7232cb12510585d5e876f3087de25 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 15 Dec 2022 22:34:41 +0800 Subject: [PATCH 009/107] add migration control flow --- src/__init__.py | 26 +++-- src/index.py | 23 ++++ src/migration.py | 57 +++++++++- src/types_.py | 19 +++- tests/unit/test___init__.py | 193 +++++++++++++++++++++++++++++++- tests/unit/test_action.py | 2 +- tests/unit/test_index.py | 39 +++++++ tests/unit/test_migration.py | 211 +++++++++++++++++++++++++++++++---- 8 files changed, 537 insertions(+), 33 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 673990a7..597b93e8 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -11,13 +11,14 @@ from .docs_directory import has_docs_directory from .docs_directory import read as read_docs_directory from .exceptions import InputError -from .index import DOCUMENTATION_FOLDER_NAME +from .index import DOCUMENTATION_FOLDER_NAME, contents_from_page from .index import get as get_index from .metadata import get as get_metadata -from .migration import extract_docs +from .migration import get_docs_metadata from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page from .reconcile import run as run_reconcile +from .types_ import ActionResult, Metadata GETTING_STARTED = ( "To get started with upload-charm-docs, " @@ -27,6 +28,7 @@ def _run_reconcile( base_path: Path, + metadata: Metadata, discourse: Discourse, dry_run: bool, delete_pages: bool, @@ -43,7 +45,6 @@ def _run_reconcile( All the URLs that had an action with the result of that action. """ - metadata = get_metadata(base_path) index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) path_infos = read_docs_directory(docs_path=base_path / DOCUMENTATION_FOLDER_NAME) server_content = ( @@ -69,6 +70,7 @@ def _run_reconcile( def _run_migrate( base_path: Path, + metadata: Metadata, discourse: Discourse, ) -> dict[str, str]: """Migrate existing docs from charmhub to local repository. @@ -76,20 +78,24 @@ def _run_migrate( Returns: All the filepaths that were created with the result of that action. 
""" - metadata = get_metadata(base_path) index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) server_content = ( index.server.content if index.server is not None and index.server.content else "" ) + index_content = contents_from_page(server_content) table_rows = navigation_table_from_page(page=server_content) - file_metadata = extract_docs(table_rows=table_rows) + file_metadata = get_docs_metadata(table_rows=table_rows, index_content=index_content) reports = run_migrate( documents=file_metadata, discourse=discourse, docs_path=base_path / DOCUMENTATION_FOLDER_NAME, ) - return {str(report.path): report.result for report in reports if report.path is not None} + return { + str(report.path): report.result + for report in reports + if report.path is not None and report.result != ActionResult.FAIL + } def run( @@ -112,9 +118,13 @@ def run( metadata = get_metadata(base_path) has_docs_dir = has_docs_directory(base_path=base_path) if metadata.docs and not has_docs_dir: - return _run_migrate(base_path=base_path, discourse=discourse) + return _run_migrate(base_path=base_path, metadata=metadata, discourse=discourse) elif has_docs_dir: return _run_reconcile( - base_path=base_path, discourse=discourse, dry_run=dry_run, delete_pages=delete_pages + base_path=base_path, + metadata=metadata, + discourse=discourse, + dry_run=dry_run, + delete_pages=delete_pages, ) raise InputError(GETTING_STARTED) diff --git a/src/index.py b/src/index.py index 4fe31951..e1230471 100644 --- a/src/index.py +++ b/src/index.py @@ -3,12 +3,17 @@ """Execute the uploading of documentation.""" +import re from pathlib import Path from .discourse import Discourse from .exceptions import DiscourseError, ServerError from .types_ import Index, IndexFile, Metadata, Page +_WHITESPACE = r"\s*" +_NAVIGATION_HEADER_REGEX = rf"{_WHITESPACE}# Navigation" +_INDEX_CONTENT_REGEX = r"^((.|\n)*)" +_INDEX_CONTENT_PATTERN = re.compile(rf"{_INDEX_CONTENT_REGEX}(?={_NAVIGATION_HEADER_REGEX})") DOCUMENTATION_FOLDER_NAME = "docs" DOCUMENTATION_INDEX_FILENAME = "index.md" @@ -64,3 +69,21 @@ def get(metadata: Metadata, base_path: Path, server_client: Discourse) -> Index: ) return Index(server=server, local=local, name=name_value) + + +def contents_from_page(page: str) -> str: + """Get index file contents from server page. + + Args: + page: Page contents from server. + + Returns: + Index file contents. + """ + match = _INDEX_CONTENT_PATTERN.match(page) + + if match is None: + return "" + + content = match.group(0) + return content diff --git a/src/migration.py b/src/migration.py index 4182ec07..b14ffd74 100644 --- a/src/migration.py +++ b/src/migration.py @@ -3,6 +3,7 @@ """Module for transforming index table rows into local files.""" +import itertools import typing from pathlib import Path @@ -87,6 +88,27 @@ def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, ) +def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path): + """Write index document to docs repository. + + Args: + index_meta: Index file metadata. + docs_path: The path to the docs directory to migrate all the documentation. + + Returns: + Migration report for index file creation. 
+ """ + path = docs_path / index_meta.path + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(index_meta.content, encoding="utf-8") + return types_.MigrationReport( + table_row=None, + result=types_.ActionResult.SUCCESS, + path=path, + reason=None, + ) + + def _run_one( file_meta: types_.MigrationFileMeta, discourse: Discourse, docs_path: Path ) -> types_.MigrationReport: @@ -109,6 +131,9 @@ def _run_one( return _migrate_document( document_meta=file_meta, discourse=discourse, docs_path=docs_path ) + case types_.IndexDocumentMeta: + assert isinstance(file_meta, types_.IndexDocumentMeta) + return _migrate_index(index_meta=file_meta, docs_path=docs_path) # Edge case that should not be possible. case _: # pragma: no cover raise exceptions.MigrationError( @@ -116,7 +141,7 @@ def _run_one( ) -def extract_docs( +def _extract_docs_from_table_rows( table_rows: typing.Iterable[types_.TableRow], ) -> typing.Iterable[types_.MigrationFileMeta]: """Extract necessary migration documents to build docs directory from server. @@ -139,6 +164,7 @@ def extract_docs( Migration documents with navlink to content.\ .gitkeep file with no content if empty directory. """ + table_rows = list(table_rows) _validate_row_levels(table_rows=table_rows) level = 0 @@ -170,6 +196,35 @@ def extract_docs( yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE, table_row=last_dir_row) +def _index_file_from_content(content: str): + """Get index file document metadata. + + Args: + content: Index file content. + + Returns: + Index file document metadata. + """ + return types_.IndexDocumentMeta(path=Path("index.md"), content=content) + + +def get_docs_metadata( + table_rows: typing.Iterable[types_.TableRow], index_content: str +) -> typing.Iterable[types_.MigrationFileMeta]: + """Get metadata for documents to be migrated. + + Args: + table_rows: Table rows from the index table. + index_content: Index content from index page. + + Returns: + Metadata of files to be migrated. + """ + table_docs = _extract_docs_from_table_rows(table_rows=table_rows) + index_doc = _index_file_from_content(content=index_content) + return itertools.chain([index_doc], table_docs) + + def run( documents: typing.Iterable[types_.MigrationFileMeta], discourse: Discourse, docs_path: Path ) -> typing.Iterable[types_.MigrationReport]: diff --git a/src/types_.py b/src/types_.py index c17f1c7f..b294c293 100644 --- a/src/types_.py +++ b/src/types_.py @@ -325,13 +325,14 @@ class MigrationFileMeta: """ path: Path - table_row: TableRow @dataclasses.dataclass class GitkeepMeta(MigrationFileMeta): """Represents an empty directory from the index table.""" + table_row: TableRow + @dataclasses.dataclass class DocumentMeta(MigrationFileMeta): @@ -342,19 +343,31 @@ class DocumentMeta(MigrationFileMeta): """ link: str + table_row: TableRow + + +@dataclasses.dataclass +class IndexDocumentMeta(MigrationFileMeta): + """Represents an index file document. + + Attrs: + content: Contents to write to index file. + """ + + content: str class MigrationReport(typing.NamedTuple): """Post execution report for an action. Attrs: - table_row: The navigation table entry. + table_row: The navigation table entry. None if index file. path: Path the file was written to. None if failed. result: The action execution result. reason: The reason, None for success reports. 
""" - table_row: TableRow + table_row: TableRow | None path: Path | None result: ActionResult reason: str | None diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 3d3b5893..832371dd 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -9,7 +9,21 @@ from pathlib import Path from unittest import mock -from src import _run_reconcile, discourse, exceptions, metadata, reconcile, types_ +import pytest + +from src import ( + DOCUMENTATION_FOLDER_NAME, + GETTING_STARTED, + _run_migrate, + _run_reconcile, + discourse, + exceptions, + index, + metadata, + reconcile, + run, + types_, +) from .helpers import create_metadata_yaml @@ -131,3 +145,180 @@ def test__run_reconcile_local_empty_server_error(tmp_path: Path): content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", ) assert not returned_page_interactions + + +def test__run_migrate_server_error_index(tmp_path: Path): + """ + arrange: given metadata with name and docs but no docs directory and mocked discourse + that raises an exception during index file fetching + act: when _run_migrate is called + assert: Server error is raised with page retrieval fail. + """ + meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = exceptions.DiscourseError + + with pytest.raises(exceptions.ServerError) as exc: + _run_migrate( + base_path=tmp_path, + metadata=meta, + discourse=mocked_discourse, + ) + + assert "Index page retrieval failed" == str(exc.value) + + +def test__run_migrate_server_error_topic(tmp_path: Path): + """ + arrange: given metadata with name and docs but no docs directory and mocked discourse + that raises an exception during topic retrieval + act: when _run_migrate is called + assert: only index document is migrated. + """ + index_url = "http://discourse/t/docs" + index_content = """Content Title + + Content description. + + # Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + | 1 | path 1 | [Link](/t/link-to-1) | + """ + meta = types_.Metadata(name="name 1", docs=index_url) + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] + + returned_migration_reports = _run_migrate( + base_path=tmp_path, metadata=meta, discourse=mocked_discourse + ) + + assert returned_migration_reports == { + str(tmp_path / DOCUMENTATION_FOLDER_NAME / "index.md"): types_.ActionResult.SUCCESS + } + + +def test__run_migrate(tmp_path: Path): + """ + arrange: given metadata with name and docs but no docs directory and mocked discourse + act: when _run_migrate is called + assert: docs are migrated and a report on migrated documents are returned. + """ + index_url = "http://discourse/t/docs" + index_content = """Content header. + + Content body. 
+ """ + index_table = """# Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + | 1 | path-1 | [Tutorials](link-1) |""" + index_page = f"{index_content}{index_table}" + meta = types_.Metadata(name="name 1", docs=index_url) + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = [ + index_page, + (link_content := "link 1 content"), + ] + + returned_migration_reports = _run_migrate( + base_path=tmp_path, metadata=meta, discourse=mocked_discourse + ) + + assert returned_migration_reports == { + str( + index_file := tmp_path / DOCUMENTATION_FOLDER_NAME / "index.md" + ): types_.ActionResult.SUCCESS, + str( + path_file := tmp_path / DOCUMENTATION_FOLDER_NAME / "path-1.md" + ): types_.ActionResult.SUCCESS, + } + assert index_file.read_text(encoding="utf-8") == index_content + assert path_file.read_text(encoding="utf-8") == link_content + + +def test_run_no_docs_no_dir(tmp_path: Path): + """ + arrange: given a path with a metadata.yaml that has no docs key and no docs directory + and mocked discourse + act: when run is called + assert: InputError is raised with a guide to getting started. + """ + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + + with pytest.raises(exceptions.InputError) as exc: + run(base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=False) + + assert str(exc.value) == GETTING_STARTED + + +def test_run_no_docs_empty_dir(tmp_path: Path): + """ + arrange: given a path with a metadata.yaml that has no docs key and has empty docs directory + and mocked discourse + act: when run is called + assert: then an index page is created with empty navigation table. + """ + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + (tmp_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.create_topic.return_value = (url := "url 1") + + returned_page_interactions = run( + base_path=tmp_path, + discourse=mocked_discourse, + dry_run=False, + delete_pages=True, + ) + + mocked_discourse.create_topic.assert_called_once_with( + title="Name 1 Documentation Overview", + content=f"{reconcile.NAVIGATION_TABLE_START.strip()}", + ) + assert returned_page_interactions == {url: types_.ActionResult.SUCCESS} + + +def test_run_no_docs_dir(tmp_path: Path): + """ + arrange: given a path with a metadata.yaml that has docs key and no docs directory + and mocked discourse + act: when run is called + assert: then docs from the server is migrated into local docs path and the files created + are return as the result. + """ + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: name 1\n" f"{metadata.METADATA_DOCS_KEY}: docsUrl", + path=tmp_path, + ) + index_content = """Content header. + + Content body. + """ + index_table = """Page title. + + Page description. 
+ + # Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + | 1 | path-1 | [empty-navlink]() | + | 2 | file-1 | [file-navlink](/file-navlink) |""" + index_page = f"{index_content}\n{index_table}" + navlink_page = "file-navlink-content" + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = [index_page, navlink_page] + + returned_migration_paths = run( + base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=False + ) + + assert returned_migration_paths == { + str(tmp_path / index.DOCUMENTATION_FOLDER_NAME / "index.md"): types_.ActionResult.SUCCESS, + str( + tmp_path / index.DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" + ): types_.ActionResult.SUCCESS, + } diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index 86bc69d9..99f0e061 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -161,7 +161,7 @@ def test__create_file(caplog: pytest.LogCaptureFixture): assert returned_report.reason is None -# Pylint diesn't understand how the walrus operator works +# Pylint doesn't understand how the walrus operator works # pylint: disable=undefined-variable,unused-variable @pytest.mark.parametrize( "noop_action, expected_table_row", diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 64865d1a..783da9cf 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -115,3 +115,42 @@ def test_get_metadata_yaml_retrieve_empty(tmp_path: Path): assert returned_index.local.title == "Name 1 Documentation Overview" assert returned_index.local.content is None assert returned_index.name == name + + +# Pylint doesn't understand how the walrus operator works +# pylint: disable=undefined-variable,unused-variable +@pytest.mark.parametrize( + "page, expected_content", + [ + pytest.param( + ( + nav_table := """# Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + """ + ), + "", + id="navigation table only", + ), + pytest.param((content := "Page content"), "", id="page content only"), + pytest.param( + f"{content}" + """# Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + """, + content, + id="page with content and navigation table", + ), + ], +) +# pylint: enable=undefined-variable,unused-variable +def test_get_contents_from_page(page: str, expected_content: str): + """ + arrange: given an index page from server + act: when contents_from_page is called + assert: contents without navigation table is returned. 
+ """ + assert index.contents_from_page(page=page) == expected_content diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index ceefe94d..ae40cd94 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -12,7 +12,7 @@ import pytest -from src import discourse, exceptions, migration, types_ +from src import discourse, exceptions, index, migration, types_ from .helpers import path_to_markdown @@ -177,16 +177,14 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): [ pytest.param( [ - ( - root_dir_row := types_.TableRow( - level=1, - path="root path 1", - navlink=(dir_navlink := types_.Navlink(title="title 1", link=None)), - ) - ), + root_dir_row := types_.TableRow( + level=1, + path="root path 1", + navlink=(dir_navlink := types_.Navlink(title="title 1", link=None)), + ) ], [ - types_.GitkeepMeta( + root_dir_gitkeep := types_.GitkeepMeta( path=Path(root_dir_row.path) / (gitkeep_file := Path(".gitkeep")), table_row=root_dir_row, ) @@ -205,10 +203,8 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): ), ], [ - types_.GitkeepMeta( - path=Path(root_dir_row.path) / gitkeep_file, table_row=root_dir_row - ), - types_.GitkeepMeta( + root_dir_gitkeep, + root_dir_2_gitkeep := types_.GitkeepMeta( path=Path(root_dir_row_2.path) / gitkeep_file, table_row=root_dir_row_2 ), ], @@ -233,19 +229,67 @@ def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): ), ], ) -def test_extract_docs_empty_directory_rows( +def test_extract_docs__from_table_rows_empty_directory_rows( table_rows: Iterable[types_.TableRow], expected_files: List[types_.MigrationFileMeta], ): """ arrange: given valid table rows with no navlink(only directories) act: when migrate is called - assert: .gitkeep files with respective directories are returned. + assert: .gitkeep files metadata with respective directories are returned. """ - files = [file for file in migration.extract_docs(table_rows=table_rows)] + files = [file for file in migration._extract_docs_from_table_rows(table_rows=table_rows)] assert files == expected_files +def test__index_file_from_content(): + """ + arrange: given content to write to index file + act: when _index_file_from_content is called + assert: index file metadata is returned. + """ + content = "content 1" + + assert migration._index_file_from_content(content=content) == types_.IndexDocumentMeta( + path=Path("index.md"), content=content + ) + + +@pytest.mark.parametrize( + "table_rows, index_content, expected_migration_metadata", + [ + pytest.param( + [], + content := "content 1", + [index_meta := types_.IndexDocumentMeta(path=Path("index.md"), content=content)], + id="no table rows", + ), + pytest.param( + [root_dir_row, root_dir_row_2], + content, + [index_meta, root_dir_gitkeep, root_dir_2_gitkeep], + id="multiple table_rows", + ), + ], +) +def test_get_docs_metadata( + table_rows: list[types_.TableRow], + index_content: str, + expected_migration_metadata: list[types_.MigrationFileMeta], +): + """ + arrange: given document table rows and index file content + act: when get_docs_metadata is called + assert: expected metadata are returned. + """ + assert [ + metadata + for metadata in migration.get_docs_metadata( + table_rows=table_rows, index_content=index_content + ) + ] == expected_migration_metadata + + @pytest.mark.parametrize( "table_rows, expected_files", [ @@ -404,7 +448,7 @@ def test_extract_docs( act: when migrate is called assert: document file with correct paths are returned. 
""" - files = [file for file in migration.extract_docs(table_rows=table_rows)] + files = [file for file in migration._extract_docs_from_table_rows(table_rows=table_rows)] assert files == expected_files @@ -467,8 +511,7 @@ def test__migrate_document(tmp_path: Path): """ arrange: given valid document metadata act: when _migrate_document is called - assert: migration report is created with responsible table row, written path \ - and reason. + assert: document is created and migration report is returned. """ mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.return_value = (content := "content") @@ -494,3 +537,133 @@ def test__migrate_document(tmp_path: Path): assert returned_report.table_row.navlink.title == navlink_title assert returned_report.table_row.navlink.link == link_str assert returned_report.result == types_.ActionResult.SUCCESS + + +def test__migrate_index(tmp_path: Path): + """ + arrange: given valid index document metadata + act: when _migrate_index is called + assert: index file is created and migration report is returned. + """ + document_meta = types_.IndexDocumentMeta( + path=(path := Path("index.md")), content=(content := "content 1") + ) + + returned_report = migration._migrate_index(index_meta=document_meta, docs_path=tmp_path) + + assert (file_path := (tmp_path / path)).is_file() + assert file_path.read_text(encoding="utf-8") == content + assert returned_report.table_row is None + assert returned_report.result == types_.ActionResult.SUCCESS + assert returned_report.path == tmp_path / path + assert returned_report.reason is None + + +@pytest.mark.parametrize( + "file_meta, expected_report", + [ + pytest.param( + gitkeep_meta := types_.GitkeepMeta( + path=(gitkeep_path := Path(".gitkeep")), + table_row=( + table_row_sample := types_.TableRow( + level=1, + path="tablepath", + navlink=types_.Navlink(title="navlink", link=None), + ) + ), + ), + gitkeep_report := types_.MigrationReport( + table_row=table_row_sample, + path=gitkeep_path, + result=types_.ActionResult.SUCCESS, + reason=migration.EMPTY_DIR_REASON, + ), + id="gitkeep file", + ), + pytest.param( + document_meta := types_.DocumentMeta( + path=(document_path := Path("document.md")), + table_row=table_row_sample, + link="samplelink", + ), + document_report := types_.MigrationReport( + table_row=table_row_sample, + path=document_path, + result=types_.ActionResult.SUCCESS, + reason=None, + ), + id="document file", + ), + pytest.param( + types_.IndexDocumentMeta( + path=(index_path := Path("index.md")), content="index content" + ), + types_.MigrationReport( + table_row=None, + path=index_path, + result=types_.ActionResult.SUCCESS, + reason=None, + ), + id="index file", + ), + ], +) +def test__run_one( + file_meta: types_.MigrationFileMeta, expected_report: types_.MigrationReport, tmp_path: Path +): + """ + arrange: given a migration metadata and mocked discourse + act: when _run_one is called + assert: a valid migration report is returned and a file is created. 
+ """ + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = "content" + + returned_report = migration._run_one( + file_meta=file_meta, discourse=mocked_discourse, docs_path=tmp_path + ) + + assert returned_report.path is not None + assert returned_report.path.is_file() + assert expected_report.path is not None + assert returned_report.path == tmp_path / expected_report.path + assert returned_report.result == expected_report.result + assert returned_report.reason == expected_report.reason + assert returned_report.table_row == expected_report.table_row + + +@pytest.mark.parametrize( + "migration_metas, expected_results", + [ + pytest.param([document_meta], [document_report], id="single"), + pytest.param( + [document_meta, gitkeep_meta], [document_report, gitkeep_report], id="multiple" + ), + ], +) +def test_run( + migration_metas: list[types_.MigrationFileMeta], + expected_results: list[types_.MigrationReport], + tmp_path: Path, +): + """ + arrange: given migration metadata and mocked discourse + act: when run is called + assert: migration reports are returned and files are created. + """ + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = "content" + + returned_reports = migration.run( + documents=migration_metas, discourse=mocked_discourse, docs_path=tmp_path + ) + + for returned, expected in zip(returned_reports, expected_results): + assert returned.path is not None + assert returned.path.is_file() + assert expected.path is not None + assert returned.path == tmp_path / expected.path + assert returned.result == expected.result + assert returned.reason == expected.reason + assert returned.table_row == expected.table_row From 243e32ca86b64c106cff8f1e384ef1e1f4658f87 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 15 Dec 2022 22:55:45 +0800 Subject: [PATCH 010/107] fix spelling and lint feedback --- src/__init__.py | 2 +- src/migration.py | 9 +++++---- tests/unit/test_migration.py | 20 ++++++++------------ 3 files changed, 14 insertions(+), 17 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 597b93e8..ea08baff 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -119,7 +119,7 @@ def run( has_docs_dir = has_docs_directory(base_path=base_path) if metadata.docs and not has_docs_dir: return _run_migrate(base_path=base_path, metadata=metadata, discourse=discourse) - elif has_docs_dir: + if has_docs_dir: return _run_reconcile( base_path=base_path, metadata=metadata, diff --git a/src/migration.py b/src/migration.py index b14ffd74..4ccc3501 100644 --- a/src/migration.py +++ b/src/migration.py @@ -124,15 +124,16 @@ def _run_one( """ match type(file_meta): case types_.GitkeepMeta: - assert isinstance(file_meta, types_.GitkeepMeta) + # To help mypy (same for the rest of the asserts), it is ok if the assert does not run + assert isinstance(file_meta, types_.GitkeepMeta) # nosec return _migrate_gitkeep(gitkeep_meta=file_meta, docs_path=docs_path) case types_.DocumentMeta: - assert isinstance(file_meta, types_.DocumentMeta) + assert isinstance(file_meta, types_.DocumentMeta) # nosec return _migrate_document( document_meta=file_meta, discourse=discourse, docs_path=docs_path ) case types_.IndexDocumentMeta: - assert isinstance(file_meta, types_.IndexDocumentMeta) + assert isinstance(file_meta, types_.IndexDocumentMeta) # nosec return _migrate_index(index_meta=file_meta, docs_path=docs_path) # Edge case that should not be possible. 
case _: # pragma: no cover @@ -158,7 +159,7 @@ def _extract_docs_from_table_rows( 2. If last table row was a directory and yielded no DocumentMeta, yield GitkeepMeta. Args: - table_rows: Table rows from the index file in the order of directory hierarcy. + table_rows: Table rows from the index file in the order of directory hierarchy. Returns: Migration documents with navlink to content.\ diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index ae40cd94..78b185a2 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -12,7 +12,7 @@ import pytest -from src import discourse, exceptions, index, migration, types_ +from src import discourse, exceptions, migration, types_ from .helpers import path_to_markdown @@ -79,7 +79,7 @@ def test__validate_row_levels_invalid_rows( """ arrange: given table rows with invalid levels act: when _validate_row_levels is called - assert: InvalidRow exception is raised with excpected error message contents. + assert: InvalidRow exception is raised with expected error message contents. """ with pytest.raises(exceptions.InvalidTableRowError) as exc_info: migration._validate_row_levels(table_rows=table_rows) @@ -238,8 +238,7 @@ def test_extract_docs__from_table_rows_empty_directory_rows( act: when migrate is called assert: .gitkeep files metadata with respective directories are returned. """ - files = [file for file in migration._extract_docs_from_table_rows(table_rows=table_rows)] - assert files == expected_files + assert list(migration._extract_docs_from_table_rows(table_rows=table_rows)) == expected_files def test__index_file_from_content(): @@ -282,12 +281,10 @@ def test_get_docs_metadata( act: when get_docs_metadata is called assert: expected metadata are returned. """ - assert [ - metadata - for metadata in migration.get_docs_metadata( - table_rows=table_rows, index_content=index_content - ) - ] == expected_migration_metadata + assert ( + list(migration.get_docs_metadata(table_rows=table_rows, index_content=index_content)) + == expected_migration_metadata + ) @pytest.mark.parametrize( @@ -448,8 +445,7 @@ def test_extract_docs( act: when migrate is called assert: document file with correct paths are returned. """ - files = [file for file in migration._extract_docs_from_table_rows(table_rows=table_rows)] - assert files == expected_files + assert list(migration._extract_docs_from_table_rows(table_rows=table_rows)) == expected_files def test__migrate_gitkeep(tmp_path: Path): From ad545c89a7e33a8b5c4bb2d6e486a5e0d3c53be3 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:29:06 +0800 Subject: [PATCH 011/107] composite actions --- action.yaml | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/action.yaml b/action.yaml index 01018512..42220fe7 100644 --- a/action.yaml +++ b/action.yaml @@ -46,5 +46,16 @@ outputs: The configuration used by the action to interact with the discourse server. runs: - using: docker - image: Dockerfile + using: composite + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + - run: python main.py + shell: bash + - uses: peter-evans/create-pull-request@v4 + with: + branch: "docs/migration-${{ github.sha }}" + title: "[docs] Documentation migration" + body: "This is a test for demo purpose, do not merge." 
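Note on the composite action introduced above: with the Docker image replaced by composite steps, a consuming charm repository calls the action from its own workflow and supplies the declared inputs. A minimal, illustrative sketch of such a consumer workflow follows; the workflow name, trigger, action ref, secret names and input values are placeholders assumed for the example, not taken from this series.

    # .github/workflows/publish-docs.yaml -- hypothetical consumer workflow
    name: Publish charm docs
    on:
      push:
        branches: [main]
    jobs:
      publish-docs:
        runs-on: ubuntu-latest
        steps:
          # The action's own first step checks out the calling repository,
          # so no separate checkout step is strictly required here.
          - uses: canonical/upload-charm-docs@main
            with:
              discourse_host: discourse.charmhub.io
              discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }}
              discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }}
              dry_run: false
              delete_topics: true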
From 8903445b7af65d2292d3f71887c62696f19dce4d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:39:38 +0800 Subject: [PATCH 012/107] github action path --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index 42220fe7..a1832f85 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,7 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: python main.py + - run: python ${{ github.action_path }}/main.py shell: bash - uses: peter-evans/create-pull-request@v4 with: From d845d326afb3f67f580e82125517542cc74d508c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:41:25 +0800 Subject: [PATCH 013/107] pip install requirements --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index a1832f85..812baf43 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,7 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: python ${{ github.action_path }}/main.py + - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}.main.py shell: bash - uses: peter-evans/create-pull-request@v4 with: From e18af3df777c06b3aea45bdf320b88f11879eb93 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:42:27 +0800 Subject: [PATCH 014/107] fix typo --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index 812baf43..8b98f131 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,7 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}.main.py + - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}/main.py shell: bash - uses: peter-evans/create-pull-request@v4 with: From 456bf572aaf58f24d86375d57a54caaf7fc2981a Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:53:04 +0800 Subject: [PATCH 015/107] disable PR and enable tmate for demo --- action.yaml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/action.yaml b/action.yaml index 8b98f131..d2882b7a 100644 --- a/action.yaml +++ b/action.yaml @@ -54,8 +54,9 @@ runs: python-version: "3.10" - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}/main.py shell: bash - - uses: peter-evans/create-pull-request@v4 - with: - branch: "docs/migration-${{ github.sha }}" - title: "[docs] Documentation migration" - body: "This is a test for demo purpose, do not merge." + - uses: mxschmitt/action-tmate@v3 + # - uses: peter-evans/create-pull-request@v4 + # with: + # branch: "docs/migration-${{ github.sha }}" + # title: "[docs] Documentation migration" + # body: "This is a test for demo purpose, do not merge." 
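Note on the run-step fixes above: composite actions execute their run steps in the calling repository's workspace, so the action's own files must be addressed through ${{ github.action_path }}, and, unlike the previous Docker-based action, the declared inputs are not exposed to those steps as INPUT_* environment variables automatically. A later patch in this series maps them onto INPUT_* variables on the command line, presumably because main.py still reads the variables the Docker action used to receive. An equivalent formulation, sketched here only as an alternative and not what the series adopts, would pass them through the step's env block:

    - run: |
        python -m pip install -r "${{ github.action_path }}/requirements.txt"
        python "${{ github.action_path }}/main.py"
      shell: bash
      env:
        INPUT_DRY_RUN: ${{ inputs.dry_run }}
        INPUT_DELETE_TOPICS: ${{ inputs.delete_topics }}
        INPUT_DISCOURSE_HOST: ${{ inputs.discourse_host }}
        INPUT_DISCOURSE_CATEGORY_ID: ${{ inputs.discourse_category_id }}
        INPUT_DISCOURSE_API_USERNAME: ${{ inputs.discourse_api_username }}
        INPUT_DISCOURSE_API_KEY: ${{ inputs.discourse_api_key }}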
From 61a08f40561c76b0f967c06d79fa6cd9b99f4c40 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:53:38 +0800 Subject: [PATCH 016/107] remove tmate --- action.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/action.yaml b/action.yaml index d2882b7a..b0fc1c0f 100644 --- a/action.yaml +++ b/action.yaml @@ -54,7 +54,6 @@ runs: python-version: "3.10" - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}/main.py shell: bash - - uses: mxschmitt/action-tmate@v3 # - uses: peter-evans/create-pull-request@v4 # with: # branch: "docs/migration-${{ github.sha }}" From 3d4c2498db151faa640075cb29bfc0c396d0ee14 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 03:57:46 +0800 Subject: [PATCH 017/107] env python --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index b0fc1c0f..cd0a3287 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,7 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && python ${{ github.action_path }}/main.py + - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && ${{ github.action_path }}/main.py shell: bash # - uses: peter-evans/create-pull-request@v4 # with: From 4bd5d94ca71f6870ebc2bb94e5dcd94e4b834b70 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 04:11:56 +0800 Subject: [PATCH 018/107] map inputs to env vars --- action.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index cd0a3287..8e7552cf 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,15 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: python -m pip install -r ${{ github.action_path }}/requirements.txt && ${{ github.action_path }}/main.py + - run: + python -m pip install -r ${{ github.action_path }}/requirements.txt && \ + INPUT_DRY_RUN=${{ inputs.dry_run }} \ + INPUT_DELETE_TOPICS=${{ inputs.delete_topics }} \ + INPUT_DISCOURSE_HOST=${{ inputs.discourse_host }} \ + INPUT_DISCOURSE_CATEGORY_ID=${{ inputs.discourse_category_id }} \ + INPUT_DISCOURSE_API_USERNAME=${{ inputs.discourse_api_username }} \ + INPUT_DISCOURSE_API_KEY=${{ inputs.discourse_api_key }} \ + ${{ github.action_path }}/main.py shell: bash # - uses: peter-evans/create-pull-request@v4 # with: From d4136bc441efeeff206e50f41ac6dd76b6b4bb9c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 04:20:42 +0800 Subject: [PATCH 019/107] multiline run --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index 8e7552cf..10831578 100644 --- a/action.yaml +++ b/action.yaml @@ -52,7 +52,7 @@ runs: - uses: actions/setup-python@v4 with: python-version: "3.10" - - run: + - run: | python -m pip install -r ${{ github.action_path }}/requirements.txt && \ INPUT_DRY_RUN=${{ inputs.dry_run }} \ INPUT_DELETE_TOPICS=${{ inputs.delete_topics }} \ From 921bd10b4f7327e0c7221c84355e3327e980f241 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 04:26:57 +0800 Subject: [PATCH 020/107] enable PR demo --- action.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/action.yaml b/action.yaml index 10831578..0c19b792 100644 --- a/action.yaml +++ b/action.yaml @@ -62,8 +62,8 @@ runs: INPUT_DISCOURSE_API_KEY=${{ inputs.discourse_api_key }} \ ${{ github.action_path }}/main.py shell: bash - # - uses: 
peter-evans/create-pull-request@v4 - # with: - # branch: "docs/migration-${{ github.sha }}" - # title: "[docs] Documentation migration" - # body: "This is a test for demo purpose, do not merge." + - uses: peter-evans/create-pull-request@v4 + with: + branch: "docs/migration-demo" + title: "[docs] Documentation migration demo" + body: "This is a test for demo purpose, do not merge." From 64fa218dfc3524dafedd581e0023aa92e0a7bd23 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 16 Dec 2022 13:46:56 +0800 Subject: [PATCH 021/107] test for invalid parent directory --- src/migration.py | 8 ++++++-- tests/unit/test_migration.py | 32 ++++++++++++++++++++++++-------- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/src/migration.py b/src/migration.py index 4ccc3501..5fec92b1 100644 --- a/src/migration.py +++ b/src/migration.py @@ -14,7 +14,7 @@ GITKEEP_FILE = ".gitkeep" -def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): +def _validate_row_levels(table_rows: list[types_.TableRow]): """Check for invalid row levels. Args: @@ -24,7 +24,7 @@ def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): InvalidRow exception if invalid row level is encountered. """ level = 0 - for row in table_rows: + for i, row in enumerate(table_rows): if row.level <= 0: raise exceptions.InvalidTableRowError(f"Invalid level {row.level} in {row!=row.level}") # Level increase of more than 1 is not possible. @@ -32,6 +32,10 @@ def _validate_row_levels(table_rows: typing.Iterable[types_.TableRow]): raise exceptions.InvalidTableRowError( f"Level difference of {difference} encountered in {row=!r}" ) + # Subdirectory but previous row is not a file. + if row.level > level and i > 0 and table_rows[i - 1].navlink.link: + raise exceptions.InvalidTableRowError(f"Invalid parent row for {row=!r}") + # Level decrease or same level is fine. 
level = row.level diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 78b185a2..a7ed7ed3 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -27,7 +27,7 @@ types_.TableRow( level=-1, path=(test_path := "path 1"), - navlink=(test_navlink := types_.Navlink(title="title 1", link=None)), + navlink=(directory_navlink := types_.Navlink(title="title 1", link=None)), ) ], (invalid_msg := "invalid level"), @@ -38,7 +38,7 @@ types_.TableRow( level=0, path=(test_path), - navlink=(test_navlink), + navlink=directory_navlink, ) ], invalid_msg, @@ -49,7 +49,7 @@ types_.TableRow( level=2, path=(test_path), - navlink=(test_navlink), + navlink=directory_navlink, ) ], (level_difference_msg := "level difference"), @@ -60,21 +60,37 @@ types_.TableRow( level=1, path=(test_path), - navlink=(test_navlink), + navlink=directory_navlink, ), types_.TableRow( level=3, path=(test_path), - navlink=(test_navlink), + navlink=directory_navlink, ), ], level_difference_msg, id="invalid table row level change", ), + pytest.param( + [ + types_.TableRow( + level=1, + path=(test_path), + navlink=(file_navlink := types_.Navlink(title="title 1", link="link 1")), + ), + types_.TableRow( + level=2, + path=(test_path), + navlink=(file_navlink := types_.Navlink(title="title 1", link="link 1")), + ), + ], + "invalid parent row", + id="invalid parent directory", + ), ], ) def test__validate_row_levels_invalid_rows( - table_rows: Iterable[types_.TableRow], expected_error_msg_contents: str + table_rows: list[types_.TableRow], expected_error_msg_contents: str ): """ arrange: given table rows with invalid levels @@ -146,7 +162,7 @@ def test__validate_row_levels_invalid_rows( types_.TableRow( level=2, path=("path 2"), - navlink=(types_.Navlink(title="title 2", link="link 1")), + navlink=(types_.Navlink(title="title 2", link=None)), ), types_.TableRow( level=3, @@ -163,7 +179,7 @@ def test__validate_row_levels_invalid_rows( ), ], ) -def test__validate_row_levels(table_rows: Iterable[types_.TableRow]): +def test__validate_row_levels(table_rows: list[types_.TableRow]): """ arrange: given table rows with valid levels act: when __validate_row_levels is called From f8663d054af5e7306e70c9d8ee337e0fbf961e5a Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 19 Dec 2022 18:52:53 +0800 Subject: [PATCH 022/107] add git module --- requirements.txt | 2 + src/exceptions.py | 4 + src/git.py | 185 ++++++++++++++++++++++++++++++++++++++++++++++ tox.ini | 1 + 4 files changed, 192 insertions(+) create mode 100644 src/git.py diff --git a/requirements.txt b/requirements.txt index 68da73e5..bd12996e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ pydiscourse>=1.3,<1.4 PyYAML>=6.0,<6.1 requests>=2.28,<2.29 +GitPython>=3.1.28,<3.1.30 +PyGithub>=1.57,<1.58 \ No newline at end of file diff --git a/src/exceptions.py b/src/exceptions.py index 896c1f2b..3f738a6d 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -38,3 +38,7 @@ class InvalidTableRowError(BaseError): class MigrationError(BaseError): """A problem with migration occurred.""" + + +class GitError(BaseError): + """A problem with git occurred.""" diff --git a/src/git.py b/src/git.py new file mode 100644 index 00000000..ccc36948 --- /dev/null +++ b/src/git.py @@ -0,0 +1,185 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Module for handling git repository.""" + +import re +from pathlib import Path +from uuid import uuid4 + +from github import Github + +from git.exc import GitCommandError +from git.repo import Repo + +from .exceptions import GitError, InputError + +GITHUB_HOSTNAME = "github.com" +HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") + +GITHUB_HOSTNAME = "github.com" +HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") + + +class Git: + """Client to interact with git repository.""" + + user_name = "actions-bot" + user_email = "actions-bot@users.noreply.github.com" + + def __init__(self, access_token: str, repository_path: Path) -> None: + """Construct. + + Args: + access_token: Github access token. + repository_path: Repository root where .git resides. + """ + self._repository = Repo(path=repository_path) + self._github = Github(login_or_token=access_token) + self._github_repo = self._github.get_repo( + self._get_repository_name(self._repository.remote().url) + ) + self._configure_user() + + def _get_repository_name(self, remote_url: str): + """Get repository name. + + Args: + remote_url: URL of remote repository. \ + e.g. https://github.com/canonical/upload-charm-docs.git + + Raises: + GitError if invalid remote url. + + Returns: + Git repository name. e.g. canonical/upload-charm-docs. + """ + matched_repository = HTTPS_URL_PATTERN.match(remote_url) + if not matched_repository: + raise GitError(f"No match for remote repository name {remote_url=!r}") + return matched_repository.group(1) + + def _configure_user(self): + """Configure action git profile defaults.""" + config_writer = self._repository.config_writer() + config_writer.set_value("user", "name", self.user_name) + config_writer.set_value("user", "email", self.user_email) + config_writer.release() + + def _check_branch_exists(self, branch_name: str): + """Check if branch exists on remote. + + Args: + branch_name: Branch name to check on remote. + + Returns: + True if branch already exists, False otherwise. + """ + try: + self._repository.git.fetch("origin", branch_name) + return True + except GitCommandError as exc: + if "couldn't find remote ref" in exc.stderr: + return False + raise exc + + def _merge_existing_branch(self, branch_name: str, commit_msg: str): + """Merge existing changes in current repository with specified branch with theirs strategy. + + Args: + branch_name: Base branch to merge to. + commit_msg: Commit message for current changes. + """ + temp_branch = str(uuid4()) + head = self._repository.create_head(temp_branch) + head.checkout() + self._repository.git.add(".") + self._repository.git.commit("-m", commit_msg) + + self._repository.git.checkout(branch_name) + self._repository.git.pull() + self._repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") + self._repository.git.commit("-m", commit_msg) + self._repository.git.push("-u", "origin", branch_name) + + self._repository.git.branch("-D", temp_branch) + + def _create_branch(self, branch_name: str, commit_msg: str): + """Create new branch with existing changes. + + Args: + branch_name: New branch name. + commit_msg: Commit message for current changes. 
+ """ + self._repository.git.checkout("-b", branch_name) + self._repository.git.add(".") + self._repository.git.commit("-m", commit_msg) + self._repository.git.push("--set-upstream", "origin", branch_name) + + def create_pull_request( + self, + title: str, + body: str, + branch_name: str, + commit_msg: str = "actions-bot commit", + ): + """Creates pull request or updates pull request if already existing. + + Args: + title: Pull request title. + body: Pull request body. + branch: Branch name to base Pull Request from. + commit_msg: Commit message to push changes with. Defaults to "actions-bot commit" + + Returns: + Pull request URL. None if URL is not created or updated. + """ + base = self._repository.active_branch.name + + if base == branch_name: + raise InputError("Branch name cannot be equal to base branch.") + + if not self._repository.is_dirty(untracked_files=True): + return None + + if self._check_branch_exists(branch_name=branch_name): + self._merge_existing_branch(branch_name=branch_name, commit_msg=commit_msg) + else: + self._create_branch(branch_name=branch_name, commit_msg=commit_msg) + + self._repository.git.checkout(base) + + open_pulls = self._github_repo.get_pulls(state="open", head=f"actions-bot/{branch_name}") + if not list(open_pulls): + pull_request = self._github_repo.create_pull( + title=title, body=body, base=base, head=branch_name + ) + else: + pull_request = open_pulls[0] + + return pull_request.url + + +def create_git(access_token: str, repository_path: Path): + """Create Github client. + + Args: + access_token: Github access token. + repository_path: Repository root where .git resides. + + Returns: + A github client that connected to given repository. + + Raises: + InputError: if access_token is not string or empty. + """ + if not isinstance(access_token, str): + raise InputError( + f"Invalid 'access_token' input, it must be a string, got {access_token=!r}" + ) + if not access_token: + raise InputError( + f"Invalid 'access_token' input, it must be non-empty, got {access_token=!r}" + ) + + return Git(access_token=access_token, repository_path=repository_path) diff --git a/tox.ini b/tox.ini index 01616f04..06f89c40 100644 --- a/tox.ini +++ b/tox.ini @@ -34,6 +34,7 @@ commands = [testenv:lint] description = Check code against coding style standards deps = + -r{toxinidir}/requirements.txt black flake8<6.0.0 flake8-docstrings>=1.6 From 6f216a7727e5285da835e7a4a22839868f10a95f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 20 Dec 2022 16:39:32 +0800 Subject: [PATCH 023/107] pull request module --- .codespellignore | 1 + src/git.py | 185 --------------- src/pull_request.py | 178 ++++++++++++++ tests/unit/conftest.py | 35 +++ tests/unit/test_pull_request.py | 406 ++++++++++++++++++++++++++++++++ tox.ini | 4 +- 6 files changed, 623 insertions(+), 186 deletions(-) create mode 100644 .codespellignore delete mode 100644 src/git.py create mode 100644 src/pull_request.py create mode 100644 tests/unit/test_pull_request.py diff --git a/.codespellignore b/.codespellignore new file mode 100644 index 00000000..f8c7c588 --- /dev/null +++ b/.codespellignore @@ -0,0 +1 @@ +pullrequest diff --git a/src/git.py b/src/git.py deleted file mode 100644 index ccc36948..00000000 --- a/src/git.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright 2022 Canonical Ltd. -# See LICENSE file for licensing details. 
- -"""Module for handling git repository.""" - -import re -from pathlib import Path -from uuid import uuid4 - -from github import Github - -from git.exc import GitCommandError -from git.repo import Repo - -from .exceptions import GitError, InputError - -GITHUB_HOSTNAME = "github.com" -HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") - -GITHUB_HOSTNAME = "github.com" -HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") - - -class Git: - """Client to interact with git repository.""" - - user_name = "actions-bot" - user_email = "actions-bot@users.noreply.github.com" - - def __init__(self, access_token: str, repository_path: Path) -> None: - """Construct. - - Args: - access_token: Github access token. - repository_path: Repository root where .git resides. - """ - self._repository = Repo(path=repository_path) - self._github = Github(login_or_token=access_token) - self._github_repo = self._github.get_repo( - self._get_repository_name(self._repository.remote().url) - ) - self._configure_user() - - def _get_repository_name(self, remote_url: str): - """Get repository name. - - Args: - remote_url: URL of remote repository. \ - e.g. https://github.com/canonical/upload-charm-docs.git - - Raises: - GitError if invalid remote url. - - Returns: - Git repository name. e.g. canonical/upload-charm-docs. - """ - matched_repository = HTTPS_URL_PATTERN.match(remote_url) - if not matched_repository: - raise GitError(f"No match for remote repository name {remote_url=!r}") - return matched_repository.group(1) - - def _configure_user(self): - """Configure action git profile defaults.""" - config_writer = self._repository.config_writer() - config_writer.set_value("user", "name", self.user_name) - config_writer.set_value("user", "email", self.user_email) - config_writer.release() - - def _check_branch_exists(self, branch_name: str): - """Check if branch exists on remote. - - Args: - branch_name: Branch name to check on remote. - - Returns: - True if branch already exists, False otherwise. - """ - try: - self._repository.git.fetch("origin", branch_name) - return True - except GitCommandError as exc: - if "couldn't find remote ref" in exc.stderr: - return False - raise exc - - def _merge_existing_branch(self, branch_name: str, commit_msg: str): - """Merge existing changes in current repository with specified branch with theirs strategy. - - Args: - branch_name: Base branch to merge to. - commit_msg: Commit message for current changes. - """ - temp_branch = str(uuid4()) - head = self._repository.create_head(temp_branch) - head.checkout() - self._repository.git.add(".") - self._repository.git.commit("-m", commit_msg) - - self._repository.git.checkout(branch_name) - self._repository.git.pull() - self._repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") - self._repository.git.commit("-m", commit_msg) - self._repository.git.push("-u", "origin", branch_name) - - self._repository.git.branch("-D", temp_branch) - - def _create_branch(self, branch_name: str, commit_msg: str): - """Create new branch with existing changes. - - Args: - branch_name: New branch name. - commit_msg: Commit message for current changes. 
- """ - self._repository.git.checkout("-b", branch_name) - self._repository.git.add(".") - self._repository.git.commit("-m", commit_msg) - self._repository.git.push("--set-upstream", "origin", branch_name) - - def create_pull_request( - self, - title: str, - body: str, - branch_name: str, - commit_msg: str = "actions-bot commit", - ): - """Creates pull request or updates pull request if already existing. - - Args: - title: Pull request title. - body: Pull request body. - branch: Branch name to base Pull Request from. - commit_msg: Commit message to push changes with. Defaults to "actions-bot commit" - - Returns: - Pull request URL. None if URL is not created or updated. - """ - base = self._repository.active_branch.name - - if base == branch_name: - raise InputError("Branch name cannot be equal to base branch.") - - if not self._repository.is_dirty(untracked_files=True): - return None - - if self._check_branch_exists(branch_name=branch_name): - self._merge_existing_branch(branch_name=branch_name, commit_msg=commit_msg) - else: - self._create_branch(branch_name=branch_name, commit_msg=commit_msg) - - self._repository.git.checkout(base) - - open_pulls = self._github_repo.get_pulls(state="open", head=f"actions-bot/{branch_name}") - if not list(open_pulls): - pull_request = self._github_repo.create_pull( - title=title, body=body, base=base, head=branch_name - ) - else: - pull_request = open_pulls[0] - - return pull_request.url - - -def create_git(access_token: str, repository_path: Path): - """Create Github client. - - Args: - access_token: Github access token. - repository_path: Repository root where .git resides. - - Returns: - A github client that connected to given repository. - - Raises: - InputError: if access_token is not string or empty. - """ - if not isinstance(access_token, str): - raise InputError( - f"Invalid 'access_token' input, it must be a string, got {access_token=!r}" - ) - if not access_token: - raise InputError( - f"Invalid 'access_token' input, it must be non-empty, got {access_token=!r}" - ) - - return Git(access_token=access_token, repository_path=repository_path) diff --git a/src/pull_request.py b/src/pull_request.py new file mode 100644 index 00000000..5db52eb5 --- /dev/null +++ b/src/pull_request.py @@ -0,0 +1,178 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Module for handling git repository.""" + +import re +import typing +from uuid import uuid4 + +from git import GitCommandError +from git.repo import Repo +from github import Github +from github.Repository import Repository + +from .exceptions import GitError, InputError + +GITHUB_HOSTNAME = "github.com" +HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") +ACTIONS_USER_NAME = "actions-bot" +ACTIONS_USER_EMAIL = "actions-bot@users.noreply.github.com" +ACTIONS_COMMIT_MESSAGE = "migrate docs from server" +ACTIONS_PULL_REQUEST_TITLE = "[docs] Migrate charm docs" +ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" + + +def _get_repository_name(remote_url: str): + """Get repository name. + + Args: + remote_url: URL of remote repository. \ + e.g. https://github.com/canonical/upload-charm-docs.git + + Raises: + GitError if invalid remote url. + + Returns: + Git repository name. e.g. canonical/upload-charm-docs. 
+ """ + matched_repository = HTTPS_URL_PATTERN.match(remote_url) + if not matched_repository: + raise GitError(f"No match for remote repository name {remote_url=!r}") + return matched_repository.group(1) + + +def _configure_user(repository: Repo): + """Configure action git profile defaults.""" + config_writer = repository.config_writer() + config_writer.set_value("user", "name", ACTIONS_USER_NAME) + config_writer.set_value("user", "email", ACTIONS_USER_EMAIL) + config_writer.release() + + +def _check_branch_exists(repository: Repo, branch_name: str): + """Check if branch exists on remote. + + Args: + branch_name: Branch name to check on remote. + + Returns: + True if branch already exists, False otherwise. + """ + try: + repository.git.fetch("origin", branch_name) + return True + except GitCommandError as exc: + if "couldn't find remote ref" in exc.stderr: + return False + raise exc + + +def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str): + """Merge existing changes in current repository with specified branch with theirs strategy. + + Args: + repository: Current repository. + branch_name: Base branch to merge to. + commit_msg: Commit message for current changes. + """ + temp_branch = str(uuid4()) + head = repository.create_head(temp_branch) + head.checkout() + repository.git.add(".") + repository.git.commit("-m", f"'{commit_msg}'") + + repository.git.checkout(branch_name) + repository.git.pull() + repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") + repository.git.commit("-m", f"'{commit_msg}'") + repository.git.push("-u", "origin", branch_name) + + repository.git.branch("-D", temp_branch) + + +def _create_branch(repository: Repo, branch_name: str, commit_msg: str): + """Create new branch with existing changes. + + Args: + repository: Current repository. + branch_name: New branch name. + commit_msg: Commit message for current changes. + """ + repository.git.checkout("-b", branch_name) + repository.git.add(".") + repository.git.commit("-m", f"'{commit_msg}'") + repository.git.push("-u", "origin", branch_name) + + +def create_github_instance(access_token: typing.Any): + """Create a Github instance to handle communication with Github server. + + Args: + access_token: Access token that has permissions to open a pull request. + + Raises: + InputError: if invalid token format input. + + Returns: + A Github repository instance. + """ + if not access_token: + raise InputError( + f"Invalid 'access_token' input, it must be non-empty, got {access_token=!r}" + ) + if not isinstance(access_token, str): + raise InputError( + f"Invalid 'access_token' input, it must be a string, got {access_token=!r}" + ) + + return Github(login_or_token=access_token) + + +def create_pull_request(repository: Repo, github_repository: Repository, branch_name: str): + """Create pull request for changes in given repository path. + + Args: + access_token: Github access token. + repository_path: Repository root where .git resides. + branch_name: Pull request branch name. + + Raises: + InputError: if branch name configuration is invalid. + + Returns: + Pull request URL string. None if no pull request was created/modified. 
+ """ + base = repository.active_branch.name + if base == branch_name: + raise InputError("Branch name cannot be equal to base branch.") + + if not repository.is_dirty(untracked_files=True): + return None + + _configure_user(repository=repository) + + if _check_branch_exists(repository=repository, branch_name=branch_name): + _merge_existing_branch( + repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE + ) + else: + _create_branch( + repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE + ) + repository.git.checkout(base) + + open_pulls = github_repository.get_pulls( + state="open", head=f"{ACTIONS_USER_NAME}/{branch_name}" + ) + if not list(open_pulls): + pull_request = github_repository.create_pull( + title=ACTIONS_PULL_REQUEST_TITLE, + body=ACTIONS_PULL_REQUEST_BODY, + base=base, + head=branch_name, + ) + else: + pull_request = open_pulls[0] + + return pull_request.url diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index b03760cd..59e062ac 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -8,6 +8,7 @@ from pathlib import Path import pytest +from git.repo import Repo from src import index from src.discourse import Discourse @@ -34,3 +35,37 @@ def index_file_content(tmp_path: Path): content = "content 1" index_file.write_text(content, encoding="utf-8") return content + + +@pytest.fixture() +def upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: + """Create upstream repository.""" + upstream_path = tmp_path / "upstream" + upstream_path.mkdir() + upstream = Repo.init(upstream_path) + upstream.git.checkout("-b", "main") + (upstream_path / "index.md").touch() + upstream.git.add(".") + upstream.git.commit("-m", "'initial commit'") + + return (upstream, upstream_path) + + +@pytest.fixture() +def temp_repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: + """Create temporary repository.""" + (_, upstream_path) = upstream_repository + repo_path = tmp_path / "temp" + repo_path.mkdir() + repo = Repo.clone_from(url=upstream_path, to_path=repo_path) + return (repo, repo_path) + + +@pytest.fixture() +def repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: + """Create repository with mocked upstream.""" + (_, upstream_path) = upstream_repository + repo_path = tmp_path / "mocked" + repo_path.mkdir() + repo = Repo.clone_from(url=upstream_path, to_path=repo_path) + return (repo, repo_path) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py new file mode 100644 index 00000000..e69dbaf9 --- /dev/null +++ b/tests/unit/test_pull_request.py @@ -0,0 +1,406 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Unit tests for git.""" + +# Need access to protected functions for testing +# pylint: disable=protected-access + +import typing +from os.path import dirname +from pathlib import Path +from unittest import mock + +import pytest +from git.exc import GitCommandError +from git.repo import Repo +from github import Github +from github.PullRequest import PullRequest +from github.Repository import Repository +from github.Requester import Requester + +from src import pull_request +from src.exceptions import GitError, InputError + +from .helpers import assert_substrings_in_string + + +@pytest.mark.parametrize( + "remote_url", + [ + pytest.param("https://gitlab.com/canonical/upload-charm-docs.git", id="non-github url"), + pytest.param("http://gitlab.com/canonical/upload-charm-docs.git", id="http url"), + pytest.param("git@github.com:yanksyoon/actionrefer.git", id="ssh url"), + ], +) +def test_get_repository_name_invalid(remote_url: str): + """ + arrange: given a non-valid remote_url + act: when _get_repository_name is called + assert: GitError is raised. + """ + with pytest.raises(GitError): + pull_request._get_repository_name(remote_url=remote_url) + + +# Pylint doesn't understand how the walrus operator works +# pylint: disable=undefined-variable,unused-variable,too-many-locals +@pytest.mark.parametrize( + "remote_url, expected_repository_name", + [ + pytest.param( + "https://github.com/canonical/upload-charm-docs", + valid_url := "canonical/upload-charm-docs", + id="valid url", + ), + pytest.param( + "https://github.com/canonical/upload-charm-docs.git", + valid_url, + id="valid git url", + ), + ], +) +# pylint: enable=undefined-variable,unused-variable +def test_get_repository_name(remote_url: str, expected_repository_name: str): + """ + arrange: given a non-valid remote_url + act: when _get_repository_name is called + assert: GitError is raised. + """ + assert pull_request._get_repository_name(remote_url=remote_url) == expected_repository_name + + +def test_check_branch_exists_error(tmp_path: Path): + """ + arrange: given an invalid repository with no origin upstream + act: when _check_branch_exists is called with a branch_name that doesn't exist + assert: a GitCommandError is raised. + """ + branch_name = "branch_name" + repo = Repo.init(tmp_path) + with pytest.raises(GitCommandError): + pull_request._check_branch_exists(repo, branch_name) + + +def test_check_branch_exists_not_exist(repository: tuple[Repo, Path]): + """ + arrange: given a git repository + act: when _check_branch_exists is called with a branch_name that does not exist + assert: False is returned. + """ + (repo, _) = repository + branch_name = "no-such-branchname" + assert not pull_request._check_branch_exists(repo, branch_name) + + +def test_check_branch_exists( + upstream_repository: tuple[Repo, Path], repository: tuple[Repo, Path] +): + """ + arrange: given a local git repository and an upstream repository with a branch + act: when _check_branch_exists is called with a branch_name that exists + assert: True is returned. 
+ """ + branch_name = "branch_name" + (upstream_repo, _) = upstream_repository + upstream_repo.create_head(branch_name) + (repo, _) = repository + assert pull_request._check_branch_exists(repo, branch_name) + + +@pytest.mark.parametrize( + "existing_files, new_files, expected_files", + [ + pytest.param( + [original_file := (Path("text.txt"), "original")], + [test_file := (Path("test.txt"), "test")], + [ + original_file, + test_file, + ], + id="simple merge", + ), + pytest.param( + [original_file], + [updated_file := (Path("text.txt"), "update")], + [updated_file], + id="merge incoming", + ), + ], +) +def test_merge_existing_branch( + existing_files: list[tuple[Path, str]], + new_files: list[tuple[Path, str]], + expected_files: list[tuple[Path, str]], + upstream_repository: tuple[Repo, Path], + repository: tuple[Repo, Path], +): + """ + arrange: given a local git repository with changes and \ + a remote repository with existing branch with existing files + act: when _merge_existing_branch is called with existing branch name + assert: files are merged with expected content upstream. + """ + branch_name = "test_branch" + commit_message = "test_message" + (upstream, upstream_path) = upstream_repository + upstream_head = upstream.create_head(branch_name) + upstream_head.checkout() + for (file, content) in existing_files: + (upstream_path / file).touch() + (upstream_path / file).write_text(content, encoding="utf-8") + upstream.git.add(".") + upstream.git.commit("-m", "'add upstream'") + upstream.git.checkout("main") + (repo, repo_path) = repository + for (file, content) in new_files: + (repo_path / file).touch() + (repo_path / file).write_text(content, encoding="utf-8") + print(f"{repo_path/file}") + repo.git.fetch("origin", branch_name) + + pull_request._merge_existing_branch( + repository=repo, branch_name=branch_name, commit_msg=commit_message + ) + + upstream.git.checkout(branch_name) + for (file, content) in expected_files: + assert (upstream_path / file).is_file() + assert (upstream_path / file).read_text(encoding="utf-8") == content + + +@pytest.mark.parametrize( + "new_files", + [ + pytest.param([test_file], id="single file"), + pytest.param( + [test_file, nested_file := (Path("nested/file.txt"), "nested file content")], + id="nested file", + ), + ], +) +def test_create_branch( + new_files: list[tuple[Path, str]], + upstream_repository: tuple[Repo, Path], + repository: tuple[Repo, Path], +): + """ + arrange: given a local git repository with new files + act: when _create_branch is called with new branch name + assert: new files are created upstream. 
+ """ + branch_name = "test_branch" + (upstream, upstream_path) = upstream_repository + (repo, repo_path) = repository + for (file, content) in new_files: + Path(dirname(repo_path / file)).mkdir(parents=True, exist_ok=True) + (repo_path / file).touch() + (repo_path / file).write_text(content, encoding="utf-8") + + pull_request._create_branch(repository=repo, branch_name=branch_name, commit_msg="test_commit") + + upstream.git.checkout(branch_name) + for (file, content) in new_files: + assert (upstream_path / file).is_file() + + +@pytest.mark.parametrize( + "access_token, expected_error_msg_contents", + [ + pytest.param( + "", + (err_strs := ("invalid", "access_token", "input", "must be non-empty")), + id="No access token", + ), + pytest.param( + {}, + err_strs, + id="Invalid access token type(empty)", + ), + pytest.param( + 1234, + ("invalid", "access_token", "input", "must be a string"), + id="invalid access token type(numeric)", + ), + ], +) +def test_create_github_instance_error( + access_token: typing.Any, expected_error_msg_contents: tuple[str, ...] +): + """ + arrange: Given an invalid access token input + act: when create_github_repository_instance is called + assert: InputError is raised with invalid access token info. + """ + with pytest.raises(InputError) as exc_info: + pull_request.create_github_instance(access_token=access_token) + + assert_substrings_in_string(expected_error_msg_contents, str(exc_info.value).lower()) + + +def test_create_github_instance(): + """ + arrange: Given a valid access token + act: when create_github_repository_instance is called + assert: valid Github instance is returned. + """ + # bandit will not let hardcoded passwords pass + access_token = "valid-access-token" # nosec + assert isinstance(pull_request.create_github_instance(access_token=access_token), Github) + + +def test_create_pull_request_invalid_branch(tmp_path: Path): + """ + arrange: given a repository and a mocked github repository and a branch_name that is equal + to the base branch + act: when create_pull_request is called + assert: InputError is raised with error message. + """ + branch_name = "test-branch" + # Setting up an exiting branch requires a head in an empty repository. + # Committing an empty file allows so. + repo = Repo.init(tmp_path) + (tmp_path / "test.txt").touch() + repo.git.add(".") + repo.git.commit("-m", "test commit") + current_branch = repo.create_head(branch_name) + current_branch.checkout() + mocked_github_repo = mock.MagicMock(spec=Repository) + + with pytest.raises(InputError) as exc_info: + pull_request.create_pull_request( + repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + ) + + assert_substrings_in_string( + ("branch name", "cannot be equal", "base branch"), str(exc_info.value).lower() + ) + + +def test_create_pull_request_no_change(repository: tuple[Repo, Path]): + """ + arrange: given a repository and a mocked github repository with no changed file + act: when create_pull_request is called + assert: Nothing is returned. 
+ """ + branch_name = "test_branch_name" + (repo, _) = repository + mocked_github_repo = mock.MagicMock(spec=Repository) + + returned_pr = pull_request.create_pull_request( + repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + ) + + assert returned_pr is None + + +def test_create_pull_request_existing_branch( + repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] +): + """ + arrange: given a mocked repository with a new file and a mocked github repository \ + with an existing branch and no existing pull request + act: when create_pull_request is called + assert: a github PR link is returned. + """ + branch_name = "test_branch_name" + (repo, repo_path) = repository + test_file = "file.md" + (repo_path / test_file).touch() + (upstream, upstream_path) = upstream_repository + upstream.create_head(branch_name) + mocked_github_repo = mock.MagicMock(spec=Repository) + + pr_link = pull_request.create_pull_request( + repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + ) + + upstream.git.checkout(branch_name) + (upstream_path / test_file).is_file() + assert pr_link is not None + mocked_github_repo.get_pulls.assert_called_once_with( + state="open", + head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + ) + mocked_github_repo.create_pull.assert_called_once_with( + title=pull_request.ACTIONS_PULL_REQUEST_TITLE, + body=pull_request.ACTIONS_PULL_REQUEST_BODY, + base="main", + head=branch_name, + ) + + +def test_create_pull_request( + repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] +): + """ + arrange: given a mocked repository with a new file and a mocked github repository \ + and no existing pull request + act: when create_pull_request is called + assert: a github PR link is returned. + """ + branch_name = "test_branch_name" + (repo, repo_path) = repository + test_file = "file.md" + (repo_path / test_file).touch() + mocked_github_repo = mock.MagicMock(spec=Repository) + + pr_link = pull_request.create_pull_request( + repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + ) + + (upstream, upstream_path) = upstream_repository + upstream.git.checkout(branch_name) + (upstream_path / test_file).is_file() + assert pr_link is not None + mocked_github_repo.get_pulls.assert_called_once_with( + state="open", + head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + ) + mocked_github_repo.create_pull.assert_called_once_with( + title=pull_request.ACTIONS_PULL_REQUEST_TITLE, + body=pull_request.ACTIONS_PULL_REQUEST_BODY, + base="main", + head=branch_name, + ) + + +def test_create_pull_request_existing_pr( + repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] +): + """ + arrange: given a mocked repository with a new file and a mocked github repository \ + and no existing pull request + act: when create_pull_request is called + assert: a github PR link is returned. 
+ """ + branch_name = "test_branch_name" + test_url = "pull_request_url" + (repo, repo_path) = repository + test_file = "file.md" + (repo_path / test_file).touch() + mocked_github_repo = mock.MagicMock(spec=Repository) + mock_requester = mock.MagicMock(spec=Requester) + mocked_github_repo.get_pulls.side_effect = [ + [ + PullRequest( + requester=mock_requester, + headers={}, + attributes={"url": test_url}, + completed=False, + ) + ] + ] + + pr_link = pull_request.create_pull_request( + repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + ) + + (upstream, upstream_path) = upstream_repository + upstream.git.checkout(branch_name) + (upstream_path / test_file).is_file() + assert pr_link == test_url + mocked_github_repo.get_pulls.assert_called_once_with( + state="open", + head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + ) diff --git a/tox.ini b/tox.ini index 06f89c40..922c012e 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,7 @@ passenv = [testenv:fmt] description = Apply coding style standards to code deps = + -r{toxinidir}/requirements.txt black isort commands = @@ -60,7 +61,8 @@ commands = pydocstyle {[vars]src_path} codespell {toxinidir} --skip {toxinidir}/.git --skip {toxinidir}/.tox \ --skip {toxinidir}/build --skip {toxinidir}/lib --skip {toxinidir}/venv \ - --skip {toxinidir}/.mypy_cache --skip {toxinidir}/icon.svg + --skip {toxinidir}/.mypy_cache --skip {toxinidir}/icon.svg \ + --ignore-words {toxinidir}/.codespellignore # pflake8 wrapper supports config from pyproject.toml pflake8 {[vars]all_path} isort --check-only --diff {[vars]all_path} From 01d73a7d2d124f683ac4b1c61cfceccc88403af8 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 20 Dec 2022 20:27:01 +0800 Subject: [PATCH 024/107] add pull request to main control flow --- README.md | 10 ++ action.yaml | 11 ++ main.py | 16 ++- src/__init__.py | 32 ++++-- src/index.py | 2 +- src/pull_request.py | 49 +++++---- tests/conftest.py | 29 ++++++ tests/integration/test___init__.py | 129 +++++++++++++++++++---- tests/unit/conftest.py | 44 +++----- tests/unit/test___init__.py | 162 ++++++++++++++++++++--------- tests/unit/test_index.py | 6 +- tests/unit/test_pull_request.py | 40 +++---- 12 files changed, 376 insertions(+), 154 deletions(-) diff --git a/README.md b/README.md index 4ea73551..b6005d96 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,8 @@ charmhub. ## Getting Started +### Sync docs + 1. Create the `docs` folder in the repository. 2. Optionally, create a file `docs/index.md` for any content you would like to display above the navigation table on discourse. This content does not get @@ -53,6 +55,7 @@ charmhub. discourse_host: discourse.charmhub.io discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} + github_token: ${{ secrets.GITHUB_TOKEN }} - name: Show index page run: echo '${{ steps.publishDocumentation.outputs.index_url }}' ``` @@ -75,6 +78,13 @@ charmhub. is also available under the `index_url` output of the action. This needs to be added to the `metadata.yaml` under the `docs` key. +### Migrate docs + +1. Create a `docs` key in `metadata.yaml` with the link to the documentation on + charmhub. +2. Add the action to your desired workflow as mentioned in step 5 of + [Sync docs section](#sync-docs) + The action will now compare the discourse topics with the files and directories under the `docs` directory and make any changes based on differences. 
Additional recommended steps: diff --git a/action.yaml b/action.yaml index 0c19b792..8c4f91ac 100644 --- a/action.yaml +++ b/action.yaml @@ -34,6 +34,17 @@ inputs: default: 41 required: false type: integer + github_token: + description: | + The github access token (${{ secrets.GITHUB_TOKEN }}) to create pull request on Github. + default: ${{ github.token }} + required: true + type: string + branch_name: + description: Branch name to create pull request branch. + default: upload-charm-docs + required: false + type: string outputs: urls_with_actions: description: | diff --git a/main.py b/main.py index 3a05880a..c7968b10 100755 --- a/main.py +++ b/main.py @@ -11,10 +11,14 @@ import pathlib from functools import partial +from git.repo import Repo + from src import run from src.discourse import create_discourse +from src.pull_request import create_github, get_repository_name +# pylint: disable=too-many-locals def main(): """Execute the action.""" logging.basicConfig(level=logging.INFO) @@ -26,6 +30,8 @@ def main(): discourse_category_id = os.getenv("INPUT_DISCOURSE_CATEGORY_ID") discourse_api_username = os.getenv("INPUT_DISCOURSE_API_USERNAME") discourse_api_key = os.getenv("INPUT_DISCOURSE_API_KEY") + github_access_token = os.getenv("INPUT_GITHUB_TOKEN") + branch_name = os.getenv("INPUT_BRANCH_NAME") # Execute action create_discourse_kwargs = { @@ -34,12 +40,20 @@ def main(): "api_username": discourse_api_username, "api_key": discourse_api_key, } + base_path = pathlib.Path() discourse = create_discourse(**create_discourse_kwargs) + repo = Repo(path=base_path) + repository = get_repository_name(repo.remote().url) + github = create_github(access_token=github_access_token) + github_repo = github.get_repo(repository) urls_with_actions_dict = run( - base_path=pathlib.Path(), + base_path=base_path, discourse=discourse, dry_run=dry_run, delete_pages=delete_topics, + repo=repo, + github_repo=github_repo, + branch_name=branch_name, ) # Write output diff --git a/src/__init__.py b/src/__init__.py index ea08baff..e14f61bf 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -5,6 +5,9 @@ from pathlib import Path +from git.repo import Repo +from github.Repository import Repository + from .action import DRY_RUN_NAVLINK_LINK, FAIL_NAVLINK_LINK from .action import run_all as run_all_actions from .discourse import Discourse @@ -17,6 +20,7 @@ from .migration import get_docs_metadata from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page +from .pull_request import create_pull_request from .reconcile import run as run_reconcile from .types_ import ActionResult, Metadata @@ -68,10 +72,14 @@ def _run_reconcile( } +# pylint: disable=too-many-arguments def _run_migrate( base_path: Path, metadata: Metadata, discourse: Discourse, + repo: Repo, + github_repo: Repository, + branch_name: str | None, ) -> dict[str, str]: """Migrate existing docs from charmhub to local repository. 
@@ -85,17 +93,17 @@ def _run_migrate( index_content = contents_from_page(server_content) table_rows = navigation_table_from_page(page=server_content) file_metadata = get_docs_metadata(table_rows=table_rows, index_content=index_content) - reports = run_migrate( + run_migrate( documents=file_metadata, discourse=discourse, docs_path=base_path / DOCUMENTATION_FOLDER_NAME, ) - return { - str(report.path): report.result - for report in reports - if report.path is not None and report.result != ActionResult.FAIL - } + pr_link = create_pull_request( + repository=repo, github_repository=github_repo, branch_name=branch_name + ) + + return {pr_link: ActionResult.SUCCESS} def run( @@ -103,6 +111,9 @@ def run( discourse: Discourse, dry_run: bool, delete_pages: bool, + repo: Repo, + github_repo: Repository, + branch_name: str | None, ) -> dict[str, str]: """Interact with charmhub to upload documentation or migrate to local repository. @@ -118,7 +129,14 @@ def run( metadata = get_metadata(base_path) has_docs_dir = has_docs_directory(base_path=base_path) if metadata.docs and not has_docs_dir: - return _run_migrate(base_path=base_path, metadata=metadata, discourse=discourse) + return _run_migrate( + base_path=base_path, + metadata=metadata, + discourse=discourse, + repo=repo, + github_repo=github_repo, + branch_name=branch_name, + ) if has_docs_dir: return _run_reconcile( base_path=base_path, diff --git a/src/index.py b/src/index.py index e1230471..ab5e7b06 100644 --- a/src/index.py +++ b/src/index.py @@ -12,7 +12,7 @@ _WHITESPACE = r"\s*" _NAVIGATION_HEADER_REGEX = rf"{_WHITESPACE}# Navigation" -_INDEX_CONTENT_REGEX = r"^((.|\n)*)" +_INDEX_CONTENT_REGEX = r"^((.|\n)*\n)" _INDEX_CONTENT_PATTERN = re.compile(rf"{_INDEX_CONTENT_REGEX}(?={_NAVIGATION_HEADER_REGEX})") DOCUMENTATION_FOLDER_NAME = "docs" DOCUMENTATION_INDEX_FILENAME = "index.md" diff --git a/src/pull_request.py b/src/pull_request.py index 5db52eb5..a1d38c49 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -21,25 +21,8 @@ ACTIONS_COMMIT_MESSAGE = "migrate docs from server" ACTIONS_PULL_REQUEST_TITLE = "[docs] Migrate charm docs" ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" - - -def _get_repository_name(remote_url: str): - """Get repository name. - - Args: - remote_url: URL of remote repository. \ - e.g. https://github.com/canonical/upload-charm-docs.git - - Raises: - GitError if invalid remote url. - - Returns: - Git repository name. e.g. canonical/upload-charm-docs. - """ - matched_repository = HTTPS_URL_PATTERN.match(remote_url) - if not matched_repository: - raise GitError(f"No match for remote repository name {remote_url=!r}") - return matched_repository.group(1) +PR_LINK_NO_CHANGE = "" +DEFAULT_BRANCH_NAME = "upload-charm-docs" def _configure_user(repository: Repo): @@ -105,7 +88,26 @@ def _create_branch(repository: Repo, branch_name: str, commit_msg: str): repository.git.push("-u", "origin", branch_name) -def create_github_instance(access_token: typing.Any): +def get_repository_name(remote_url: str): + """Get repository name. + + Args: + remote_url: URL of remote repository. \ + e.g. https://github.com/canonical/upload-charm-docs.git + + Raises: + GitError if invalid remote url. + + Returns: + Git repository name. e.g. canonical/upload-charm-docs. 
+ """ + matched_repository = HTTPS_URL_PATTERN.match(remote_url) + if not matched_repository: + raise GitError(f"No match for remote repository name {remote_url=!r}") + return matched_repository.group(1) + + +def create_github(access_token: typing.Any): """Create a Github instance to handle communication with Github server. Args: @@ -129,7 +131,9 @@ def create_github_instance(access_token: typing.Any): return Github(login_or_token=access_token) -def create_pull_request(repository: Repo, github_repository: Repository, branch_name: str): +def create_pull_request( + repository: Repo, github_repository: Repository, branch_name: str | None +) -> str: """Create pull request for changes in given repository path. Args: @@ -143,12 +147,13 @@ def create_pull_request(repository: Repo, github_repository: Repository, branch_ Returns: Pull request URL string. None if no pull request was created/modified. """ + branch_name = branch_name or DEFAULT_BRANCH_NAME base = repository.active_branch.name if base == branch_name: raise InputError("Branch name cannot be equal to base branch.") if not repository.is_dirty(untracked_files=True): - return None + return PR_LINK_NO_CHANGE _configure_user(repository=repository) diff --git a/tests/conftest.py b/tests/conftest.py index c4f834c3..8a8e0ec9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,3 +2,32 @@ # See LICENSE file for licensing details. """Fixtures for all tests.""" + +from pathlib import Path + +import pytest +from git.repo import Repo + + +@pytest.fixture(name="upstream_repository") +def fixture_upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: + """Create upstream repository.""" + upstream_path = tmp_path / "upstream" + upstream_path.mkdir() + upstream = Repo.init(upstream_path) + upstream.git.checkout("-b", "main") + (upstream_path / ".gitkeep").touch() + upstream.git.add(".") + upstream.git.commit("-m", "'initial commit'") + + return (upstream, upstream_path) + + +@pytest.fixture(name="repository") +def repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: + """Create repository with mocked upstream.""" + (_, upstream_path) = upstream_repository + repo_path = tmp_path / "mocked" + repo_path.mkdir() + repo = Repo.clone_from(url=upstream_path, to_path=repo_path) + return (repo, repo_path) diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index d7daf337..742a576a 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -9,9 +9,12 @@ import logging from itertools import chain from pathlib import Path +from unittest import mock from urllib.parse import urlparse import pytest +from git.repo import Repo +from github.Repository import Repository from src import exceptions, index, metadata, reconcile, run from src.discourse import Discourse @@ -22,7 +25,11 @@ @pytest.mark.asyncio -async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogCaptureFixture): +async def test_run( + discourse_api: Discourse, + caplog: pytest.LogCaptureFixture, + repository: tuple[Repo, Path], +): """ arrange: given running discourse server act: when run is called with: @@ -57,12 +64,20 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC 13. the documentation page is deleted 14. 
an index page is not updated """ + (repo, repo_path) = repository + mocked_github_repo = mock.MagicMock(spec=Repository) caplog.set_level(logging.INFO) - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) # 1. docs empty urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert len(urls_with_actions) == 1 @@ -75,13 +90,19 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC caplog.clear() create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=tmp_path, + path=repo_path, ) - (docs_dir := tmp_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() + (docs_dir := repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=True, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert tuple(urls_with_actions) == (index_url,) @@ -93,7 +114,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC caplog.clear() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert tuple(urls_with_actions) == (index_url,) @@ -107,7 +134,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC (doc_file := docs_dir / f"{doc_table_key}.md").write_text(doc_content_1 := "doc content 1") urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=True, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert tuple(urls_with_actions) == (index_url,) @@ -119,7 +152,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC caplog.clear() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert len(urls_with_actions) == 2 @@ -139,7 +178,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC doc_file.write_text(doc_content_2 := "doc content 2") urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=True, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -153,7 +198,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC caplog.clear() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + 
delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -172,7 +223,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC (nested_dir := docs_dir / nested_dir_table_key).mkdir() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -191,7 +248,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC ) urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert len(urls_with_actions) == 3 @@ -214,7 +277,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC nested_dir_doc_file.unlink() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=True, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, nested_dir_doc_url, index_url) @@ -231,7 +300,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC caplog.clear() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=False + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=False, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, nested_dir_doc_url, index_url) @@ -248,7 +323,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC nested_dir.rmdir() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -263,7 +344,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC doc_file.unlink() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -280,7 +367,13 @@ async def test_run(discourse_api: Discourse, tmp_path: Path, caplog: pytest.LogC index_file.unlink() urls_with_actions = run( - base_path=tmp_path, discourse=discourse_api, dry_run=False, delete_pages=True + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert (urls := tuple(urls_with_actions)) == (index_url,) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 59e062ac..43c8a5f9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -6,9 +6,11 @@ # pylint: disable=redefined-outer-name from pathlib import 
Path +from unittest import mock import pytest -from git.repo import Repo +from github.PullRequest import PullRequest +from github.Requester import Requester from src import index from src.discourse import Discourse @@ -38,34 +40,12 @@ def index_file_content(tmp_path: Path): @pytest.fixture() -def upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: - """Create upstream repository.""" - upstream_path = tmp_path / "upstream" - upstream_path.mkdir() - upstream = Repo.init(upstream_path) - upstream.git.checkout("-b", "main") - (upstream_path / "index.md").touch() - upstream.git.add(".") - upstream.git.commit("-m", "'initial commit'") - - return (upstream, upstream_path) - - -@pytest.fixture() -def temp_repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: - """Create temporary repository.""" - (_, upstream_path) = upstream_repository - repo_path = tmp_path / "temp" - repo_path.mkdir() - repo = Repo.clone_from(url=upstream_path, to_path=repo_path) - return (repo, repo_path) - - -@pytest.fixture() -def repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: - """Create repository with mocked upstream.""" - (_, upstream_path) = upstream_repository - repo_path = tmp_path / "mocked" - repo_path.mkdir() - repo = Repo.clone_from(url=upstream_path, to_path=repo_path) - return (repo, repo_path) +def mock_pull_request() -> PullRequest: + """Create mock pull request.""" + mock_requester = mock.MagicMock(spec=Requester) + return PullRequest( + requester=mock_requester, + headers={}, + attributes={"url": "test_url"}, + completed=False, + ) diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 832371dd..3decbcc8 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -10,6 +10,9 @@ from unittest import mock import pytest +from git.repo import Repo +from github.PullRequest import PullRequest +from github.Repository import Repository from src import ( DOCUMENTATION_FOLDER_NAME, @@ -24,6 +27,7 @@ run, types_, ) +from src.pull_request import DEFAULT_BRANCH_NAME from .helpers import create_metadata_yaml @@ -147,7 +151,7 @@ def test__run_reconcile_local_empty_server_error(tmp_path: Path): assert not returned_page_interactions -def test__run_migrate_server_error_index(tmp_path: Path): +def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, Path]): """ arrange: given metadata with name and docs but no docs directory and mocked discourse that raises an exception during index file fetching @@ -156,19 +160,28 @@ def test__run_migrate_server_error_index(tmp_path: Path): """ meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_discourse.retrieve_topic.side_effect = exceptions.DiscourseError + mocked_discourse.retrieve_topic.side_effect = [exceptions.DiscourseError] + mocked_github_repo = mock.MagicMock(spec=Repository) + (repo, _) = repository with pytest.raises(exceptions.ServerError) as exc: _run_migrate( base_path=tmp_path, metadata=meta, discourse=mocked_discourse, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) assert "Index page retrieval failed" == str(exc.value) -def test__run_migrate_server_error_topic(tmp_path: Path): +def test__run_migrate_server_error_topic( + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, +): """ arrange: given metadata with name and docs but no docs directory and mocked discourse that 
raises an exception during topic retrieval @@ -189,89 +202,129 @@ def test__run_migrate_server_error_topic(tmp_path: Path): meta = types_.Metadata(name="name 1", docs=index_url) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] + mocked_github_repo = mock.MagicMock(spec=Repository) + mocked_github_repo.create_pull.return_value = mock_pull_request + (repo, repo_path) = repository returned_migration_reports = _run_migrate( - base_path=tmp_path, metadata=meta, discourse=mocked_discourse + base_path=repo_path, + metadata=meta, + discourse=mocked_discourse, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) - assert returned_migration_reports == { - str(tmp_path / DOCUMENTATION_FOLDER_NAME / "index.md"): types_.ActionResult.SUCCESS - } + (upstream_repo, upstream_path) = upstream_repository + upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert (upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() + assert not (upstream_path / DOCUMENTATION_FOLDER_NAME / "path 1").exists() -def test__run_migrate(tmp_path: Path): +# pylint: disable=too-many-locals +def test__run_migrate( + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, +): """ arrange: given metadata with name and docs but no docs directory and mocked discourse act: when _run_migrate is called assert: docs are migrated and a report on migrated documents are returned. """ - index_url = "http://discourse/t/docs" index_content = """Content header. - Content body. - """ + Content body.\n""" index_table = """# Navigation | Level | Path | Navlink | | -- | -- | -- | | 1 | path-1 | [Tutorials](link-1) |""" index_page = f"{index_content}{index_table}" - meta = types_.Metadata(name="name 1", docs=index_url) + meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [ index_page, (link_content := "link 1 content"), ] + mocked_github_repo = mock.MagicMock(spec=Repository) + mocked_github_repo.create_pull.return_value = mock_pull_request + (repo, repo_path) = repository returned_migration_reports = _run_migrate( - base_path=tmp_path, metadata=meta, discourse=mocked_discourse + base_path=repo_path, + metadata=meta, + discourse=mocked_discourse, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) - assert returned_migration_reports == { - str( - index_file := tmp_path / DOCUMENTATION_FOLDER_NAME / "index.md" - ): types_.ActionResult.SUCCESS, - str( - path_file := tmp_path / DOCUMENTATION_FOLDER_NAME / "path-1.md" - ): types_.ActionResult.SUCCESS, - } + (upstream_repo, upstream_path) = upstream_repository + upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() + assert (path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1.md").is_file() assert index_file.read_text(encoding="utf-8") == index_content assert path_file.read_text(encoding="utf-8") == link_content -def test_run_no_docs_no_dir(tmp_path: Path): +# pylint: enable=too-many-locals + + +def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): """ arrange: given a path with a metadata.yaml that has no docs 
key and no docs directory and mocked discourse act: when run is called assert: InputError is raised with a guide to getting started. """ - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) + (repo, repo_path) = repository + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_github_repo = mock.MagicMock(spec=Repository) with pytest.raises(exceptions.InputError) as exc: - run(base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=False) + # run is repeated in unit tests / integration tests + # pylint: disable=duplicate-code + _ = run( + base_path=repo_path, + discourse=mocked_discourse, + dry_run=False, + delete_pages=False, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, + ) assert str(exc.value) == GETTING_STARTED -def test_run_no_docs_empty_dir(tmp_path: Path): +def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): """ arrange: given a path with a metadata.yaml that has no docs key and has empty docs directory and mocked discourse act: when run is called assert: then an index page is created with empty navigation table. """ - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) - (tmp_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() + (repo, repo_path) = repository + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) + (repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.create_topic.return_value = (url := "url 1") + mocked_github_repo = mock.MagicMock(spec=Repository) + # run is repeated in unit tests / integration tests + # pylint: disable=duplicate-code returned_page_interactions = run( - base_path=tmp_path, + base_path=repo_path, discourse=mocked_discourse, dry_run=False, delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) mocked_discourse.create_topic.assert_called_once_with( @@ -281,7 +334,12 @@ def test_run_no_docs_empty_dir(tmp_path: Path): assert returned_page_interactions == {url: types_.ActionResult.SUCCESS} -def test_run_no_docs_dir(tmp_path: Path): +# pylint: disable=too-many-locals +def test_run_no_docs_dir( + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, +): """ arrange: given a path with a metadata.yaml that has docs key and no docs directory and mocked discourse @@ -289,36 +347,46 @@ def test_run_no_docs_dir(tmp_path: Path): assert: then docs from the server is migrated into local docs path and the files created are return as the result. """ + (repo, repo_path) = repository create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n" f"{metadata.METADATA_DOCS_KEY}: docsUrl", - path=tmp_path, + path=repo_path, ) index_content = """Content header. - Content body. - """ - index_table = """Page title. - - Page description. 
- - # Navigation + Content body.\n""" + index_table = """# Navigation | Level | Path | Navlink | | -- | -- | -- | | 1 | path-1 | [empty-navlink]() | | 2 | file-1 | [file-navlink](/file-navlink) |""" - index_page = f"{index_content}\n{index_table}" + index_page = f"{index_content}{index_table}" navlink_page = "file-navlink-content" mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_page, navlink_page] + mocked_github_repo = mock.MagicMock(spec=Repository) + mocked_github_repo.create_pull.return_value = mock_pull_request - returned_migration_paths = run( - base_path=tmp_path, discourse=mocked_discourse, dry_run=False, delete_pages=False + # run is repeated in unit tests / integration tests + # pylint: disable=duplicate-code + returned_migration_reports = run( + base_path=repo_path, + discourse=mocked_discourse, + dry_run=False, + delete_pages=False, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, ) - - assert returned_migration_paths == { - str(tmp_path / index.DOCUMENTATION_FOLDER_NAME / "index.md"): types_.ActionResult.SUCCESS, - str( - tmp_path / index.DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" - ): types_.ActionResult.SUCCESS, - } + # pylint: enable=duplicate-code + + (upstream_repo, upstream_path) = upstream_repository + upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() + assert ( + path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" + ).is_file() + assert index_file.read_text(encoding="utf-8") == index_content + assert path_file.read_text(encoding="utf-8") == navlink_page diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 783da9cf..36fbd3bd 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -133,7 +133,11 @@ def test_get_metadata_yaml_retrieve_empty(tmp_path: Path): "", id="navigation table only", ), - pytest.param((content := "Page content"), "", id="page content only"), + pytest.param( + (content := "Page content\n"), + "", + id="page content only", + ), pytest.param( f"{content}" """# Navigation diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index e69dbaf9..bd523c6a 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -17,7 +17,6 @@ from github import Github from github.PullRequest import PullRequest from github.Repository import Repository -from github.Requester import Requester from src import pull_request from src.exceptions import GitError, InputError @@ -40,7 +39,7 @@ def test_get_repository_name_invalid(remote_url: str): assert: GitError is raised. """ with pytest.raises(GitError): - pull_request._get_repository_name(remote_url=remote_url) + pull_request.get_repository_name(remote_url=remote_url) # Pylint doesn't understand how the walrus operator works @@ -67,10 +66,10 @@ def test_get_repository_name(remote_url: str, expected_repository_name: str): act: when _get_repository_name is called assert: GitError is raised. 
""" - assert pull_request._get_repository_name(remote_url=remote_url) == expected_repository_name + assert pull_request.get_repository_name(remote_url=remote_url) == expected_repository_name -def test_check_branch_exists_error(tmp_path: Path): +def test__check_branch_exists_error(tmp_path: Path): """ arrange: given an invalid repository with no origin upstream act: when _check_branch_exists is called with a branch_name that doesn't exist @@ -82,7 +81,7 @@ def test_check_branch_exists_error(tmp_path: Path): pull_request._check_branch_exists(repo, branch_name) -def test_check_branch_exists_not_exist(repository: tuple[Repo, Path]): +def test__check_branch_exists_not_exist(repository: tuple[Repo, Path]): """ arrange: given a git repository act: when _check_branch_exists is called with a branch_name that does not exist @@ -93,7 +92,7 @@ def test_check_branch_exists_not_exist(repository: tuple[Repo, Path]): assert not pull_request._check_branch_exists(repo, branch_name) -def test_check_branch_exists( +def test__check_branch_exists( upstream_repository: tuple[Repo, Path], repository: tuple[Repo, Path] ): """ @@ -128,7 +127,7 @@ def test_check_branch_exists( ), ], ) -def test_merge_existing_branch( +def test__merge_existing_branch( existing_files: list[tuple[Path, str]], new_files: list[tuple[Path, str]], expected_files: list[tuple[Path, str]], @@ -179,7 +178,7 @@ def test_merge_existing_branch( ), ], ) -def test_create_branch( +def test__create_branch( new_files: list[tuple[Path, str]], upstream_repository: tuple[Repo, Path], repository: tuple[Repo, Path], @@ -233,7 +232,7 @@ def test_create_github_instance_error( assert: InputError is raised with invalid access token info. """ with pytest.raises(InputError) as exc_info: - pull_request.create_github_instance(access_token=access_token) + pull_request.create_github(access_token=access_token) assert_substrings_in_string(expected_error_msg_contents, str(exc_info.value).lower()) @@ -246,7 +245,7 @@ def test_create_github_instance(): """ # bandit will not let hardcoded passwords pass access_token = "valid-access-token" # nosec - assert isinstance(pull_request.create_github_instance(access_token=access_token), Github) + assert isinstance(pull_request.create_github(access_token=access_token), Github) def test_create_pull_request_invalid_branch(tmp_path: Path): @@ -291,7 +290,7 @@ def test_create_pull_request_no_change(repository: tuple[Repo, Path]): repository=repo, github_repository=mocked_github_repo, branch_name=branch_name ) - assert returned_pr is None + assert returned_pr == pull_request.PR_LINK_NO_CHANGE def test_create_pull_request_existing_branch( @@ -366,7 +365,9 @@ def test_create_pull_request( def test_create_pull_request_existing_pr( - repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, ): """ arrange: given a mocked repository with a new file and a mocked github repository \ @@ -375,22 +376,11 @@ def test_create_pull_request_existing_pr( assert: a github PR link is returned. 
""" branch_name = "test_branch_name" - test_url = "pull_request_url" (repo, repo_path) = repository test_file = "file.md" (repo_path / test_file).touch() mocked_github_repo = mock.MagicMock(spec=Repository) - mock_requester = mock.MagicMock(spec=Requester) - mocked_github_repo.get_pulls.side_effect = [ - [ - PullRequest( - requester=mock_requester, - headers={}, - attributes={"url": test_url}, - completed=False, - ) - ] - ] + mocked_github_repo.get_pulls.side_effect = [[mock_pull_request]] pr_link = pull_request.create_pull_request( repository=repo, github_repository=mocked_github_repo, branch_name=branch_name @@ -399,7 +389,7 @@ def test_create_pull_request_existing_pr( (upstream, upstream_path) = upstream_repository upstream.git.checkout(branch_name) (upstream_path / test_file).is_file() - assert pr_link == test_url + assert pr_link == mock_pull_request.url mocked_github_repo.get_pulls.assert_called_once_with( state="open", head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", From 9004668594bfded73616c46ee99806027072f4af Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 21 Dec 2022 09:57:23 +0800 Subject: [PATCH 025/107] add configure user test --- tests/unit/test_pull_request.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index bd523c6a..d46e307b 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -24,6 +24,21 @@ from .helpers import assert_substrings_in_string +def test__configure_user(repository: tuple[Repo, Path]): + """ + arrange: given a git repository without profile + act: when _configure_user is called + assert: default user and email are configured as profile. + """ + (repo, _) = repository + + pull_request._configure_user(repo) + + reader = repo.config_reader() + assert reader.get_value("user", "name") == pull_request.ACTIONS_USER_NAME + assert reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL + + @pytest.mark.parametrize( "remote_url", [ From 4f31f47e86cdab51d684d3c0a21cb06bc0506f65 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 21 Dec 2022 12:18:18 +0800 Subject: [PATCH 026/107] fix integration test --- tests/conftest.py | 4 ++++ tests/integration/test___init__.py | 36 ++++++++++++++++-------------- 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 8a8e0ec9..36b1b0a9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,6 +15,10 @@ def fixture_upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: upstream_path = tmp_path / "upstream" upstream_path.mkdir() upstream = Repo.init(upstream_path) + writer = upstream.config_writer() + writer.set_value("user", "name", "upstream_user") + writer.set_value("user", "email", "upstream_email") + writer.release() upstream.git.checkout("-b", "main") (upstream_path / ".gitkeep").touch() upstream.git.add(".") diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index 742a576a..72542901 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -16,7 +16,7 @@ from git.repo import Repo from github.Repository import Repository -from src import exceptions, index, metadata, reconcile, run +from src import GETTING_STARTED, exceptions, index, metadata, reconcile, run from src.discourse import Discourse from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml @@ -66,28 +66,30 @@ async def test_run( """ (repo, repo_path) = repository 
mocked_github_repo = mock.MagicMock(spec=Repository) + document_name = "name 1" caplog.set_level(logging.INFO) - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) # 1. docs empty - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_repo=mocked_github_repo, - branch_name=None, - ) - - assert len(urls_with_actions) == 1 - index_url = next(iter(urls_with_actions.keys())) - index_topic = discourse_api.retrieve_topic(url=index_url) - assert index_topic == f"{reconcile.NAVIGATION_TABLE_START}".strip() - assert index_url in caplog.text + with pytest.raises(exceptions.InputError) as exc_info: + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mocked_github_repo, + branch_name=None, + ) + + assert str(exc_info.value) == GETTING_STARTED # 2. docs with an index file in dry run mode caplog.clear() + index_url = discourse_api.create_topic( + title=f"{document_name.replace('-', ' ').title()} Documentation Overview", + content=f"{reconcile.NAVIGATION_TABLE_START}".strip(), + ) create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", path=repo_path, From e5cc3b20b52397e7080d8ccb5f0603f8a78193f6 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 23 Dec 2022 09:25:17 +0800 Subject: [PATCH 027/107] add logs --- src/migration.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/migration.py b/src/migration.py index 5fec92b1..935f176d 100644 --- a/src/migration.py +++ b/src/migration.py @@ -4,6 +4,7 @@ """Module for transforming index table rows into local files.""" import itertools +import logging import typing from pathlib import Path @@ -50,6 +51,8 @@ def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path): Returns: Migration report for gitkeep file creation. """ + logging.info("migrate meta: %s", gitkeep_meta) + path = docs_path / gitkeep_meta.path path.parent.mkdir(parents=True, exist_ok=True) path.touch() @@ -72,6 +75,8 @@ def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, Returns: Migration report for document file creation. """ + logging.info("migrate meta: %s", document_meta) + try: content = discourse.retrieve_topic(url=document_meta.link) except exceptions.DiscourseError as exc: @@ -102,6 +107,8 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path): Returns: Migration report for index file creation. 
""" + logging.info("migrate meta: %s", index_meta) + path = docs_path / index_meta.path path.parent.mkdir(parents=True, exist_ok=True) path.write_text(index_meta.content, encoding="utf-8") @@ -130,21 +137,24 @@ def _run_one( case types_.GitkeepMeta: # To help mypy (same for the rest of the asserts), it is ok if the assert does not run assert isinstance(file_meta, types_.GitkeepMeta) # nosec - return _migrate_gitkeep(gitkeep_meta=file_meta, docs_path=docs_path) + report = _migrate_gitkeep(gitkeep_meta=file_meta, docs_path=docs_path) case types_.DocumentMeta: assert isinstance(file_meta, types_.DocumentMeta) # nosec - return _migrate_document( + report = _migrate_document( document_meta=file_meta, discourse=discourse, docs_path=docs_path ) case types_.IndexDocumentMeta: assert isinstance(file_meta, types_.IndexDocumentMeta) # nosec - return _migrate_index(index_meta=file_meta, docs_path=docs_path) + report = _migrate_index(index_meta=file_meta, docs_path=docs_path) # Edge case that should not be possible. case _: # pragma: no cover raise exceptions.MigrationError( f"internal error, no implementation for migration file, {file_meta=!r}" ) + logging.info("report: %s", report) + return report + def _extract_docs_from_table_rows( table_rows: typing.Iterable[types_.TableRow], From a4e977284041f4049a43c73c3f87854f06129327 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 23 Dec 2022 09:26:32 +0800 Subject: [PATCH 028/107] change regex to string processing --- src/index.py | 15 ++++----------- tests/unit/test_index.py | 39 ++++++++++++++++++++++++--------------- 2 files changed, 28 insertions(+), 26 deletions(-) diff --git a/src/index.py b/src/index.py index ab5e7b06..326ca31e 100644 --- a/src/index.py +++ b/src/index.py @@ -3,17 +3,13 @@ """Execute the uploading of documentation.""" -import re from pathlib import Path from .discourse import Discourse from .exceptions import DiscourseError, ServerError +from .reconcile import NAVIGATION_TABLE_START from .types_ import Index, IndexFile, Metadata, Page -_WHITESPACE = r"\s*" -_NAVIGATION_HEADER_REGEX = rf"{_WHITESPACE}# Navigation" -_INDEX_CONTENT_REGEX = r"^((.|\n)*\n)" -_INDEX_CONTENT_PATTERN = re.compile(rf"{_INDEX_CONTENT_REGEX}(?={_NAVIGATION_HEADER_REGEX})") DOCUMENTATION_FOLDER_NAME = "docs" DOCUMENTATION_INDEX_FILENAME = "index.md" @@ -80,10 +76,7 @@ def contents_from_page(page: str) -> str: Returns: Index file contents. 
""" - match = _INDEX_CONTENT_PATTERN.match(page) - - if match is None: + contents = page.split(NAVIGATION_TABLE_START) + if not contents or (len(contents) == 1 and contents[0] == NAVIGATION_TABLE_START): return "" - - content = match.group(0) - return content + return contents[0] diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 36fbd3bd..bcaf92ee 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -123,31 +123,40 @@ def test_get_metadata_yaml_retrieve_empty(tmp_path: Path): "page, expected_content", [ pytest.param( - ( - nav_table := """# Navigation - - | Level | Path | Navlink | - | -- | -- | -- | - """ - ), + index.NAVIGATION_TABLE_START, "", id="navigation table only", ), pytest.param( - (content := "Page content\n"), - "", + (content := "Page content"), + content, id="page content only", ), pytest.param( - f"{content}" - """# Navigation - - | Level | Path | Navlink | - | -- | -- | -- | - """, + (multiline_content := "Page content\nWithMultiline"), + multiline_content, + id="multiline content only", + ), + pytest.param( + f"{content}{index.NAVIGATION_TABLE_START}", content, id="page with content and navigation table", ), + pytest.param( + f"{multiline_content}{index.NAVIGATION_TABLE_START}", + multiline_content, + id="page with multiline content and navigation table", + ), + pytest.param( + (separated_multiline_content := "Page content\n\nManyMultiline"), + separated_multiline_content, + id="page with seperated multiline content", + ), + pytest.param( + f"{separated_multiline_content}{index.NAVIGATION_TABLE_START}", + separated_multiline_content, + id="page with seperated multiline content and navigation table", + ), ], ) # pylint: enable=undefined-variable,unused-variable From f329061838799520326c002679bea849f891334d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 23 Dec 2022 09:29:04 +0800 Subject: [PATCH 029/107] refactor tests to use git fixtures --- tests/conftest.py | 30 ++++++++- tests/integration/test___init__.py | 102 ++++++++++++++++++++++++----- tests/unit/conftest.py | 15 ----- tests/unit/helpers.py | 10 +++ tests/unit/test___init__.py | 12 +--- tests/unit/test_pull_request.py | 50 +++++++------- 6 files changed, 155 insertions(+), 64 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 36b1b0a9..e21dd7be 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,9 +4,13 @@ """Fixtures for all tests.""" from pathlib import Path +from unittest import mock import pytest from git.repo import Repo +from github.PullRequest import PullRequest +from github.Repository import Repository +from github.Requester import Requester @pytest.fixture(name="upstream_repository") @@ -28,10 +32,34 @@ def fixture_upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: @pytest.fixture(name="repository") -def repository(upstream_repository: tuple[Repo, Path], tmp_path: Path) -> tuple[Repo, Path]: +def fixture_repository( + upstream_repository: tuple[Repo, Path], tmp_path: Path +) -> tuple[Repo, Path]: """Create repository with mocked upstream.""" (_, upstream_path) = upstream_repository repo_path = tmp_path / "mocked" repo_path.mkdir() repo = Repo.clone_from(url=upstream_path, to_path=repo_path) + repo.git.checkout("main") + repo.git.pull() return (repo, repo_path) + + +@pytest.fixture(name="mock_pull_request") +def fixture_mock_pull_request() -> PullRequest: + """Create mock pull request.""" + mock_requester = mock.MagicMock(spec=Requester) + return PullRequest( + requester=mock_requester, + headers={}, + attributes={"url": 
"test_url"}, + completed=False, + ) + + +@pytest.fixture(name="mock_github_repo") +def fixture_mock_github_repo(mock_pull_request: PullRequest): + """Create a mock github repository instance.""" + mocked_repo = mock.MagicMock(spec=Repository) + mocked_repo.create_pull.return_value = mock_pull_request + return mocked_repo diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index 72542901..ebdba121 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -7,16 +7,17 @@ # pylint: disable=too-many-arguments,too-many-locals,too-many-statements import logging +import shutil from itertools import chain from pathlib import Path -from unittest import mock from urllib.parse import urlparse import pytest from git.repo import Repo +from github.PullRequest import PullRequest from github.Repository import Repository -from src import GETTING_STARTED, exceptions, index, metadata, reconcile, run +from src import GETTING_STARTED, exceptions, index, metadata, pull_request, reconcile, run from src.discourse import Discourse from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml @@ -29,6 +30,8 @@ async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, repository: tuple[Repo, Path], + mock_github_repo: Repository, + mock_pull_request: PullRequest, ): """ arrange: given running discourse server @@ -65,7 +68,6 @@ async def test_run( 14. an index page is not updated """ (repo, repo_path) = repository - mocked_github_repo = mock.MagicMock(spec=Repository) document_name = "name 1" caplog.set_level(logging.INFO) create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) @@ -78,7 +80,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -103,7 +105,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -121,7 +123,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -141,7 +143,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -159,7 +161,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -185,7 +187,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -205,7 +207,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -230,7 +232,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -255,7 +257,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -284,7 +286,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -307,7 +309,7 @@ async def test_run( dry_run=False, delete_pages=False, repo=repo, - github_repo=mocked_github_repo, + 
github_repo=mock_github_repo, branch_name=None, ) @@ -330,7 +332,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -351,7 +353,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -374,7 +376,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, ) @@ -382,3 +384,69 @@ async def test_run( assert_substrings_in_string(chain(urls, ("Update", "'success'")), caplog.text) index_topic = discourse_api.retrieve_topic(url=index_url) assert index_content not in index_topic + + # 15. with docs dir removed on no custom branchname + caplog.clear() + doc_table_key_2 = "docs-2" + nested_dir_table_key_2 = "nested-dir-doc-2" + (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") + (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") + (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() + (nested_dir_doc_file := nested_dir / "doc.md").write_text( + (nested_dir_doc_content_2 := "nested dir doc content 2") + ) + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mock_github_repo, + branch_name=None, + ) + shutil.rmtree(docs_dir) + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mock_github_repo, + branch_name=None, + ) + + repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + assert tuple(urls_with_actions) == (mock_pull_request.url,) + assert index_file.read_text(encoding="utf-8") == index_content + assert doc_file.read_text(encoding="utf-8") == doc_content_3 + assert (nested_dir / f"{nested_dir_table_key_2}-doc.md").read_text( + encoding="utf-8" + ) == nested_dir_doc_content_2 + + # 15. 
with docs dir removed on custom branchname + caplog.clear() + repo.git.checkout("main") + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", + path=repo_path, + ) + custom_branchname = "branchname-1" + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_repo=mock_github_repo, + branch_name=custom_branchname, + ) + + repo.git.checkout(custom_branchname) + assert tuple(urls_with_actions) == (mock_pull_request.url,) + assert index_file.read_text(encoding="utf-8") == index_content + assert doc_file.read_text(encoding="utf-8") == doc_content_3 + assert (nested_dir / f"{nested_dir_table_key_2}-doc.md").read_text( + encoding="utf-8" + ) == nested_dir_doc_content_2 diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 43c8a5f9..b03760cd 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -6,11 +6,8 @@ # pylint: disable=redefined-outer-name from pathlib import Path -from unittest import mock import pytest -from github.PullRequest import PullRequest -from github.Requester import Requester from src import index from src.discourse import Discourse @@ -37,15 +34,3 @@ def index_file_content(tmp_path: Path): content = "content 1" index_file.write_text(content, encoding="utf-8") return content - - -@pytest.fixture() -def mock_pull_request() -> PullRequest: - """Create mock pull request.""" - mock_requester = mock.MagicMock(spec=Requester) - return PullRequest( - requester=mock_requester, - headers={}, - attributes={"url": "test_url"}, - completed=False, - ) diff --git a/tests/unit/helpers.py b/tests/unit/helpers.py index 0747a542..7797c0b6 100644 --- a/tests/unit/helpers.py +++ b/tests/unit/helpers.py @@ -6,6 +6,8 @@ import typing from pathlib import Path +from git.repo import Repo + from src import metadata @@ -43,3 +45,11 @@ def path_to_markdown(path: Path) -> Path: Path with last path being a markdown file. """ return Path(f"{path}.md") + + +def create_repository_author(repo: Repo) -> None: + """Create repository author""" + writer = repo.config_writer() + writer.set_value("user", "name", "repo_user") + writer.set_value("user", "email", "repo_email") + writer.release() diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 3decbcc8..75b57c27 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -235,11 +235,8 @@ def test__run_migrate( """ index_content = """Content header. - Content body.\n""" - index_table = """# Navigation - - | Level | Path | Navlink | - | -- | -- | -- | + Content body.""" + index_table = f"""{index.NAVIGATION_TABLE_START} | 1 | path-1 | [Tutorials](link-1) |""" index_page = f"{index_content}{index_table}" meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") @@ -355,10 +352,7 @@ def test_run_no_docs_dir( index_content = """Content header. 
Content body.\n""" - index_table = """# Navigation - - | Level | Path | Navlink | - | -- | -- | -- | + index_table = f"""{index.NAVIGATION_TABLE_START} | 1 | path-1 | [empty-navlink]() | | 2 | file-1 | [file-navlink](/file-navlink) |""" index_page = f"{index_content}{index_table}" diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index d46e307b..eeb4af00 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -21,7 +21,7 @@ from src import pull_request from src.exceptions import GitError, InputError -from .helpers import assert_substrings_in_string +from .helpers import assert_substrings_in_string, create_repository_author def test__configure_user(repository: tuple[Repo, Path]): @@ -170,8 +170,8 @@ def test__merge_existing_branch( for (file, content) in new_files: (repo_path / file).touch() (repo_path / file).write_text(content, encoding="utf-8") - print(f"{repo_path/file}") repo.git.fetch("origin", branch_name) + create_repository_author(repo) pull_request._merge_existing_branch( repository=repo, branch_name=branch_name, commit_msg=commit_message @@ -210,6 +210,7 @@ def test__create_branch( Path(dirname(repo_path / file)).mkdir(parents=True, exist_ok=True) (repo_path / file).touch() (repo_path / file).write_text(content, encoding="utf-8") + create_repository_author(repo) pull_request._create_branch(repository=repo, branch_name=branch_name, commit_msg="test_commit") @@ -263,7 +264,7 @@ def test_create_github_instance(): assert isinstance(pull_request.create_github(access_token=access_token), Github) -def test_create_pull_request_invalid_branch(tmp_path: Path): +def test_create_pull_request_invalid_branch(tmp_path: Path, mock_github_repo: Repository): """ arrange: given a repository and a mocked github repository and a branch_name that is equal to the base branch @@ -274,16 +275,16 @@ def test_create_pull_request_invalid_branch(tmp_path: Path): # Setting up an exiting branch requires a head in an empty repository. # Committing an empty file allows so. 
repo = Repo.init(tmp_path) + create_repository_author(repo) (tmp_path / "test.txt").touch() repo.git.add(".") repo.git.commit("-m", "test commit") current_branch = repo.create_head(branch_name) current_branch.checkout() - mocked_github_repo = mock.MagicMock(spec=Repository) with pytest.raises(InputError) as exc_info: pull_request.create_pull_request( - repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) assert_substrings_in_string( @@ -291,7 +292,9 @@ def test_create_pull_request_invalid_branch(tmp_path: Path): ) -def test_create_pull_request_no_change(repository: tuple[Repo, Path]): +def test_create_pull_request_no_change( + repository: tuple[Repo, Path], mock_github_repo: Repository +): """ arrange: given a repository and a mocked github repository with no changed file act: when create_pull_request is called @@ -299,17 +302,18 @@ def test_create_pull_request_no_change(repository: tuple[Repo, Path]): """ branch_name = "test_branch_name" (repo, _) = repository - mocked_github_repo = mock.MagicMock(spec=Repository) returned_pr = pull_request.create_pull_request( - repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) assert returned_pr == pull_request.PR_LINK_NO_CHANGE def test_create_pull_request_existing_branch( - repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_github_repo: Repository, ): """ arrange: given a mocked repository with a new file and a mocked github repository \ @@ -323,20 +327,20 @@ def test_create_pull_request_existing_branch( (repo_path / test_file).touch() (upstream, upstream_path) = upstream_repository upstream.create_head(branch_name) - mocked_github_repo = mock.MagicMock(spec=Repository) + mock_github_repo = mock.MagicMock(spec=Repository) pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) upstream.git.checkout(branch_name) (upstream_path / test_file).is_file() assert pr_link is not None - mocked_github_repo.get_pulls.assert_called_once_with( + mock_github_repo.get_pulls.assert_called_once_with( state="open", head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", ) - mocked_github_repo.create_pull.assert_called_once_with( + mock_github_repo.create_pull.assert_called_once_with( title=pull_request.ACTIONS_PULL_REQUEST_TITLE, body=pull_request.ACTIONS_PULL_REQUEST_BODY, base="main", @@ -345,7 +349,9 @@ def test_create_pull_request_existing_branch( def test_create_pull_request( - repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path] + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_github_repo: Repository, ): """ arrange: given a mocked repository with a new file and a mocked github repository \ @@ -357,21 +363,20 @@ def test_create_pull_request( (repo, repo_path) = repository test_file = "file.md" (repo_path / test_file).touch() - mocked_github_repo = mock.MagicMock(spec=Repository) pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) (upstream, upstream_path) = upstream_repository 
upstream.git.checkout(branch_name) (upstream_path / test_file).is_file() assert pr_link is not None - mocked_github_repo.get_pulls.assert_called_once_with( + mock_github_repo.get_pulls.assert_called_once_with( state="open", head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", ) - mocked_github_repo.create_pull.assert_called_once_with( + mock_github_repo.create_pull.assert_called_once_with( title=pull_request.ACTIONS_PULL_REQUEST_TITLE, body=pull_request.ACTIONS_PULL_REQUEST_BODY, base="main", @@ -382,6 +387,7 @@ def test_create_pull_request( def test_create_pull_request_existing_pr( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], + mock_github_repo: Repository, mock_pull_request: PullRequest, ): """ @@ -392,20 +398,20 @@ def test_create_pull_request_existing_pr( """ branch_name = "test_branch_name" (repo, repo_path) = repository + create_repository_author(repo) test_file = "file.md" (repo_path / test_file).touch() - mocked_github_repo = mock.MagicMock(spec=Repository) - mocked_github_repo.get_pulls.side_effect = [[mock_pull_request]] + mock_github_repo.get_pulls.side_effect = [[mock_pull_request]] pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mocked_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) (upstream, upstream_path) = upstream_repository upstream.git.checkout(branch_name) (upstream_path / test_file).is_file() assert pr_link == mock_pull_request.url - mocked_github_repo.get_pulls.assert_called_once_with( + mock_github_repo.get_pulls.assert_called_once_with( state="open", head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", ) From 22cd4c2ec47893328b12c9b5d4b44e2eb2fb2fdf Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 28 Dec 2022 14:47:31 +0800 Subject: [PATCH 030/107] makke github access token optional for migration flow --- README.md | 152 +++++++++++++++++------------ action.yaml | 3 +- main.py | 6 +- src/__init__.py | 25 ++++- src/docs_directory.py | 4 +- src/index.py | 2 - src/migration.py | 18 +++- src/pull_request.py | 76 ++++++++++++--- tests/conftest.py | 33 +++++++ tests/integration/test___init__.py | 64 +++++++----- tests/unit/test___init__.py | 34 +++---- tests/unit/test_docs_directory.py | 2 +- tests/unit/test_pull_request.py | 53 ++++++++-- 13 files changed, 324 insertions(+), 148 deletions(-) diff --git a/README.md b/README.md index b6005d96..0eba676b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Upload Charm Documentation -*This action is still in alpha, breaking changes could occur.* +_This action is still in alpha, breaking changes could occur._ This action automates uploading documentation from the `docs` folder in a repository to discourse which is how the charm documentation is published to @@ -12,85 +12,107 @@ charmhub. 1. Create the `docs` folder in the repository. 2. Optionally, create a file `docs/index.md` for any content you would like to - display above the navigation table on discourse. This content does not get - published to charmhub and is only visible on discourse. + display above the navigation table on discourse. This content does not get + published to charmhub and is only visible on discourse. 3. Within the `docs` folder, create directories for page groups (e.g., for all - tutorials) and markdown files (`*.md`) for individual pages. 
On charmhub, - the groupings on the navigation panel will be named based on the name of - the directory after replacing `_` and `-` with spaces and appliying the - [`str.title`](https://docs.python.org/3/library/stdtypes.html#str.title) - function to it. The name of pages is based on whatever of the following is - available, in order: (1) the first level 1 heading (e.g., `# `) in - the file, the first line in the file or the name of the file treated in the - same way as the name of groupings. - - If you have existing documentation on discourse, you can retrieve the - markdown version by changing the link to the topic in your browser from - `https://discourse.charmhub.io/t//` to - `https://discourse.charmhub.io/raw/`. *Future plans for this - action include automating this migration by pulling the content down and - creating a PR for you to review in the repository.* - - Note that the action may change the order of how groups and pages are - displayed in the navigation pane. The action will sort them alphabetically. + tutorials) and markdown files (`*.md`) for individual pages. On charmhub, + the groupings on the navigation panel will be named based on the name of + the directory after replacing `_` and `-` with spaces and appliying the + [`str.title`](https://docs.python.org/3/library/stdtypes.html#str.title) + function to it. The name of pages is based on whatever of the following is + available, in order: (1) the first level 1 heading (e.g., `# `) in + the file, the first line in the file or the name of the file treated in the + same way as the name of groupings. + + If you have existing documentation on discourse, you can retrieve the + markdown version by changing the link to the topic in your browser from + `https://discourse.charmhub.io/t//` to + `https://discourse.charmhub.io/raw/`. _Future plans for this + action include automating this migration by pulling the content down and + creating a PR for you to review in the repository._ + + Note that the action may change the order of how groups and pages are + displayed in the navigation pane. The action will sort them alphabetically. + 4. Optionally, remove the current `docs` key from `metadata.yaml` if you would - like the action to create its own topics on discourse rather than re-use - any existing topics. This means that if, for some reason, you don't like - what the action does, you can easily revert back to the previous - documentation. Be sure to file an issue with the reason if the action does - something unexpected or you would prefer it to do something different. + like the action to create its own topics on discourse rather than re-use + any existing topics. This means that if, for some reason, you don't like + what the action does, you can easily revert back to the previous + documentation. Be sure to file an issue with the reason if the action does + something unexpected or you would prefer it to do something different. 5. Add this action to your desired workflow. 
For example: - ```yaml - jobs: - publish-docs: - name: Publish docs - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v3 - - name: Publish documentation - uses: canonical/upload-charm-docs@main - id: publishDocumentation - with: - discourse_host: discourse.charmhub.io - discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} - discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} - github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Show index page - run: echo '${{ steps.publishDocumentation.outputs.index_url }}' - ``` - - This action requires an API username and key to discourse. For Canonical - staff, please file a ticket with IS to request one. Note that there is a - rate limit on the number of topics that can be created by a user per day on - discourse. If you encounter this issue, the action will fail and report - that as the reason. It may help to space out adopting this action if you - are planning to use it for multiple charms or to use different users for - each charm. Note that other rate limits also apply which is why execution - might look like it is stalled for a short period and then resume. The - action will gracefully wait in case of throttling up to a maximum of 10 - minutes. - - There is a nice parameter, `dry_run`, which will do everything except - make changes on discourse and log what would have happened. This will help - you see what the action would have done. + ```yaml + jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + - name: Publish documentation + uses: canonical/upload-charm-docs@main + id: publishDocumentation + with: + discourse_host: discourse.charmhub.io + discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} + discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} + - name: Show index page + run: echo '${{ steps.publishDocumentation.outputs.index_url }}' + ``` + + This action requires an API username and key to discourse. For Canonical + staff, please file a ticket with IS to request one. Note that there is a + rate limit on the number of topics that can be created by a user per day on + discourse. If you encounter this issue, the action will fail and report + that as the reason. It may help to space out adopting this action if you + are planning to use it for multiple charms or to use different users for + each charm. Note that other rate limits also apply which is why execution + might look like it is stalled for a short period and then resume. The + action will gracefully wait in case of throttling up to a maximum of 10 + minutes. + + There is a nice parameter, `dry_run`, which will do everything except + make changes on discourse and log what would have happened. This will help + you see what the action would have done. + 6. Check the logs for the URL to the index topic that the action created. This - is also available under the `index_url` output of the action. This needs to - be added to the `metadata.yaml` under the `docs` key. + is also available under the `index_url` output of the action. This needs to + be added to the `metadata.yaml` under the `docs` key. ### Migrate docs 1. Create a `docs` key in `metadata.yaml` with the link to the documentation on - charmhub. + charmhub. 2. Add the action to your desired workflow as mentioned in step 5 of - [Sync docs section](#sync-docs) + [Sync docs section](#sync-docs) with github_token. 
For example: + + ```yaml + jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + - name: Publish documentation + uses: canonical/upload-charm-docs@main + id: publishDocumentation + with: + discourse_host: discourse.charmhub.io + discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} + discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} + github_token: ${{ secrets.GITHUB_TOKEN }} + - name: Show index page + run: echo '${{ steps.publishDocumentation.outputs.index_url }}' + ``` + + additional branch_name input can be specified to create a pull request from a specific branch name. The action will now compare the discourse topics with the files and directories under the `docs` directory and make any changes based on differences. Additional recommended steps: -* Add the action in dry run mode to run on every PR. This will mean that you +- Add the action in dry run mode to run on every PR. This will mean that you will see all the changes that would be made by the PR once you are ready to publish a new version of the charm and documentation. -* Add the action in dry run mode on publishes to `edge` to see what changes to +- Add the action in dry run mode on publishes to `edge` to see what changes to the documentation will be made once you publish to `stable`. diff --git a/action.yaml b/action.yaml index 8c4f91ac..18287de9 100644 --- a/action.yaml +++ b/action.yaml @@ -37,8 +37,9 @@ inputs: github_token: description: | The github access token (${{ secrets.GITHUB_TOKEN }}) to create pull request on Github. + Required if running in migration mode. default: ${{ github.token }} - required: true + required: false type: string branch_name: description: Branch name to create pull request branch. diff --git a/main.py b/main.py index c7968b10..74e296c2 100755 --- a/main.py +++ b/main.py @@ -15,7 +15,6 @@ from src import run from src.discourse import create_discourse -from src.pull_request import create_github, get_repository_name # pylint: disable=too-many-locals @@ -43,16 +42,13 @@ def main(): base_path = pathlib.Path() discourse = create_discourse(**create_discourse_kwargs) repo = Repo(path=base_path) - repository = get_repository_name(repo.remote().url) - github = create_github(access_token=github_access_token) - github_repo = github.get_repo(repository) urls_with_actions_dict = run( base_path=base_path, discourse=discourse, dry_run=dry_run, delete_pages=delete_topics, repo=repo, - github_repo=github_repo, + github_access_token=github_access_token, branch_name=branch_name, ) diff --git a/src/__init__.py b/src/__init__.py index e14f61bf..54b14a47 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -20,7 +20,7 @@ from .migration import get_docs_metadata from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page -from .pull_request import create_pull_request +from .pull_request import create_github, create_pull_request, get_repository_name from .reconcile import run as run_reconcile from .types_ import ActionResult, Metadata @@ -80,11 +80,21 @@ def _run_migrate( repo: Repo, github_repo: Repository, branch_name: str | None, + dry_run: bool, ) -> dict[str, str]: """Migrate existing docs from charmhub to local repository. + Args: + base_path: The base path to look for the metadata file in. + metadata: A metadata file with a link to the docs url. + discourse: A client to the documentation server. + repo: A git-binding for the current repository. 
+ github_repo: A client for communicating with github. + branch_name: The branch name to base the pull request from. + dry_run: If enabled, only log the action that would be taken. + Returns: - All the filepaths that were created with the result of that action. + A Pull Request link to the Github repository. """ index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) server_content = ( @@ -100,7 +110,7 @@ def _run_migrate( ) pr_link = create_pull_request( - repository=repo, github_repository=github_repo, branch_name=branch_name + repository=repo, github_repository=github_repo, branch_name=branch_name, dry_run=dry_run ) return {pr_link: ActionResult.SUCCESS} @@ -112,7 +122,7 @@ def run( dry_run: bool, delete_pages: bool, repo: Repo, - github_repo: Repository, + github_access_token: str | None, branch_name: str | None, ) -> dict[str, str]: """Interact with charmhub to upload documentation or migrate to local repository. @@ -122,6 +132,9 @@ def run( discourse: A client to the documentation server. dry_run: If enabled, only log the action that would be taken. delete_pages: Whether to delete pages that are no longer needed. + repo: A git-binding client for current repository. + github_access_token: A Personal Access Token(PAT) or access token with repository access. + branch_name: A branch name for creating a Pull Request. Returns: All the URLs that had an action with the result of that action. @@ -129,6 +142,9 @@ def run( metadata = get_metadata(base_path) has_docs_dir = has_docs_directory(base_path=base_path) if metadata.docs and not has_docs_dir: + repository = get_repository_name(repo.remote().url) + github = create_github(access_token=github_access_token) + github_repo = github.get_repo(repository) return _run_migrate( base_path=base_path, metadata=metadata, @@ -136,6 +152,7 @@ def run( repo=repo, github_repo=github_repo, branch_name=branch_name, + dry_run=dry_run, ) if has_docs_dir: return _run_reconcile( diff --git a/src/docs_directory.py b/src/docs_directory.py index 3b29ae69..0d32529f 100644 --- a/src/docs_directory.py +++ b/src/docs_directory.py @@ -41,7 +41,7 @@ def _calculate_level(path_relative_to_docs: Path) -> types_.Level: return len(path_relative_to_docs.parents) -def _calculate_table_path(path_relative_to_docs: Path) -> types_.TablePath: +def calculate_table_path(path_relative_to_docs: Path) -> types_.TablePath: """Calculate the table path of a path. Args: @@ -102,7 +102,7 @@ def _get_path_info(path: Path, docs_path: Path) -> types_.PathInfo: return types_.PathInfo( local_path=path, level=_calculate_level(path_relative_to_docs=path_relative_to_docs), - table_path=_calculate_table_path(path_relative_to_docs=path_relative_to_docs), + table_path=calculate_table_path(path_relative_to_docs=path_relative_to_docs), navlink_title=_calculate_navlink_title(path=path), ) diff --git a/src/index.py b/src/index.py index 326ca31e..9f94c8e7 100644 --- a/src/index.py +++ b/src/index.py @@ -77,6 +77,4 @@ def contents_from_page(page: str) -> str: Index file contents. """ contents = page.split(NAVIGATION_TABLE_START) - if not contents or (len(contents) == 1 and contents[0] == NAVIGATION_TABLE_START): - return "" return contents[0] diff --git a/src/migration.py b/src/migration.py index 935f176d..1d7f5155 100644 --- a/src/migration.py +++ b/src/migration.py @@ -10,6 +10,7 @@ from . 
import exceptions, types_ from .discourse import Discourse +from .docs_directory import calculate_table_path EMPTY_DIR_REASON = "" GITKEEP_FILE = ".gitkeep" @@ -156,6 +157,20 @@ def _run_one( return report +def _calculate_file_name(current_directory: Path, table_path: types_.TablePath) -> str: + """Calculate file name given table path from the index file and current path \ + relative to the docs directory. + + Args: + current_directory: current directory of the file relative to the docs directory. + table_path: table path of the file from the index file, of format path-to-file-filename. + + Returns: + The filename derived by removing the directory path from given table path of the file. + """ + return table_path.removeprefix(f"{calculate_table_path(current_directory)}-") + + def _extract_docs_from_table_rows( table_rows: typing.Iterable[types_.TableRow], ) -> typing.Iterable[types_.MigrationFileMeta]: @@ -203,8 +218,9 @@ def _extract_docs_from_table_rows( level = row.level else: last_dir_has_file = True + file_name = _calculate_file_name(cwd, row.path) yield types_.DocumentMeta( - path=cwd / f"{row.path}.md", link=row.navlink.link, table_row=row + path=cwd / f"{file_name}.md", link=row.navlink.link, table_row=row ) if not last_dir_has_file and last_dir_row: diff --git a/src/pull_request.py b/src/pull_request.py index a1d38c49..3bec0ab4 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -3,6 +3,7 @@ """Module for handling git repository.""" +import logging import re import typing from uuid import uuid4 @@ -22,6 +23,7 @@ ACTIONS_PULL_REQUEST_TITLE = "[docs] Migrate charm docs" ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" PR_LINK_NO_CHANGE = "" +PR_LINK_DRY_RUN = "" DEFAULT_BRANCH_NAME = "upload-charm-docs" @@ -37,6 +39,7 @@ def _check_branch_exists(repository: Repo, branch_name: str): """Check if branch exists on remote. Args: + repository: Git-binding for the current repository. branch_name: Branch name to check on remote. Returns: @@ -51,14 +54,16 @@ def _check_branch_exists(repository: Repo, branch_name: str): raise exc -def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str): +def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run: bool): """Merge existing changes in current repository with specified branch with theirs strategy. Args: - repository: Current repository. + repository: Git-binding for the current repository. branch_name: Base branch to merge to. commit_msg: Commit message for current changes. + dry_run: If enabled, only log the action that would be taken. """ + logging.info("dry run: %s, merge to existing branch %s", dry_run, branch_name) temp_branch = str(uuid4()) head = repository.create_head(temp_branch) head.checkout() @@ -69,12 +74,14 @@ def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str): repository.git.pull() repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) + + if not dry_run: + repository.git.push("-u", "origin", branch_name) repository.git.branch("-D", temp_branch) -def _create_branch(repository: Repo, branch_name: str, commit_msg: str): +def _create_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run: bool): """Create new branch with existing changes. Args: @@ -82,10 +89,40 @@ def _create_branch(repository: Repo, branch_name: str, commit_msg: str): branch_name: New branch name. 
commit_msg: Commit message for current changes. """ + logging.info("dry run: %s, create new branch %s", dry_run, branch_name) repository.git.checkout("-b", branch_name) repository.git.add(".") repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) + + if not dry_run: + repository.git.push("-u", "origin", branch_name) + + +def _create_pull_request( + github_repository: Repository, branch_name: str, base: str, dry_run: bool +): + """Create a pull request. + + Args: + github_repository: Github repository client. + branch_name: Branch name from which the pull request will be created. + base: Base branch to which the pull request will be created. + dry_run: If enabled, only log the action that would be taken. + + Returns: + The pull request URL. + """ + logging.info("dry run: %s, create pull request %s", dry_run, branch_name) + if not dry_run: + pull_request = github_repository.create_pull( + title=ACTIONS_PULL_REQUEST_TITLE, + body=ACTIONS_PULL_REQUEST_BODY, + base=base, + head=branch_name, + ) + else: + pull_request = None + return pull_request.url if pull_request is not None else PR_LINK_DRY_RUN def get_repository_name(remote_url: str): @@ -103,7 +140,7 @@ def get_repository_name(remote_url: str): """ matched_repository = HTTPS_URL_PATTERN.match(remote_url) if not matched_repository: - raise GitError(f"No match for remote repository name {remote_url=!r}") + raise GitError(f"Invalid remote repository name {remote_url=!r}") return matched_repository.group(1) @@ -132,7 +169,10 @@ def create_github(access_token: typing.Any): def create_pull_request( - repository: Repo, github_repository: Repository, branch_name: str | None + repository: Repo, + github_repository: Repository, + branch_name: str | None, + dry_run: bool, ) -> str: """Create pull request for changes in given repository path. 
@@ -159,11 +199,17 @@ def create_pull_request( if _check_branch_exists(repository=repository, branch_name=branch_name): _merge_existing_branch( - repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE + repository=repository, + branch_name=branch_name, + commit_msg=ACTIONS_COMMIT_MESSAGE, + dry_run=dry_run, ) else: _create_branch( - repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE + repository=repository, + branch_name=branch_name, + commit_msg=ACTIONS_COMMIT_MESSAGE, + dry_run=dry_run, ) repository.git.checkout(base) @@ -171,13 +217,13 @@ def create_pull_request( state="open", head=f"{ACTIONS_USER_NAME}/{branch_name}" ) if not list(open_pulls): - pull_request = github_repository.create_pull( - title=ACTIONS_PULL_REQUEST_TITLE, - body=ACTIONS_PULL_REQUEST_BODY, + pr_url = _create_pull_request( + github_repository=github_repository, + branch_name=branch_name, base=base, - head=branch_name, + dry_run=dry_run, ) else: - pull_request = open_pulls[0] + pr_url = open_pulls[0].url - return pull_request.url + return pr_url diff --git a/tests/conftest.py b/tests/conftest.py index e21dd7be..d125935d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,15 +3,19 @@ """Fixtures for all tests.""" +import typing from pathlib import Path from unittest import mock import pytest from git.repo import Repo +from github import Github from github.PullRequest import PullRequest from github.Repository import Repository from github.Requester import Requester +import src + @pytest.fixture(name="upstream_repository") def fixture_upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: @@ -63,3 +67,32 @@ def fixture_mock_github_repo(mock_pull_request: PullRequest): mocked_repo = mock.MagicMock(spec=Repository) mocked_repo.create_pull.return_value = mock_pull_request return mocked_repo + + +@pytest.fixture(name="mock_github") +def fixture_mock_github(mock_github_repo: Repository): + """Create a mock github instance.""" + mocked_github = mock.MagicMock(spec=Github) + mocked_github.get_repo.return_value = mock_github_repo + return mocked_github + + +@pytest.fixture(name="patch_get_repository_name") +def fixture_patch_get_repository_name(monkeypatch: pytest.MonkeyPatch): + """Replace get_repository_name operation to pass.""" + + def mock_get_repository_name(remote_url: str): + return remote_url + + monkeypatch.setattr(src, "get_repository_name", mock_get_repository_name) + + +@pytest.fixture(name="patch_create_github") +def fixture_patch_create_github(monkeypatch: pytest.MonkeyPatch, mock_github: Github): + """Replace create_github operation to return a mocked github client.""" + + def mock_create_github(access_token: typing.Any): + del access_token + return mock_github + + monkeypatch.setattr(src, "create_github", mock_create_github) diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index ebdba121..5285ada9 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -15,7 +15,6 @@ import pytest from git.repo import Repo from github.PullRequest import PullRequest -from github.Repository import Repository from src import GETTING_STARTED, exceptions, index, metadata, pull_request, reconcile, run from src.discourse import Discourse @@ -30,7 +29,6 @@ async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, repository: tuple[Repo, Path], - mock_github_repo: Repository, mock_pull_request: PullRequest, ): """ @@ -80,7 +78,7 @@ async def test_run( dry_run=False, 
delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -105,7 +103,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -123,7 +121,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -143,7 +141,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -161,7 +159,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -187,7 +185,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -207,7 +205,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -232,7 +230,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -257,7 +255,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -286,7 +284,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -309,7 +307,7 @@ async def test_run( dry_run=False, delete_pages=False, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -332,7 +330,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -353,7 +351,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -376,7 +374,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -388,7 +386,7 @@ async def test_run( # 15. 
with docs dir removed on no custom branchname caplog.clear() doc_table_key_2 = "docs-2" - nested_dir_table_key_2 = "nested-dir-doc-2" + nested_dir_table_key_2 = "nested-dir-2" (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() @@ -401,9 +399,10 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) + urls = tuple(urls_with_actions) shutil.rmtree(docs_dir) urls_with_actions = run( @@ -412,7 +411,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=None, ) @@ -420,11 +419,9 @@ async def test_run( assert tuple(urls_with_actions) == (mock_pull_request.url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 - assert (nested_dir / f"{nested_dir_table_key_2}-doc.md").read_text( - encoding="utf-8" - ) == nested_dir_doc_content_2 + assert nested_dir_doc_file.read_text(encoding="utf-8") == nested_dir_doc_content_2 - # 15. with docs dir removed on custom branchname + # 16. with docs dir removed on custom branchname caplog.clear() repo.git.checkout("main") create_metadata_yaml( @@ -439,7 +436,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_repo=mock_github_repo, + github_access_token="test-access-token", branch_name=custom_branchname, ) @@ -447,6 +444,19 @@ async def test_run( assert tuple(urls_with_actions) == (mock_pull_request.url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 - assert (nested_dir / f"{nested_dir_table_key_2}-doc.md").read_text( - encoding="utf-8" - ) == nested_dir_doc_content_2 + assert (nested_dir / "doc.md").read_text(encoding="utf-8") == nested_dir_doc_content_2 + + # 17. with no changes applied after migration + caplog.clear() + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_access_token="test-access-token", + branch_name=custom_branchname, + ) + + assert_substrings_in_string(chain(urls, ("Noop", "Noop", "Noop", "'success'")), caplog.text) diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 75b57c27..ac208c62 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -23,11 +23,11 @@ exceptions, index, metadata, + pull_request, reconcile, run, types_, ) -from src.pull_request import DEFAULT_BRANCH_NAME from .helpers import create_metadata_yaml @@ -38,7 +38,6 @@ def test__run_reconcile_empty_local_server(tmp_path: Path): act: when _run_reconcile is called assert: then an index page is created with empty navigation table. 
""" - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=tmp_path) meta = types_.Metadata(name="name 1", docs=None) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.create_topic.return_value = (url := "url 1") @@ -172,6 +171,7 @@ def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, repo=repo, github_repo=mocked_github_repo, branch_name=None, + dry_run=False, ) assert "Index page retrieval failed" == str(exc.value) @@ -181,6 +181,7 @@ def test__run_migrate_server_error_topic( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], mock_pull_request: PullRequest, + mock_github_repo: Repository, ): """ arrange: given metadata with name and docs but no docs directory and mocked discourse @@ -202,8 +203,6 @@ def test__run_migrate_server_error_topic( meta = types_.Metadata(name="name 1", docs=index_url) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] - mocked_github_repo = mock.MagicMock(spec=Repository) - mocked_github_repo.create_pull.return_value = mock_pull_request (repo, repo_path) = repository returned_migration_reports = _run_migrate( @@ -211,12 +210,13 @@ def test__run_migrate_server_error_topic( metadata=meta, discourse=mocked_discourse, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, + dry_run=False, ) (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} assert (upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert not (upstream_path / DOCUMENTATION_FOLDER_NAME / "path 1").exists() @@ -227,6 +227,7 @@ def test__run_migrate( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], mock_pull_request: PullRequest, + mock_github_repo: Repository, ): """ arrange: given metadata with name and docs but no docs directory and mocked discourse @@ -245,8 +246,6 @@ def test__run_migrate( index_page, (link_content := "link 1 content"), ] - mocked_github_repo = mock.MagicMock(spec=Repository) - mocked_github_repo.create_pull.return_value = mock_pull_request (repo, repo_path) = repository returned_migration_reports = _run_migrate( @@ -254,12 +253,13 @@ def test__run_migrate( metadata=meta, discourse=mocked_discourse, repo=repo, - github_repo=mocked_github_repo, + github_repo=mock_github_repo, branch_name=None, + dry_run=False, ) (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert (path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1.md").is_file() @@ -280,7 +280,6 @@ def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): (repo, repo_path) = repository create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_github_repo = mock.MagicMock(spec=Repository) with pytest.raises(exceptions.InputError) as exc: # run is repeated in unit tests / integration tests @@ -291,7 +290,7 @@ def test_run_no_docs_no_dir(repository: tuple[Repo, 
Path]): dry_run=False, delete_pages=False, repo=repo, - github_repo=mocked_github_repo, + github_access_token="test-github-token", branch_name=None, ) @@ -310,7 +309,6 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): (repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.create_topic.return_value = (url := "url 1") - mocked_github_repo = mock.MagicMock(spec=Repository) # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code @@ -320,7 +318,7 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): dry_run=False, delete_pages=True, repo=repo, - github_repo=mocked_github_repo, + github_access_token="test-github-token", branch_name=None, ) @@ -332,6 +330,8 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): # pylint: disable=too-many-locals +@pytest.mark.usefixtures("patch_get_repository_name") +@pytest.mark.usefixtures("patch_create_github") def test_run_no_docs_dir( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], @@ -359,8 +359,6 @@ def test_run_no_docs_dir( navlink_page = "file-navlink-content" mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_page, navlink_page] - mocked_github_repo = mock.MagicMock(spec=Repository) - mocked_github_repo.create_pull.return_value = mock_pull_request # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code @@ -370,13 +368,13 @@ def test_run_no_docs_dir( dry_run=False, delete_pages=False, repo=repo, - github_repo=mocked_github_repo, + github_access_token="test-github-token", branch_name=None, ) # pylint: enable=duplicate-code (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(DEFAULT_BRANCH_NAME) + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert ( diff --git a/tests/unit/test_docs_directory.py b/tests/unit/test_docs_directory.py index ea80d7af..93ad080d 100644 --- a/tests/unit/test_docs_directory.py +++ b/tests/unit/test_docs_directory.py @@ -193,7 +193,7 @@ def test__calculate_table_path( """ path = create_nested_directories_file(base_path=tmp_path, directories=directories, file=file) - returned_level = docs_directory._calculate_table_path( + returned_level = docs_directory.calculate_table_path( path_relative_to_docs=path.relative_to(tmp_path) ) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index eeb4af00..8c0b60bb 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -39,6 +39,40 @@ def test__configure_user(repository: tuple[Repo, Path]): assert reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL +def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: Repository): + """ + arrange: given a mocked github repository client and a mocked pull request + act: when _create_pull_request is called with dry_run False, + assert: a pull request link is returned. 
+ """ + assert ( + pull_request._create_pull_request( + github_repository=mock_github_repo, + branch_name="branch-1", + base="base-1", + dry_run=False, + ) + == mock_pull_request.url + ) + + +def test__create_pull_request_dry_run(mock_github_repo: Repository): + """ + arrange: given a mocked github repository client and a mocked pull request + act: when _create_pull_request is called with dry_run True, + assert: a dry run pull request link is returned. + """ + assert ( + pull_request._create_pull_request( + github_repository=mock_github_repo, + branch_name="branch-1", + base="base-1", + dry_run=True, + ) + == pull_request.PR_LINK_DRY_RUN + ) + + @pytest.mark.parametrize( "remote_url", [ @@ -174,7 +208,7 @@ def test__merge_existing_branch( create_repository_author(repo) pull_request._merge_existing_branch( - repository=repo, branch_name=branch_name, commit_msg=commit_message + repository=repo, branch_name=branch_name, commit_msg=commit_message, dry_run=False ) upstream.git.checkout(branch_name) @@ -212,7 +246,9 @@ def test__create_branch( (repo_path / file).write_text(content, encoding="utf-8") create_repository_author(repo) - pull_request._create_branch(repository=repo, branch_name=branch_name, commit_msg="test_commit") + pull_request._create_branch( + repository=repo, branch_name=branch_name, commit_msg="test_commit", dry_run=False + ) upstream.git.checkout(branch_name) for (file, content) in new_files: @@ -284,7 +320,10 @@ def test_create_pull_request_invalid_branch(tmp_path: Path, mock_github_repo: Re with pytest.raises(InputError) as exc_info: pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name + repository=repo, + github_repository=mock_github_repo, + branch_name=branch_name, + dry_run=False, ) assert_substrings_in_string( @@ -304,7 +343,7 @@ def test_create_pull_request_no_change( (repo, _) = repository returned_pr = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False ) assert returned_pr == pull_request.PR_LINK_NO_CHANGE @@ -330,7 +369,7 @@ def test_create_pull_request_existing_branch( mock_github_repo = mock.MagicMock(spec=Repository) pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False ) upstream.git.checkout(branch_name) @@ -365,7 +404,7 @@ def test_create_pull_request( (repo_path / test_file).touch() pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False ) (upstream, upstream_path) = upstream_repository @@ -404,7 +443,7 @@ def test_create_pull_request_existing_pr( mock_github_repo.get_pulls.side_effect = [[mock_pull_request]] pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name + repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False ) (upstream, upstream_path) = upstream_repository From 4e9d3db75383dd4e9f0810c7708a0a5fbc36ba6e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 28 Dec 2022 16:41:09 +0800 Subject: [PATCH 031/107] add tests for dry run mode --- tests/integration/test___init__.py | 111 ++++++++++++++++++++++++++++- 1 file 
changed, 108 insertions(+), 3 deletions(-) diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index 5285ada9..db595f86 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -13,6 +13,7 @@ from urllib.parse import urlparse import pytest +from git.exc import GitCommandError from git.repo import Repo from github.PullRequest import PullRequest @@ -25,10 +26,12 @@ @pytest.mark.asyncio +@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], mock_pull_request: PullRequest, ): """ @@ -49,6 +52,12 @@ async def test_run( 12. with the nested directory removed 13. with the documentation file removed 14. with the index file removed + 15. with no docs dir and no custom branchname provided in dry run mode + 16. with no docs dir and no custom branchname provided + 17. with no docs dir and custom branchname provided in dry run mode + 18. with no docs dir and custom branchname provided + 19. with no changes applied after migration in dry run mode + 20. with no changes applied after migration assert: then: 1. an index page is created with an empty navigation table 2. an index page is not updated @@ -64,6 +73,12 @@ async def test_run( 12. the nested directory is removed from the navigation table 13. the documentation page is deleted 14. an index page is not updated + 15. the documentation files are not pushed to default branch + 16. the documentation files are pushed to default branch + 17. the documentation files are not pushed to custom branch + 18. the documentation files are pushed to custom branch + 19. no operations are taken place + 20. no operations are taken place """ (repo, repo_path) = repository document_name = "name 1" @@ -383,7 +398,47 @@ async def test_run( index_topic = discourse_api.retrieve_topic(url=index_url) assert index_content not in index_topic - # 15. with docs dir removed on no custom branchname + # 15. with no docs dir and no custom branchname provided in dry run mode + caplog.clear() + (upstream_repo, _) = upstream_repository + doc_table_key_2 = "docs-2" + nested_dir_table_key_2 = "nested-dir-2" + (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") + (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") + (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() + (nested_dir_doc_file := nested_dir / "doc.md").write_text( + (nested_dir_doc_content_2 := "nested dir doc content 2") + ) + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_access_token="test-access-token", + branch_name=None, + ) + urls = tuple(urls_with_actions) + shutil.rmtree(docs_dir) + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=True, + delete_pages=True, + repo=repo, + github_access_token="test-access-token", + branch_name=None, + ) + + with pytest.raises(GitCommandError) as exc_info: + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + assert_substrings_in_string( + ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) + ) + assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) + + # 16. 
with no docs dir and no custom branchname provided caplog.clear() doc_table_key_2 = "docs-2" nested_dir_table_key_2 = "nested-dir-2" @@ -415,14 +470,16 @@ async def test_run( branch_name=None, ) + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert tuple(urls_with_actions) == (mock_pull_request.url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 assert nested_dir_doc_file.read_text(encoding="utf-8") == nested_dir_doc_content_2 - # 16. with docs dir removed on custom branchname + # 17. with no docs dir and custom branchname provided in dry run mode caplog.clear() + upstream_repo.git.checkout("main") repo.git.checkout("main") create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", @@ -440,13 +497,61 @@ async def test_run( branch_name=custom_branchname, ) + with pytest.raises(GitCommandError) as exc_info: + upstream_repo.git.checkout(custom_branchname) + assert_substrings_in_string( + ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) + ) + assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) + + # 18. with no docs dir and custom branchname provided + caplog.clear() + upstream_repo.git.checkout("main") + repo.git.checkout("main") + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", + path=repo_path, + ) + custom_branchname = "branchname-1" + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_access_token="test-access-token", + branch_name=custom_branchname, + ) + + upstream_repo.git.checkout(custom_branchname) repo.git.checkout(custom_branchname) assert tuple(urls_with_actions) == (mock_pull_request.url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 assert (nested_dir / "doc.md").read_text(encoding="utf-8") == nested_dir_doc_content_2 - # 17. with no changes applied after migration + # 19. with no changes applied after migration in dry run mode + caplog.clear() + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + dry_run=False, + delete_pages=True, + repo=repo, + github_access_token="test-access-token", + branch_name=custom_branchname, + ) + + with pytest.raises(GitCommandError) as exc_info: + upstream_repo.git.checkout(custom_branchname) + assert_substrings_in_string( + ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) + ) + assert_substrings_in_string(chain(urls, ("Noop", "Noop", "Noop", "'success'")), caplog.text) + + # 20. 
with no changes applied after migration caplog.clear() urls_with_actions = run( From 828c72e8260ff57c5b89083477c88cbe5866fc9d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 28 Dec 2022 16:42:06 +0800 Subject: [PATCH 032/107] add tests for dry run mode in pull request module --- tests/unit/test_pull_request.py | 125 +++++++++++++++++++++++++++++--- 1 file changed, 115 insertions(+), 10 deletions(-) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index 8c0b60bb..ed3c4ddf 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -6,6 +6,7 @@ # Need access to protected functions for testing # pylint: disable=protected-access +import logging import typing from os.path import dirname from pathlib import Path @@ -39,37 +40,43 @@ def test__configure_user(repository: tuple[Repo, Path]): assert reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL -def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: Repository): +def test__create_pull_request_dry_run( + mock_github_repo: Repository, + caplog: pytest.LogCaptureFixture, +): """ arrange: given a mocked github repository client and a mocked pull request - act: when _create_pull_request is called with dry_run False, - assert: a pull request link is returned. + act: when _create_pull_request is called with dry_run True, + assert: a dry run pull request link is returned. """ + caplog.set_level(logging.INFO) assert ( pull_request._create_pull_request( github_repository=mock_github_repo, branch_name="branch-1", base="base-1", - dry_run=False, + dry_run=True, ) - == mock_pull_request.url + == pull_request.PR_LINK_DRY_RUN ) + assert "create pull request" in caplog.text + assert f"dry run: {True}" in caplog.text -def test__create_pull_request_dry_run(mock_github_repo: Repository): +def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: Repository): """ arrange: given a mocked github repository client and a mocked pull request - act: when _create_pull_request is called with dry_run True, - assert: a dry run pull request link is returned. + act: when _create_pull_request is called with dry_run False, + assert: a pull request link is returned. """ assert ( pull_request._create_pull_request( github_repository=mock_github_repo, branch_name="branch-1", base="base-1", - dry_run=True, + dry_run=False, ) - == pull_request.PR_LINK_DRY_RUN + == mock_pull_request.url ) @@ -156,6 +163,60 @@ def test__check_branch_exists( assert pull_request._check_branch_exists(repo, branch_name) +@pytest.mark.parametrize( + "existing_files, new_files", + [ + pytest.param( + [original_file := (Path("text.txt"), "original")], + [test_file := (Path("test.txt"), "test")], + id="simple merge", + ), + ], +) +def test__merge_existing_branch_dry_run( + existing_files: list[tuple[Path, str]], + new_files: list[tuple[Path, str]], + upstream_repository: tuple[Repo, Path], + repository: tuple[Repo, Path], + caplog: pytest.LogCaptureFixture, +): + """ + arrange: given a local git repository with changes and \ + a remote repository with existing branch with existing files + act: when _merge_existing_branch is called with existing branch name \ + in dry run mode + assert: then no changes are merged, and the action is logged. 
+    """
+    caplog.set_level(logging.INFO)
+    branch_name = "test_branch"
+    commit_message = "test_message"
+    (upstream, upstream_path) = upstream_repository
+    upstream_head = upstream.create_head(branch_name)
+    upstream_head.checkout()
+    for (file, content) in existing_files:
+        (upstream_path / file).touch()
+        (upstream_path / file).write_text(content, encoding="utf-8")
+    upstream.git.add(".")
+    upstream.git.commit("-m", "'add upstream'")
+    upstream.git.checkout("main")
+    (repo, repo_path) = repository
+    for (file, content) in new_files:
+        (repo_path / file).touch()
+        (repo_path / file).write_text(content, encoding="utf-8")
+    repo.git.fetch("origin", branch_name)
+    create_repository_author(repo)
+
+    pull_request._merge_existing_branch(
+        repository=repo, branch_name=branch_name, commit_msg=commit_message, dry_run=True
+    )
+
+    upstream.git.checkout(branch_name)
+    assert f"dry run: {True}" in caplog.text
+    assert "merge to existing branch" in caplog.text
+    for (file, content) in new_files:
+        assert not (upstream_path / file).is_file()
+
+
 @pytest.mark.parametrize(
     "existing_files, new_files, expected_files",
     [
@@ -217,6 +278,50 @@ def test__merge_existing_branch(
         assert (upstream_path / file).read_text(encoding="utf-8") == content
+
+@pytest.mark.parametrize(
+    "new_files",
+    [
+        pytest.param([test_file], id="single file"),
+        pytest.param(
+            [test_file, nested_file := (Path("nested/file.txt"), "nested file content")],
+            id="nested file",
+        ),
+    ],
+)
+def test__create_branch_dry_run(
+    new_files: list[tuple[Path, str]],
+    upstream_repository: tuple[Repo, Path],
+    repository: tuple[Repo, Path],
+    caplog: pytest.LogCaptureFixture,
+):
+    """
+    arrange: given a local git repository with new files
+    act: when _create_branch is called with new branch name in dry run mode
+    assert: the action is logged and no new branch is created upstream.
+    """
+    caplog.set_level(logging.INFO)
+    branch_name = "test_branch"
+    (upstream, upstream_path) = upstream_repository
+    (repo, repo_path) = repository
+    for (file, content) in new_files:
+        Path(dirname(repo_path / file)).mkdir(parents=True, exist_ok=True)
+        (repo_path / file).touch()
+        (repo_path / file).write_text(content, encoding="utf-8")
+    create_repository_author(repo)
+
+    pull_request._create_branch(
+        repository=repo, branch_name=branch_name, commit_msg="test_commit", dry_run=True
+    )
+
+    assert f"dry run: {True}" in caplog.text
+    assert "create new branch" in caplog.text
+    with pytest.raises(GitCommandError) as exc_info:
+        upstream.git.checkout(branch_name)
+    assert_substrings_in_string(
+        ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value)
+    )
+
+
 @pytest.mark.parametrize(
     "new_files",
     [

From 646d9d98cc4f465d5f1a027b380d21cfbff4b00d Mon Sep 17 00:00:00 2001
From: charlie4284
Date: Wed, 28 Dec 2022 20:38:23 +0800
Subject: [PATCH 033/107] revert to using docker action

---
 action.yaml | 23 ++---------------------
 1 file changed, 2 insertions(+), 21 deletions(-)

diff --git a/action.yaml b/action.yaml
index 18287de9..444a6135 100644
--- a/action.yaml
+++ b/action.yaml
@@ -58,24 +58,5 @@ outputs:
     The configuration used by the action to interact with the discourse server.
runs: - using: composite - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - run: | - python -m pip install -r ${{ github.action_path }}/requirements.txt && \ - INPUT_DRY_RUN=${{ inputs.dry_run }} \ - INPUT_DELETE_TOPICS=${{ inputs.delete_topics }} \ - INPUT_DISCOURSE_HOST=${{ inputs.discourse_host }} \ - INPUT_DISCOURSE_CATEGORY_ID=${{ inputs.discourse_category_id }} \ - INPUT_DISCOURSE_API_USERNAME=${{ inputs.discourse_api_username }} \ - INPUT_DISCOURSE_API_KEY=${{ inputs.discourse_api_key }} \ - ${{ github.action_path }}/main.py - shell: bash - - uses: peter-evans/create-pull-request@v4 - with: - branch: "docs/migration-demo" - title: "[docs] Documentation migration demo" - body: "This is a test for demo purpose, do not merge." + using: docker + image: Dockerfile From 15c2c1e43c4488eb6eb1e070666205ee4fa49565 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 09:32:45 +0800 Subject: [PATCH 034/107] remove interpolation in description --- action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yaml b/action.yaml index 444a6135..02392946 100644 --- a/action.yaml +++ b/action.yaml @@ -36,7 +36,7 @@ inputs: type: integer github_token: description: | - The github access token (${{ secrets.GITHUB_TOKEN }}) to create pull request on Github. + The github access token (secrets.GITHUB_TOKEN) to create pull request on Github. Required if running in migration mode. default: ${{ github.token }} required: false From 81adaa20c0d966452739669da072a356a7abd9e6 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 09:36:15 +0800 Subject: [PATCH 035/107] add git to docker image --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index 2e884e5d..dc6680f8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,8 @@ FROM python:3.10-slim +RUN apt-get update && + apt-get install -y --no-install-recommends git + RUN mkdir /usr/src/app WORKDIR /usr/src/app COPY requirements.txt /usr/src/app From 01a4a80100edcfb67d0dc40a51b43b376cdca1cf Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 09:39:13 +0800 Subject: [PATCH 036/107] add git to docker image --- Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index dc6680f8..6dc91d64 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,6 @@ FROM python:3.10-slim -RUN apt-get update && - apt-get install -y --no-install-recommends git +RUN apt-get update && apt-get install -y --no-install-recommends git RUN mkdir /usr/src/app WORKDIR /usr/src/app From ee046e8dd03d2ea2437ad9feefc0777dbdd8116d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 09:42:17 +0800 Subject: [PATCH 037/107] fix spelling --- tests/unit/test_index.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index bcaf92ee..c02a3031 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -150,12 +150,12 @@ def test_get_metadata_yaml_retrieve_empty(tmp_path: Path): pytest.param( (separated_multiline_content := "Page content\n\nManyMultiline"), separated_multiline_content, - id="page with seperated multiline content", + id="page with separated multiline content", ), pytest.param( f"{separated_multiline_content}{index.NAVIGATION_TABLE_START}", separated_multiline_content, - id="page with seperated multiline content and navigation table", + id="page with separated multiline content and navigation 
table",
         ),
     ],
 )

From a7858ee9ac73d18af3fdf86bb45675c43a9a2080 Mon Sep 17 00:00:00 2001
From: charlie4284
Date: Thu, 29 Dec 2022 09:44:54 +0800
Subject: [PATCH 038/107] add missing docstring

---
 src/pull_request.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/pull_request.py b/src/pull_request.py
index 3bec0ab4..171bbd8d 100644
--- a/src/pull_request.py
+++ b/src/pull_request.py
@@ -88,6 +88,7 @@ def _create_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run:
         repository: Current repository.
         branch_name: New branch name.
         commit_msg: Commit message for current changes.
+        dry_run: If enabled, only log the action that would be taken.
     """
     logging.info("dry run: %s, create new branch %s", dry_run, branch_name)
     repository.git.checkout("-b", branch_name)
@@ -180,6 +181,7 @@ def create_pull_request(
         access_token: Github access token.
         repository_path: Repository root where .git resides.
         branch_name: Pull request branch name.
+        dry_run: If enabled, only log the action that would be taken.
 
     Raises:
         InputError: if branch name configuration is invalid.

From 96e2d0146fced773ac64e280913aca752a661ee0 Mon Sep 17 00:00:00 2001
From: charlie4284
Date: Thu, 29 Dec 2022 18:41:28 +0800
Subject: [PATCH 039/107] fix integration test w/ dry run mode

---
 src/pull_request.py                | 11 +++--
 tests/integration/test___init__.py | 70 ++++++++++++++++--------------
 tests/unit/test___init__.py        | 15 ++++---
 tests/unit/test_pull_request.py    |  4 +-
 4 files changed, 54 insertions(+), 46 deletions(-)

diff --git a/src/pull_request.py b/src/pull_request.py
index 171bbd8d..e4491dff 100644
--- a/src/pull_request.py
+++ b/src/pull_request.py
@@ -72,10 +72,10 @@ def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str,
     repository.git.checkout(branch_name)
     repository.git.pull()
 
-    repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit")
-    repository.git.commit("-m", f"'{commit_msg}'")
 
     if not dry_run:
+        repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit")
+        repository.git.commit("-m", f"'{commit_msg}'")
         repository.git.push("-u", "origin", branch_name)
 
     repository.git.branch("-D", temp_branch)
@@ -91,11 +91,10 @@ def _create_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run:
         dry_run: If enabled, only log the action that would be taken.
     """
     logging.info("dry run: %s, create new branch %s", dry_run, branch_name)
-    repository.git.checkout("-b", branch_name)
-    repository.git.add(".")
-    repository.git.commit("-m", f"'{commit_msg}'")
-
-    if not dry_run:
+    if not dry_run:
+        repository.git.checkout("-b", branch_name)
+        repository.git.add(".")
+        repository.git.commit("-m", f"'{commit_msg}'")
         repository.git.push("-u", "origin", branch_name)
 
 
diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py
index db595f86..fcac1769 100644
--- a/tests/integration/test___init__.py
+++ b/tests/integration/test___init__.py
@@ -81,6 +81,8 @@ async def test_run(
         20. no operations are taken place
     """
     (repo, repo_path) = repository
+    # this is an access token string for testing purposes.
+ test_access_token = "test-access-token" # nosec document_name = "name 1" caplog.set_level(logging.INFO) create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) @@ -93,7 +95,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -118,7 +120,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -136,7 +138,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -156,7 +158,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -174,7 +176,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -200,7 +202,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -220,7 +222,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -245,7 +247,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -270,7 +272,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -299,7 +301,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -322,7 +324,7 @@ async def test_run( dry_run=False, delete_pages=False, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -345,7 +347,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -366,7 +368,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -389,7 +391,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -415,7 +417,7 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) urls = tuple(urls_with_actions) @@ -427,19 +429,24 @@ async def test_run( dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) - with pytest.raises(GitCommandError) as exc_info: + with pytest.raises(GitCommandError) as exc: upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) + ("error: pathspec", "did not 
match any file(s) known to git"), str(exc.value) ) assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) # 16. with no docs dir and no custom branchname provided caplog.clear() + repo.git.checkout("--", ".") + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", + path=repo_path, + ) doc_table_key_2 = "docs-2" nested_dir_table_key_2 = "nested-dir-2" (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") @@ -454,7 +461,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) urls = tuple(urls_with_actions) @@ -466,7 +473,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=None, ) @@ -479,6 +486,7 @@ async def test_run( # 17. with no docs dir and custom branchname provided in dry run mode caplog.clear() + shutil.rmtree(docs_dir) upstream_repo.git.checkout("main") repo.git.checkout("main") create_metadata_yaml( @@ -490,22 +498,23 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, + dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=custom_branchname, ) - with pytest.raises(GitCommandError) as exc_info: + with pytest.raises(GitCommandError) as exc: upstream_repo.git.checkout(custom_branchname) assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) + ("error: pathspec", "did not match any file(s) known to git"), str(exc.value) ) assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) # 18. with no docs dir and custom branchname provided caplog.clear() + shutil.rmtree(docs_dir) upstream_repo.git.checkout("main") repo.git.checkout("main") create_metadata_yaml( @@ -520,7 +529,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=custom_branchname, ) @@ -537,18 +546,13 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, + dry_run=True, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=custom_branchname, ) - with pytest.raises(GitCommandError) as exc_info: - upstream_repo.git.checkout(custom_branchname) - assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) - ) assert_substrings_in_string(chain(urls, ("Noop", "Noop", "Noop", "'success'")), caplog.text) # 20. with no changes applied after migration @@ -560,7 +564,7 @@ async def test_run( dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-access-token", + github_access_token=test_access_token, branch_name=custom_branchname, ) diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index ac208c62..59637069 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -278,6 +278,8 @@ def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): assert: InputError is raised with a guide to getting started. """ (repo, repo_path) = repository + # this is an access token string for testing purposes. 
+ test_access_token = "test-access-token" # nosec create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) @@ -290,7 +292,7 @@ def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): dry_run=False, delete_pages=False, repo=repo, - github_access_token="test-github-token", + github_access_token=test_access_token, branch_name=None, ) @@ -305,6 +307,8 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): assert: then an index page is created with empty navigation table. """ (repo, repo_path) = repository + # this is an access token string for testing purposes. + test_access_token = "test-access-token" # nosec create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) (repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() mocked_discourse = mock.MagicMock(spec=discourse.Discourse) @@ -318,7 +322,7 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): dry_run=False, delete_pages=True, repo=repo, - github_access_token="test-github-token", + github_access_token=test_access_token, branch_name=None, ) @@ -330,8 +334,7 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): # pylint: disable=too-many-locals -@pytest.mark.usefixtures("patch_get_repository_name") -@pytest.mark.usefixtures("patch_create_github") +@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") def test_run_no_docs_dir( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], @@ -345,6 +348,8 @@ def test_run_no_docs_dir( are return as the result. """ (repo, repo_path) = repository + # this is an access token string for testing purposes. + test_access_token = "test-access-token" # nosec create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n" f"{metadata.METADATA_DOCS_KEY}: docsUrl", path=repo_path, @@ -368,7 +373,7 @@ def test_run_no_docs_dir( dry_run=False, delete_pages=False, repo=repo, - github_access_token="test-github-token", + github_access_token=test_access_token, branch_name=None, ) # pylint: enable=duplicate-code diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index ed3c4ddf..5c8776ca 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -495,7 +495,7 @@ def test_create_pull_request_existing_branch( def test_create_pull_request( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], - mock_github_repo: Repository, + mock_github_repo: mock.MagicMock, ): """ arrange: given a mocked repository with a new file and a mocked github repository \ @@ -531,7 +531,7 @@ def test_create_pull_request( def test_create_pull_request_existing_pr( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], - mock_github_repo: Repository, + mock_github_repo: mock.MagicMock, mock_pull_request: PullRequest, ): """ From 0010b225679aa62cf44143c746bcb4856d5cc223 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 19:08:35 +0800 Subject: [PATCH 040/107] fix github repo get pulls base --- src/pull_request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pull_request.py b/src/pull_request.py index e4491dff..dc169744 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -215,7 +215,7 @@ def create_pull_request( repository.git.checkout(base) open_pulls = github_repository.get_pulls( - state="open", head=f"{ACTIONS_USER_NAME}/{branch_name}" + state="open", base=f"{ACTIONS_USER_NAME}/{branch_name}" ) if not 
list(open_pulls): pr_url = _create_pull_request( From e2400687f63e7bb32d46189297103e7c1415544f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 29 Dec 2022 23:44:02 +0800 Subject: [PATCH 041/107] add git before to self test --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 6bb601ad..74164ada 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -55,7 +55,7 @@ jobs: run: echo '${{ steps.selfTestDraft.outputs.urls_with_actions }}' - name: Check draft run: | - sudo apt update && sudo apt install python3-pip + sudo apt update && sudo apt install python3-pip git pip3 install -r requirements.txt ./discourse_check_cleanup.py --action check-draft --action-kwargs '{"expected_url_results": []}' '${{ steps.selfTestDraft.outputs.urls_with_actions }}' '${{ steps.selfTestDraft.outputs.discourse_config }}' - name: Create self test From e284fecb74ce28fc507aa4caa4b16413a47ee1e2 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 10:05:14 +0800 Subject: [PATCH 042/107] fix pull requests fetching --- src/pull_request.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pull_request.py b/src/pull_request.py index dc169744..2746d7ff 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -122,7 +122,7 @@ def _create_pull_request( ) else: pull_request = None - return pull_request.url if pull_request is not None else PR_LINK_DRY_RUN + return pull_request.html_url if pull_request is not None else PR_LINK_DRY_RUN def get_repository_name(remote_url: str): @@ -215,7 +215,7 @@ def create_pull_request( repository.git.checkout(base) open_pulls = github_repository.get_pulls( - state="open", base=f"{ACTIONS_USER_NAME}/{branch_name}" + state="open", head=f"{github_repository.full_name}:{branch_name}" ) if not list(open_pulls): pr_url = _create_pull_request( @@ -225,6 +225,6 @@ def create_pull_request( dry_run=dry_run, ) else: - pr_url = open_pulls[0].url + pr_url = open_pulls[0].html_url return pr_url From cea5267dc62eee2c1d2ec69e50d2f700a7ba1f44 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 10:34:49 +0800 Subject: [PATCH 043/107] fix tests to match PR html url --- tests/conftest.py | 3 ++- tests/integration/test___init__.py | 4 ++-- tests/unit/test___init__.py | 6 +++--- tests/unit/test_pull_request.py | 10 +++++----- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d125935d..de9f9c9b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,7 +56,7 @@ def fixture_mock_pull_request() -> PullRequest: return PullRequest( requester=mock_requester, headers={}, - attributes={"url": "test_url"}, + attributes={"html_url": "test_url"}, completed=False, ) @@ -66,6 +66,7 @@ def fixture_mock_github_repo(mock_pull_request: PullRequest): """Create a mock github repository instance.""" mocked_repo = mock.MagicMock(spec=Repository) mocked_repo.create_pull.return_value = mock_pull_request + mocked_repo.full_name = "test/repository" return mocked_repo diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index fcac1769..ca77333d 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -479,7 +479,7 @@ async def test_run( upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert 
tuple(urls_with_actions) == (mock_pull_request.url,) + assert tuple(urls_with_actions) == (mock_pull_request.html_url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 assert nested_dir_doc_file.read_text(encoding="utf-8") == nested_dir_doc_content_2 @@ -535,7 +535,7 @@ async def test_run( upstream_repo.git.checkout(custom_branchname) repo.git.checkout(custom_branchname) - assert tuple(urls_with_actions) == (mock_pull_request.url,) + assert tuple(urls_with_actions) == (mock_pull_request.html_url,) assert index_file.read_text(encoding="utf-8") == index_content assert doc_file.read_text(encoding="utf-8") == doc_content_3 assert (nested_dir / "doc.md").read_text(encoding="utf-8") == nested_dir_doc_content_2 diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 59637069..bf13bc79 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -217,7 +217,7 @@ def test__run_migrate_server_error_topic( (upstream_repo, upstream_path) = upstream_repository upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} assert (upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert not (upstream_path / DOCUMENTATION_FOLDER_NAME / "path 1").exists() @@ -260,7 +260,7 @@ def test__run_migrate( (upstream_repo, upstream_path) = upstream_repository upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert (path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1.md").is_file() assert index_file.read_text(encoding="utf-8") == index_content @@ -380,7 +380,7 @@ def test_run_no_docs_dir( (upstream_repo, upstream_path) = upstream_repository upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert returned_migration_reports == {mock_pull_request.url: types_.ActionResult.SUCCESS} + assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert ( path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index 5c8776ca..2962a391 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -76,7 +76,7 @@ def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: base="base-1", dry_run=False, ) - == mock_pull_request.url + == mock_pull_request.html_url ) @@ -482,7 +482,7 @@ def test_create_pull_request_existing_branch( assert pr_link is not None mock_github_repo.get_pulls.assert_called_once_with( state="open", - head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + head=f"{mock_github_repo.full_name}:{branch_name}", ) mock_github_repo.create_pull.assert_called_once_with( title=pull_request.ACTIONS_PULL_REQUEST_TITLE, @@ -518,7 +518,7 @@ def test_create_pull_request( assert pr_link is not None mock_github_repo.get_pulls.assert_called_once_with( state="open", - head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + 
head=f"{mock_github_repo.full_name}:{branch_name}", ) mock_github_repo.create_pull.assert_called_once_with( title=pull_request.ACTIONS_PULL_REQUEST_TITLE, @@ -554,8 +554,8 @@ def test_create_pull_request_existing_pr( (upstream, upstream_path) = upstream_repository upstream.git.checkout(branch_name) (upstream_path / test_file).is_file() - assert pr_link == mock_pull_request.url + assert pr_link == mock_pull_request.html_url mock_github_repo.get_pulls.assert_called_once_with( state="open", - head=f"{pull_request.ACTIONS_USER_NAME}/{branch_name}", + head=f"{mock_github_repo.full_name}:{branch_name}", ) From 5182b94944b249325d2ed3423e74ba8d31565dfc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 11:10:44 +0800 Subject: [PATCH 044/107] improve comments per pyupgrade --- tests/integration/test___init__.py | 4 ++-- tests/unit/test_migration.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index ca77333d..4022dc35 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -409,7 +409,7 @@ async def test_run( (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() (nested_dir_doc_file := nested_dir / "doc.md").write_text( - (nested_dir_doc_content_2 := "nested dir doc content 2") + nested_dir_doc_content_2 := "nested dir doc content 2" ) urls_with_actions = run( base_path=repo_path, @@ -453,7 +453,7 @@ async def test_run( (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() (nested_dir_doc_file := nested_dir / "doc.md").write_text( - (nested_dir_doc_content_2 := "nested dir doc content 2") + nested_dir_doc_content_2 := "nested dir doc content 2" ) urls_with_actions = run( base_path=repo_path, diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index a7ed7ed3..337efc4d 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -7,7 +7,8 @@ # pylint: disable=protected-access from pathlib import Path -from typing import Iterable, List +from typing import List +from collections.abc import Iterable from unittest import mock import pytest @@ -247,7 +248,7 @@ def test__validate_row_levels(table_rows: list[types_.TableRow]): ) def test_extract_docs__from_table_rows_empty_directory_rows( table_rows: Iterable[types_.TableRow], - expected_files: List[types_.MigrationFileMeta], + expected_files: list[types_.MigrationFileMeta], ): """ arrange: given valid table rows with no navlink(only directories) @@ -454,7 +455,7 @@ def test_get_docs_metadata( ) def test_extract_docs( table_rows: Iterable[types_.TableRow], - expected_files: List[types_.MigrationFileMeta], + expected_files: list[types_.MigrationFileMeta], ): """ arrange: given valid table rows From 9ea1daf7215af5f296de1abfbbc256ec3ec8bf25 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 12:48:28 +0800 Subject: [PATCH 045/107] remove unused import --- tests/unit/test_migration.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 337efc4d..25f30bf8 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -6,9 +6,8 @@ # Need access to protected functions for testing # pylint: disable=protected-access -from pathlib import Path -from typing import List from 
collections.abc import Iterable +from pathlib import Path from unittest import mock import pytest From a5e941b8ba76eae61ed0e13f618f2dec70f3390f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 12:50:06 +0800 Subject: [PATCH 046/107] merge changes in main --- src/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/__init__.py b/src/__init__.py index 14be6347..4bd6645e 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -101,7 +101,7 @@ def _run_migrate( index.server.content if index.server is not None and index.server.content else "" ) index_content = contents_from_page(server_content) - table_rows = navigation_table_from_page(page=server_content) + table_rows = navigation_table_from_page(page=server_content, discourse=discourse) file_metadata = get_docs_metadata(table_rows=table_rows, index_content=index_content) run_migrate( documents=file_metadata, From 79b6fb6ca94ba7cfddcea7dc14266fd35b592fd6 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 30 Dec 2022 13:24:00 +0800 Subject: [PATCH 047/107] refurb code feedback --- src/reconcile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconcile.py b/src/reconcile.py index 7a038c2b..1ef94e84 100644 --- a/src/reconcile.py +++ b/src/reconcile.py @@ -243,7 +243,7 @@ def _calculate_action( Raises: ReconcilliationError: if both path_info and table_row are None. """ - if path_info is None and table_row is None: + if path_info is table_row is None: raise exceptions.ReconcilliationError( "internal error, both path info and table row are None" ) From e22527943715d9bc7468969c38df028c795d3a62 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 2 Jan 2023 19:16:50 +0800 Subject: [PATCH 048/107] reformat doc --- README.md | 170 +++++++++++++++++++++++++++--------------------------- 1 file changed, 85 insertions(+), 85 deletions(-) diff --git a/README.md b/README.md index 0eba676b..fbdc5434 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Upload Charm Documentation -_This action is still in alpha, breaking changes could occur._ +*This action is still in alpha, breaking changes could occur.* This action automates uploading documentation from the `docs` folder in a repository to discourse which is how the charm documentation is published to @@ -12,107 +12,107 @@ charmhub. 1. Create the `docs` folder in the repository. 2. Optionally, create a file `docs/index.md` for any content you would like to - display above the navigation table on discourse. This content does not get - published to charmhub and is only visible on discourse. + display above the navigation table on discourse. This content does not get + published to charmhub and is only visible on discourse. 3. Within the `docs` folder, create directories for page groups (e.g., for all - tutorials) and markdown files (`*.md`) for individual pages. On charmhub, - the groupings on the navigation panel will be named based on the name of - the directory after replacing `_` and `-` with spaces and appliying the - [`str.title`](https://docs.python.org/3/library/stdtypes.html#str.title) - function to it. The name of pages is based on whatever of the following is - available, in order: (1) the first level 1 heading (e.g., `# `) in - the file, the first line in the file or the name of the file treated in the - same way as the name of groupings. 
- - If you have existing documentation on discourse, you can retrieve the - markdown version by changing the link to the topic in your browser from - `https://discourse.charmhub.io/t//` to - `https://discourse.charmhub.io/raw/`. _Future plans for this - action include automating this migration by pulling the content down and - creating a PR for you to review in the repository._ - - Note that the action may change the order of how groups and pages are - displayed in the navigation pane. The action will sort them alphabetically. + tutorials) and markdown files (`*.md`) for individual pages. On charmhub, + the groupings on the navigation panel will be named based on the name of + the directory after replacing `_` and `-` with spaces and appliying the + [`str.title`](https://docs.python.org/3/library/stdtypes.html#str.title) + function to it. The name of pages is based on whatever of the following is + available, in order: (1) the first level 1 heading (e.g., `# `) in + the file, the first line in the file or the name of the file treated in the + same way as the name of groupings. + + If you have existing documentation on discourse, you can retrieve the + markdown version by changing the link to the topic in your browser from + `https://discourse.charmhub.io/t//` to + `https://discourse.charmhub.io/raw/`. _Future plans for this + action include automating this migration by pulling the content down and + creating a PR for you to review in the repository._ + + Note that the action may change the order of how groups and pages are + displayed in the navigation pane. The action will sort them alphabetically. 4. Optionally, remove the current `docs` key from `metadata.yaml` if you would - like the action to create its own topics on discourse rather than re-use - any existing topics. This means that if, for some reason, you don't like - what the action does, you can easily revert back to the previous - documentation. Be sure to file an issue with the reason if the action does - something unexpected or you would prefer it to do something different. + like the action to create its own topics on discourse rather than re-use + any existing topics. This means that if, for some reason, you don't like + what the action does, you can easily revert back to the previous + documentation. Be sure to file an issue with the reason if the action does + something unexpected or you would prefer it to do something different. 5. Add this action to your desired workflow. For example: - ```yaml - jobs: - publish-docs: - name: Publish docs - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v3 - - name: Publish documentation - uses: canonical/upload-charm-docs@main - id: publishDocumentation - with: - discourse_host: discourse.charmhub.io - discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} - discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} - - name: Show index page - run: echo '${{ steps.publishDocumentation.outputs.index_url }}' - ``` - - This action requires an API username and key to discourse. For Canonical - staff, please file a ticket with IS to request one. Note that there is a - rate limit on the number of topics that can be created by a user per day on - discourse. If you encounter this issue, the action will fail and report - that as the reason. It may help to space out adopting this action if you - are planning to use it for multiple charms or to use different users for - each charm. 
Note that other rate limits also apply which is why execution - might look like it is stalled for a short period and then resume. The - action will gracefully wait in case of throttling up to a maximum of 10 - minutes. - - There is a nice parameter, `dry_run`, which will do everything except - make changes on discourse and log what would have happened. This will help - you see what the action would have done. + ```yaml + jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + - name: Publish documentation + uses: canonical/upload-charm-docs@main + id: publishDocumentation + with: + discourse_host: discourse.charmhub.io + discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} + discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} + - name: Show index page + run: echo '${{ steps.publishDocumentation.outputs.index_url }}' + ``` + + This action requires an API username and key to discourse. For Canonical + staff, please file a ticket with IS to request one. Note that there is a + rate limit on the number of topics that can be created by a user per day on + discourse. If you encounter this issue, the action will fail and report + that as the reason. It may help to space out adopting this action if you + are planning to use it for multiple charms or to use different users for + each charm. Note that other rate limits also apply which is why execution + might look like it is stalled for a short period and then resume. The + action will gracefully wait in case of throttling up to a maximum of 10 + minutes. + + There is a nice parameter, `dry_run`, which will do everything except + make changes on discourse and log what would have happened. This will help + you see what the action would have done. 6. Check the logs for the URL to the index topic that the action created. This - is also available under the `index_url` output of the action. This needs to - be added to the `metadata.yaml` under the `docs` key. + is also available under the `index_url` output of the action. This needs to + be added to the `metadata.yaml` under the `docs` key. ### Migrate docs 1. Create a `docs` key in `metadata.yaml` with the link to the documentation on - charmhub. + charmhub. 2. Add the action to your desired workflow as mentioned in step 5 of - [Sync docs section](#sync-docs) with github_token. For example: - - ```yaml - jobs: - publish-docs: - name: Publish docs - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v3 - - name: Publish documentation - uses: canonical/upload-charm-docs@main - id: publishDocumentation - with: - discourse_host: discourse.charmhub.io - discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} - discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} - github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Show index page - run: echo '${{ steps.publishDocumentation.outputs.index_url }}' - ``` - - additional branch_name input can be specified to create a pull request from a specific branch name. + [Sync docs section](#sync-docs) with github_token. 
For example: + + ```yaml + jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + - name: Publish documentation + uses: canonical/upload-charm-docs@main + id: publishDocumentation + with: + discourse_host: discourse.charmhub.io + discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} + discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} + github_token: ${{ secrets.GITHUB_TOKEN }} + - name: Show index page + run: echo '${{ steps.publishDocumentation.outputs.index_url }}' + ``` + + additional branch_name input can be specified to create a pull request from a specific branch name. The action will now compare the discourse topics with the files and directories under the `docs` directory and make any changes based on differences. Additional recommended steps: -- Add the action in dry run mode to run on every PR. This will mean that you +* Add the action in dry run mode to run on every PR. This will mean that you will see all the changes that would be made by the PR once you are ready to publish a new version of the charm and documentation. -- Add the action in dry run mode on publishes to `edge` to see what changes to +* Add the action in dry run mode on publishes to `edge` to see what changes to the documentation will be made once you publish to `stable`. From 434af4f525bc4a506e4131d05bf9f4ffb21850fb Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 2 Jan 2023 19:17:46 +0800 Subject: [PATCH 049/107] change docs italics --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fbdc5434..cd40f003 100644 --- a/README.md +++ b/README.md @@ -27,13 +27,12 @@ charmhub. If you have existing documentation on discourse, you can retrieve the markdown version by changing the link to the topic in your browser from `https://discourse.charmhub.io/t//` to - `https://discourse.charmhub.io/raw/`. _Future plans for this + `https://discourse.charmhub.io/raw/`. *Future plans for this action include automating this migration by pulling the content down and - creating a PR for you to review in the repository._ + creating a PR for you to review in the repository.* Note that the action may change the order of how groups and pages are displayed in the navigation pane. The action will sort them alphabetically. - 4. Optionally, remove the current `docs` key from `metadata.yaml` if you would like the action to create its own topics on discourse rather than re-use any existing topics. 
This means that if, for some reason, you don't like From 79c78426cdf1d23f2e6b47285526eda8444eb034 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 2 Jan 2023 22:47:51 +0800 Subject: [PATCH 050/107] add fail on migration fail behavior --- src/__init__.py | 5 ++-- src/migration.py | 20 ++++++++++++- tests/factories.py | 28 ++++++++++++++++++ tests/unit/test___init__.py | 31 ++++++++------------ tests/unit/test_migration.py | 55 ++++++++++++++++++++++++++++++++++++ 5 files changed, 117 insertions(+), 22 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 4bd6645e..09446681 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -17,7 +17,7 @@ from .index import DOCUMENTATION_FOLDER_NAME, contents_from_page from .index import get as get_index from .metadata import get as get_metadata -from .migration import get_docs_metadata +from .migration import assert_migration_success, get_docs_metadata from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page from .pull_request import create_github, create_pull_request, get_repository_name @@ -103,11 +103,12 @@ def _run_migrate( index_content = contents_from_page(server_content) table_rows = navigation_table_from_page(page=server_content, discourse=discourse) file_metadata = get_docs_metadata(table_rows=table_rows, index_content=index_content) - run_migrate( + migration_results = run_migrate( documents=file_metadata, discourse=discourse, docs_path=base_path / DOCUMENTATION_FOLDER_NAME, ) + assert_migration_success(migration_results=migration_results) pr_link = create_pull_request( repository=repo, github_repository=github_repo, branch_name=branch_name, dry_run=dry_run diff --git a/src/migration.py b/src/migration.py index 1d7f5155..22d59e5d 100644 --- a/src/migration.py +++ b/src/migration.py @@ -251,8 +251,8 @@ def get_docs_metadata( Returns: Metadata of files to be migrated. """ - table_docs = _extract_docs_from_table_rows(table_rows=table_rows) index_doc = _index_file_from_content(content=index_content) + table_docs = _extract_docs_from_table_rows(table_rows=table_rows) return itertools.chain([index_doc], table_docs) @@ -265,8 +265,26 @@ def run( documents: metadata about a file to be migrated to local docs directory. discourse: Client to the documentation server. docs_path: The path to the docs directory containing all the documentation. + + Returns: + Migration result reports containing action result and failure reason if any. """ return [ _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) for document in documents ] + + +def assert_migration_success(migration_results: typing.Iterable[types_.MigrationReport]) -> None: + """Assert all documents have been successfully migrated. + + Args: + migration_results: Migration results from server to local. + + Returns: + None if success, raises MigrationError otherwise. + """ + if [result for result in migration_results if result.result is types_.ActionResult.FAIL]: + raise exceptions.MigrationError( + "Error migrating the docs, please check the logs for more detail." 
+ ) diff --git a/tests/factories.py b/tests/factories.py index e52a19a4..24b2377a 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -27,3 +27,31 @@ class Meta: table_path = factory.Sequence(lambda n: f"path {n}") navlink_title = factory.Sequence(lambda n: f"title {n}") alphabetical_rank = factory.Sequence(lambda n: n) + + +class MigrationReportFactory(factory.Factory): + """Generate Migration reports.""" + + class Meta: + """Configuration for factory.""" + + model = types_.MigrationReport + abstract = False + + class Params: + """Variable factory params for generating different status report.""" + + is_success = factory.Trait(result=types_.ActionResult.SUCCESS, reason=None) + is_skipped = factory.Trait(result=types_.ActionResult.SKIP, reason="skipped") + is_failed = factory.Trait(result=types_.ActionResult.FAIL, reason="failed") + + table_row = factory.Sequence( + lambda n: types_.TableRow( + level=n, + path=f"path {n}", + navlink=types_.Navlink(title=f"title {n}", link=f"link {n}"), + ) + ) + path = factory.Sequence(lambda n: Path(f"dir{n}")) + result = None + reason = None diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index bf13bc79..1bd7094b 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -179,15 +179,13 @@ def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, def test__run_migrate_server_error_topic( repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], - mock_pull_request: PullRequest, mock_github_repo: Repository, ): """ arrange: given metadata with name and docs but no docs directory and mocked discourse that raises an exception during topic retrieval act: when _run_migrate is called - assert: only index document is migrated. + assert: MigrationError is raised. """ index_url = "http://discourse/t/docs" index_content = """Content Title @@ -198,28 +196,23 @@ def test__run_migrate_server_error_topic( | Level | Path | Navlink | | -- | -- | -- | - | 1 | path 1 | [Link](/t/link-to-1) | + | 1 | path-1 | [Link](/t/link-to-1) | """ meta = types_.Metadata(name="name 1", docs=index_url) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] (repo, repo_path) = repository - returned_migration_reports = _run_migrate( - base_path=repo_path, - metadata=meta, - discourse=mocked_discourse, - repo=repo, - github_repo=mock_github_repo, - branch_name=None, - dry_run=False, - ) - - (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} - assert (upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() - assert not (upstream_path / DOCUMENTATION_FOLDER_NAME / "path 1").exists() + with pytest.raises(exceptions.MigrationError): + _run_migrate( + base_path=repo_path, + metadata=meta, + discourse=mocked_discourse, + repo=repo, + github_repo=mock_github_repo, + branch_name=None, + dry_run=False, + ) # pylint: disable=too-many-locals diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 25f30bf8..86fcdb75 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -14,6 +14,7 @@ from src import discourse, exceptions, migration, types_ +from .. 
import factories from .helpers import path_to_markdown @@ -679,3 +680,57 @@ def test_run( assert returned.result == expected.result assert returned.reason == expected.reason assert returned.table_row == expected.table_row + + +@pytest.mark.parametrize( + "migration_results", + [ + pytest.param( + [failed_result := factories.MigrationReportFactory(is_failed=True)], id="failed result" + ), + pytest.param( + [success_result := factories.MigrationReportFactory(is_success=True), failed_result], + id="mixed result", + ), + ], +) +def test_asssert_migration_fail(migration_results: list[types_.MigrationReport]): + """ + arrange: given at least one failed result in migration results + act: when assert_migration_success is called + assert: MigrationError exception is raised. + + """ + with pytest.raises(exceptions.MigrationError): + migration.assert_migration_success(migration_results=migration_results) + + +@pytest.mark.parametrize( + "migration_results", + [ + pytest.param( + [success_result], + id="success result", + ), + pytest.param( + [skipped_result := factories.MigrationReportFactory(is_skipped=True)], + id="skipped result", + ), + pytest.param([success_result, success_result], id="success results"), + pytest.param([skipped_result, skipped_result], id="skipped results"), + pytest.param( + [ + success_result, + skipped_result, + ], + id="mixed results", + ), + ], +) +def test_assert_migration_success(migration_results: list[types_.MigrationReport]): + """ + arrange: given successful migration results + act: when assert_migration_success is called + assert: no exceptions are raised. + """ + migration.assert_migration_success(migration_results=migration_results) From 548b17d17b8f62e37250d4431bfa81853c893863 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 2 Jan 2023 22:50:52 +0800 Subject: [PATCH 051/107] fix typo asssert --- tests/unit/test_migration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 86fcdb75..19ae6fcc 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -694,7 +694,7 @@ def test_run( ), ], ) -def test_asssert_migration_fail(migration_results: list[types_.MigrationReport]): +def test_assert_migration_fail(migration_results: list[types_.MigrationReport]): """ arrange: given at least one failed result in migration results act: when assert_migration_success is called From 8fd598d165538f50c48db46075eff24bbdd145da Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 3 Jan 2023 19:08:41 +0800 Subject: [PATCH 052/107] remove dry run mode in migration mode --- src/__init__.py | 5 +- src/pull_request.py | 56 +++++-------- tests/unit/test___init__.py | 3 - tests/unit/test_pull_request.py | 138 ++------------------------------ 4 files changed, 28 insertions(+), 174 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 09446681..0a84658a 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -80,7 +80,6 @@ def _run_migrate( repo: Repo, github_repo: Repository, branch_name: str | None, - dry_run: bool, ) -> dict[str, str]: """Migrate existing docs from charmhub to local repository. @@ -91,7 +90,6 @@ def _run_migrate( repo: A git-binding for the current repository. github_repo: A client for communicating with github. branch_name: The branch name to base the pull request from. - dry_run: If enabled, only log the action that would be taken. Returns: A Pull Request link to the Github repository. 
@@ -111,7 +109,7 @@ def _run_migrate( assert_migration_success(migration_results=migration_results) pr_link = create_pull_request( - repository=repo, github_repository=github_repo, branch_name=branch_name, dry_run=dry_run + repository=repo, github_repository=github_repo, branch_name=branch_name ) return {pr_link: ActionResult.SUCCESS} @@ -153,7 +151,6 @@ def run( repo=repo, github_repo=github_repo, branch_name=branch_name, - dry_run=dry_run, ) if has_docs_dir: return _run_reconcile( diff --git a/src/pull_request.py b/src/pull_request.py index 2746d7ff..79d67422 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -23,7 +23,6 @@ ACTIONS_PULL_REQUEST_TITLE = "[docs] Migrate charm docs" ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" PR_LINK_NO_CHANGE = "" -PR_LINK_DRY_RUN = "" DEFAULT_BRANCH_NAME = "upload-charm-docs" @@ -54,7 +53,7 @@ def _check_branch_exists(repository: Repo, branch_name: str): raise exc -def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run: bool): +def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str): """Merge existing changes in current repository with specified branch with theirs strategy. Args: @@ -63,7 +62,7 @@ def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str, commit_msg: Commit message for current changes. dry_run: If enabled, only log the action that would be taken. """ - logging.info("dry run: %s, merge to existing branch %s", dry_run, branch_name) + logging.info("merge to existing branch %s", branch_name) temp_branch = str(uuid4()) head = repository.create_head(temp_branch) head.checkout() @@ -73,56 +72,48 @@ def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str, repository.git.checkout(branch_name) repository.git.pull() - if not dry_run: - repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") - repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) + repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") + repository.git.commit("-m", f"'{commit_msg}'") + repository.git.push("-u", "origin", branch_name) repository.git.branch("-D", temp_branch) -def _create_branch(repository: Repo, branch_name: str, commit_msg: str, dry_run: bool): +def _create_branch(repository: Repo, branch_name: str, commit_msg: str): """Create new branch with existing changes. Args: repository: Current repository. branch_name: New branch name. commit_msg: Commit message for current changes. - dry_run: If enabled, only log the action that would be taken. """ - logging.info("dry run: %s, create new branch %s", dry_run, branch_name) - if not dry_run: - repository.git.checkout("-b", branch_name) - repository.git.add(".") - repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) + logging.info("create new branch %s", branch_name) + repository.git.checkout("-b", branch_name) + repository.git.add(".") + repository.git.commit("-m", f"'{commit_msg}'") + repository.git.push("-u", "origin", branch_name) -def _create_pull_request( - github_repository: Repository, branch_name: str, base: str, dry_run: bool -): +def _create_pull_request(github_repository: Repository, branch_name: str, base: str): """Create a pull request. Args: github_repository: Github repository client. branch_name: Branch name from which the pull request will be created. base: Base branch to which the pull request will be created. 
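As an aside, the pull request creation handled by this helper reduces to a few PyGithub calls. A rough, self-contained sketch, with the access token, repository name, and branch names as placeholders rather than the action's real configuration:

    from github import Github

    github_client = Github(login_or_token="<github-access-token>")  # placeholder token
    github_repository = github_client.get_repo("canonical/upload-charm-docs")
    pull = github_repository.create_pull(
        title="[docs] Migrate charm docs",
        body="This pull request was autogenerated by upload-charm-docs",
        base="main",
        head="upload-charm-docs",
    )
    print(pull.html_url)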
- dry_run: If enabled, only log the action that would be taken. Returns: The pull request URL. """ - logging.info("dry run: %s, create pull request %s", dry_run, branch_name) - if not dry_run: - pull_request = github_repository.create_pull( - title=ACTIONS_PULL_REQUEST_TITLE, - body=ACTIONS_PULL_REQUEST_BODY, - base=base, - head=branch_name, - ) - else: - pull_request = None - return pull_request.html_url if pull_request is not None else PR_LINK_DRY_RUN + logging.info("create pull request %s", branch_name) + pull_request = github_repository.create_pull( + title=ACTIONS_PULL_REQUEST_TITLE, + body=ACTIONS_PULL_REQUEST_BODY, + base=base, + head=branch_name, + ) + + return pull_request.html_url def get_repository_name(remote_url: str): @@ -172,7 +163,6 @@ def create_pull_request( repository: Repo, github_repository: Repository, branch_name: str | None, - dry_run: bool, ) -> str: """Create pull request for changes in given repository path. @@ -180,7 +170,6 @@ def create_pull_request( access_token: Github access token. repository_path: Repository root where .git resides. branch_name: Pull request branch name. - dry_run: If enabled, only log the action that would be taken. Raises: InputError: if branch name configuration is invalid. @@ -203,14 +192,12 @@ def create_pull_request( repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE, - dry_run=dry_run, ) else: _create_branch( repository=repository, branch_name=branch_name, commit_msg=ACTIONS_COMMIT_MESSAGE, - dry_run=dry_run, ) repository.git.checkout(base) @@ -222,7 +209,6 @@ def create_pull_request( github_repository=github_repository, branch_name=branch_name, base=base, - dry_run=dry_run, ) else: pr_url = open_pulls[0].html_url diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 1bd7094b..3b0e0b77 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -171,7 +171,6 @@ def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, repo=repo, github_repo=mocked_github_repo, branch_name=None, - dry_run=False, ) assert "Index page retrieval failed" == str(exc.value) @@ -211,7 +210,6 @@ def test__run_migrate_server_error_topic( repo=repo, github_repo=mock_github_repo, branch_name=None, - dry_run=False, ) @@ -248,7 +246,6 @@ def test__run_migrate( repo=repo, github_repo=mock_github_repo, branch_name=None, - dry_run=False, ) (upstream_repo, upstream_path) = upstream_repository diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index 2962a391..877fa108 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -6,7 +6,6 @@ # Need access to protected functions for testing # pylint: disable=protected-access -import logging import typing from os.path import dirname from pathlib import Path @@ -40,29 +39,6 @@ def test__configure_user(repository: tuple[Repo, Path]): assert reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL -def test__create_pull_request_dry_run( - mock_github_repo: Repository, - caplog: pytest.LogCaptureFixture, -): - """ - arrange: given a mocked github repository client and a mocked pull request - act: when _create_pull_request is called with dry_run True, - assert: a dry run pull request link is returned. 
- """ - caplog.set_level(logging.INFO) - assert ( - pull_request._create_pull_request( - github_repository=mock_github_repo, - branch_name="branch-1", - base="base-1", - dry_run=True, - ) - == pull_request.PR_LINK_DRY_RUN - ) - assert "create pull request" in caplog.text - assert f"dry run: {True}" in caplog.text - - def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: Repository): """ arrange: given a mocked github repository client and a mocked pull request @@ -74,7 +50,6 @@ def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: github_repository=mock_github_repo, branch_name="branch-1", base="base-1", - dry_run=False, ) == mock_pull_request.html_url ) @@ -163,60 +138,6 @@ def test__check_branch_exists( assert pull_request._check_branch_exists(repo, branch_name) -@pytest.mark.parametrize( - "existing_files, new_files", - [ - pytest.param( - [original_file := (Path("text.txt"), "original")], - [test_file := (Path("test.txt"), "test")], - id="simple merge", - ), - ], -) -def test__merge_existing_branch_dry_run( - existing_files: list[tuple[Path, str]], - new_files: list[tuple[Path, str]], - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], - caplog: pytest.LogCaptureFixture, -): - """ - arrange: given a local git repository with changes and \ - a remote repository with existing branch with existing files - act: when _merge_existing_branch is called with existing branch name \ - in dry run mode - assert: then no changes are merged, and the action is logged. - """ - caplog.set_level(logging.INFO) - branch_name = "test_branch" - commit_message = "test_message" - (upstream, upstream_path) = upstream_repository - upstream_head = upstream.create_head(branch_name) - upstream_head.checkout() - for (file, content) in existing_files: - (upstream_path / file).touch() - (upstream_path / file).write_text(content, encoding="utf-8") - upstream.git.add(".") - upstream.git.commit("-m", "'add upstream'") - upstream.git.checkout("main") - (repo, repo_path) = repository - for (file, content) in new_files: - (repo_path / file).touch() - (repo_path / file).write_text(content, encoding="utf-8") - repo.git.fetch("origin", branch_name) - create_repository_author(repo) - - pull_request._merge_existing_branch( - repository=repo, branch_name=branch_name, commit_msg=commit_message, dry_run=True - ) - - upstream.git.checkout(branch_name) - assert f"dry run: {True}" in caplog.text - assert "merge to existing branch" in caplog.text - for (file, content) in new_files: - assert not (upstream_path / file).is_file() - - @pytest.mark.parametrize( "existing_files, new_files, expected_files", [ @@ -269,7 +190,7 @@ def test__merge_existing_branch( create_repository_author(repo) pull_request._merge_existing_branch( - repository=repo, branch_name=branch_name, commit_msg=commit_message, dry_run=False + repository=repo, branch_name=branch_name, commit_msg=commit_message ) upstream.git.checkout(branch_name) @@ -278,50 +199,6 @@ def test__merge_existing_branch( assert (upstream_path / file).read_text(encoding="utf-8") == content -@pytest.mark.parametrize( - "new_files", - [ - pytest.param([test_file], id="single file"), - pytest.param( - [test_file, nested_file := (Path("nested/file.txt"), "nested file content")], - id="nested file", - ), - ], -) -def test__create_branch_dry_run( - new_files: list[tuple[Path, str]], - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], - caplog: pytest.LogCaptureFixture, -): - """ - arrange: given a local 
git repository with new files - act: when _create_branch is called with new branch name in dry run mode - assert: new files are created upstream. - """ - caplog.set_level(logging.INFO) - branch_name = "test_branch" - (upstream, upstream_path) = upstream_repository - (repo, repo_path) = repository - for (file, content) in new_files: - Path(dirname(repo_path / file)).mkdir(parents=True, exist_ok=True) - (repo_path / file).touch() - (repo_path / file).write_text(content, encoding="utf-8") - create_repository_author(repo) - - pull_request._create_branch( - repository=repo, branch_name=branch_name, commit_msg="test_commit", dry_run=True - ) - - assert f"dry run: {True}" in caplog.text - assert "create new branch" in caplog.text - with pytest.raises(GitCommandError) as exc_info: - upstream.git.checkout(branch_name) - assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc_info.value) - ) - - @pytest.mark.parametrize( "new_files", [ @@ -351,9 +228,7 @@ def test__create_branch( (repo_path / file).write_text(content, encoding="utf-8") create_repository_author(repo) - pull_request._create_branch( - repository=repo, branch_name=branch_name, commit_msg="test_commit", dry_run=False - ) + pull_request._create_branch(repository=repo, branch_name=branch_name, commit_msg="test_commit") upstream.git.checkout(branch_name) for (file, content) in new_files: @@ -428,7 +303,6 @@ def test_create_pull_request_invalid_branch(tmp_path: Path, mock_github_repo: Re repository=repo, github_repository=mock_github_repo, branch_name=branch_name, - dry_run=False, ) assert_substrings_in_string( @@ -448,7 +322,7 @@ def test_create_pull_request_no_change( (repo, _) = repository returned_pr = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) assert returned_pr == pull_request.PR_LINK_NO_CHANGE @@ -474,7 +348,7 @@ def test_create_pull_request_existing_branch( mock_github_repo = mock.MagicMock(spec=Repository) pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) upstream.git.checkout(branch_name) @@ -509,7 +383,7 @@ def test_create_pull_request( (repo_path / test_file).touch() pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) (upstream, upstream_path) = upstream_repository @@ -548,7 +422,7 @@ def test_create_pull_request_existing_pr( mock_github_repo.get_pulls.side_effect = [[mock_pull_request]] pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name, dry_run=False + repository=repo, github_repository=mock_github_repo, branch_name=branch_name ) (upstream, upstream_path) = upstream_repository From b179f64ef8abfa37cbabd091176954e2965a8bc3 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 3 Jan 2023 21:54:31 +0800 Subject: [PATCH 053/107] separate out migration tests --- tests/factories.py | 15 ++ tests/integration/test___init__.py | 247 ++++++++++++++--------------- tests/types.py | 13 ++ 3 files changed, 144 insertions(+), 131 deletions(-) create mode 100644 tests/types.py diff --git a/tests/factories.py b/tests/factories.py index 
24b2377a..9369b7a3 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -12,6 +12,8 @@ from src import types_ +from . import types + class PathInfoFactory(factory.Factory): """Generate PathInfos.""" @@ -55,3 +57,16 @@ class Params: path = factory.Sequence(lambda n: Path(f"dir{n}")) result = None reason = None + + +class ContentPageFactory(factory.Factory): + """Generate discourse content page.""" + + class Meta: + """Configuration for factory.""" + + model = types.DiscoursePageMeta + abstract = False + + title = factory.Sequence(lambda n: f"Content title {n}") + content = factory.Sequence(lambda n: f"Content {n}") diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index 4022dc35..3d7c0cd3 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -7,19 +7,27 @@ # pylint: disable=too-many-arguments,too-many-locals,too-many-statements import logging -import shutil from itertools import chain from pathlib import Path from urllib.parse import urlparse import pytest -from git.exc import GitCommandError from git.repo import Repo from github.PullRequest import PullRequest -from src import GETTING_STARTED, exceptions, index, metadata, pull_request, reconcile, run +from src import ( + GETTING_STARTED, + exceptions, + index, + metadata, + migration, + pull_request, + reconcile, + run, +) from src.discourse import Discourse +from .. import factories from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml pytestmark = pytest.mark.init @@ -31,8 +39,6 @@ async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], - mock_pull_request: PullRequest, ): """ arrange: given running discourse server @@ -52,12 +58,9 @@ async def test_run( 12. with the nested directory removed 13. with the documentation file removed 14. with the index file removed - 15. with no docs dir and no custom branchname provided in dry run mode - 16. with no docs dir and no custom branchname provided - 17. with no docs dir and custom branchname provided in dry run mode - 18. with no docs dir and custom branchname provided - 19. with no changes applied after migration in dry run mode - 20. with no changes applied after migration + 15. with no docs dir and no custom branchname provided + 16. with no docs dir and custom branchname provided + 17. with no changes applied after migration assert: then: 1. an index page is created with an empty navigation table 2. an index page is not updated @@ -73,12 +76,9 @@ async def test_run( 12. the nested directory is removed from the navigation table 13. the documentation page is deleted 14. an index page is not updated - 15. the documentation files are not pushed to default branch - 16. the documentation files are pushed to default branch - 17. the documentation files are not pushed to custom branch - 18. the documentation files are pushed to custom branch - 19. no operations are taken place - 20. no operations are taken place + 15. the documentation files are pushed to default branch + 16. the documentation files are pushed to custom branch + 17. no operations are taken place """ (repo, repo_path) = repository # this is an access token string for testing purposes. @@ -400,72 +400,81 @@ async def test_run( index_topic = discourse_api.retrieve_topic(url=index_url) assert index_content not in index_topic - # 15. 
with no docs dir and no custom branchname provided in dry run mode - caplog.clear() - (upstream_repo, _) = upstream_repository - doc_table_key_2 = "docs-2" - nested_dir_table_key_2 = "nested-dir-2" - (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") - (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") - (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() - (nested_dir_doc_file := nested_dir / "doc.md").write_text( - nested_dir_doc_content_2 := "nested dir doc content 2" - ) - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, - ) - urls = tuple(urls_with_actions) - shutil.rmtree(docs_dir) - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, - ) - with pytest.raises(GitCommandError) as exc: - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc.value) +@pytest.mark.asyncio +@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") +async def test_run_migrate( + discourse_api: Discourse, + caplog: pytest.LogCaptureFixture, + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, +): + """ + arrange: given running discourse server + act: when run is called with: + 1. with no docs dir and no custom branchname provided + 2. with no docs dir and custom branchname provided + 3. with no changes applied after migration + assert: then: + 1. the documentation files are pushed to default branch + 2. the documentation files are pushed to custom branch + 3. no operations are taken place + """ + document_name = "migration name 1" + discourse_prefix = "http://discourse" + (repo, repo_path) = repository + (upstream_repo, upstream_repo_path) = upstream_repository + # this is an access token string for testing purposes. + test_access_token = "test-access-token" # nosec + content_page_1 = factories.ContentPageFactory() + content_page_1_url = discourse_api.create_topic( + title=content_page_1.title, + content=content_page_1.content, + ).removeprefix(discourse_prefix) + content_page_2 = factories.ContentPageFactory() + content_page_2_url = discourse_api.create_topic( + title=content_page_2.title, + content=content_page_2.content, + ).removeprefix(discourse_prefix) + content_page_3 = factories.ContentPageFactory() + content_page_3_url = discourse_api.create_topic( + title=content_page_3.title, + content=content_page_3.content, + ).removeprefix(discourse_prefix) + content_page_4 = factories.ContentPageFactory() + content_page_4_url = discourse_api.create_topic( + title=content_page_4.title, + content=content_page_4.content, + ).removeprefix(discourse_prefix) + index_page_content = f"""Testing index page. + + Testing index page content. 
+ + # Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + | 1 | group-1 | [Group 1]() | + | 1 | group-2 | [Group 2]() | + | 2 | group-2-content-1 | [Content Link 1]({content_page_1_url}) | + | 2 | group-2-content-2 | [Content Link 2]({content_page_2_url}) | + | 1 | group-3 | [Group 3]() | + | 2 | group-3-group-4 | [Group 4]() | + | 3 | group-3-group-4-content-3 | [Content Link 3]({content_page_3_url}) | + | 2 | group-3-content-4 | [Content Link 4]({content_page_4_url}) | + | 1 | group-5 | [Group 5]() |""" + index_url = discourse_api.create_topic( + title=f"{document_name.replace('-', ' ').title()} Documentation Overview", + content=index_page_content, ) - assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) - # 16. with no docs dir and no custom branchname provided + # 1. with no docs dir and no custom branchname provided caplog.clear() - repo.git.checkout("--", ".") create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", path=repo_path, ) - doc_table_key_2 = "docs-2" - nested_dir_table_key_2 = "nested-dir-2" - (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") - (doc_file := docs_dir / f"{doc_table_key_2}.md").write_text(doc_content_3 := "doc content 3") - (nested_dir := docs_dir / nested_dir_table_key_2).mkdir() - (nested_dir_doc_file := nested_dir / "doc.md").write_text( - nested_dir_doc_content_2 := "nested dir doc content 2" - ) - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, - ) - urls = tuple(urls_with_actions) - shutil.rmtree(docs_dir) urls_with_actions = run( base_path=repo_path, @@ -478,43 +487,24 @@ async def test_run( ) upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME assert tuple(urls_with_actions) == (mock_pull_request.html_url,) - assert index_file.read_text(encoding="utf-8") == index_content - assert doc_file.read_text(encoding="utf-8") == doc_content_3 - assert nested_dir_doc_file.read_text(encoding="utf-8") == nested_dir_doc_content_2 - - # 17. with no docs dir and custom branchname provided in dry run mode + assert ((group_1_path := upstream_doc_dir / "group-1")).is_dir() + assert ((group_1_gitkeep_path := group_1_path / migration.GITKEEP_FILE)).is_file() + assert ((group_2_path := upstream_doc_dir / "group-2")).is_dir() + assert ((group_2_content_1_path := group_2_path / "content-1.md")).read_text( + encoding="utf-8" + ) == content_page_1.content + assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content + assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() + assert ((group_4_path := group_3_path / "group-4")).is_dir() + assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content + assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content + assert (group_5_path := upstream_doc_dir / "group-5").is_dir() + assert group_5_path.is_dir() + + # 2. 
with no docs dir and custom branchname provided caplog.clear() - shutil.rmtree(docs_dir) - upstream_repo.git.checkout("main") - repo.git.checkout("main") - create_metadata_yaml( - content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=repo_path, - ) - custom_branchname = "branchname-1" - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=custom_branchname, - ) - - with pytest.raises(GitCommandError) as exc: - upstream_repo.git.checkout(custom_branchname) - assert_substrings_in_string( - ("error: pathspec", "did not match any file(s) known to git"), str(exc.value) - ) - assert tuple(urls_with_actions) == (pull_request.PR_LINK_DRY_RUN,) - - # 18. with no docs dir and custom branchname provided - caplog.clear() - shutil.rmtree(docs_dir) upstream_repo.git.checkout("main") repo.git.checkout("main") create_metadata_yaml( @@ -536,26 +526,19 @@ async def test_run( upstream_repo.git.checkout(custom_branchname) repo.git.checkout(custom_branchname) assert tuple(urls_with_actions) == (mock_pull_request.html_url,) - assert index_file.read_text(encoding="utf-8") == index_content - assert doc_file.read_text(encoding="utf-8") == doc_content_3 - assert (nested_dir / "doc.md").read_text(encoding="utf-8") == nested_dir_doc_content_2 - - # 19. with no changes applied after migration in dry run mode - caplog.clear() - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=custom_branchname, - ) - - assert_substrings_in_string(chain(urls, ("Noop", "Noop", "Noop", "'success'")), caplog.text) - - # 20. with no changes applied after migration + assert group_1_path.is_dir() + assert group_1_gitkeep_path.is_file() + assert group_2_path.is_dir() + assert group_2_content_1_path.read_text(encoding="utf-8") == content_page_1.content + assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content + assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() + assert ((group_4_path := group_3_path / "group-4")).is_dir() + assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content + assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content + assert (group_5_path := upstream_doc_dir / "group-5").is_dir() + assert group_5_path.is_dir() + + # 3. with no changes applied after migration caplog.clear() urls_with_actions = run( @@ -568,4 +551,6 @@ async def test_run( branch_name=custom_branchname, ) - assert_substrings_in_string(chain(urls, ("Noop", "Noop", "Noop", "'success'")), caplog.text) + assert_substrings_in_string( + chain(urls_with_actions, ("Noop", "Noop", "Noop", "'success'")), caplog.text + ) diff --git a/tests/types.py b/tests/types.py new file mode 100644 index 00000000..15fca778 --- /dev/null +++ b/tests/types.py @@ -0,0 +1,13 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Useful types for tests.""" + +from typing import NamedTuple + + +class DiscoursePageMeta(NamedTuple): + """Metadata for creating a discourse page.""" + + title: str + content: str From 3893fb47ebd0bd09e1d34208c2d05a9a7dbfabce Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 4 Jan 2023 14:44:29 +0800 Subject: [PATCH 054/107] remove default value --- action.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/action.yaml b/action.yaml index 02392946..48e47eb9 100644 --- a/action.yaml +++ b/action.yaml @@ -38,12 +38,12 @@ inputs: description: | The github access token (secrets.GITHUB_TOKEN) to create pull request on Github. Required if running in migration mode. - default: ${{ github.token }} required: false type: string branch_name: - description: Branch name to create pull request branch. - default: upload-charm-docs + description: | + Branch name to create pull request branch. Defaults to + upload-charm-docs/migrate. required: false type: string outputs: From 5cdfc89cd873bb3f6caa1ef084258d7a976c337c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 4 Jan 2023 14:44:58 +0800 Subject: [PATCH 055/107] add mypy type checks --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 49e55b7c..bc27d14a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,3 +49,9 @@ copyright-regexp = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+%(author)s" [tool.mypy] ignore_missing_imports = true +check_untyped_defs = true +disallow_untyped_defs = true + +[[tool.mypy.overrides]] +module = "tests.*" +disallow_untyped_defs = false From 6e1e599dbab7bdfed1da36c68fb0094193c50f6e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 4 Jan 2023 18:03:03 +0800 Subject: [PATCH 056/107] refactor pull request module as RepositoryClient --- src/exceptions.py | 4 +- src/pull_request.py | 304 +++++++++--------- tests/unit/conftest.py | 11 +- tests/unit/helpers.py | 8 - tests/unit/test_pull_request.py | 525 ++++++++++++-------------------- 5 files changed, 348 insertions(+), 504 deletions(-) diff --git a/src/exceptions.py b/src/exceptions.py index 63481578..002d75f4 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -44,5 +44,5 @@ class MigrationError(BaseError): """A problem with migration occurred.""" -class GitError(BaseError): - """A problem with git occurred.""" +class RepositoryClientError(BaseError): + """A problem with git repository client occurred.""" diff --git a/src/pull_request.py b/src/pull_request.py index 79d67422..0900b763 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -5,123 +5,156 @@ import logging import re -import typing -from uuid import uuid4 +from pathlib import Path from git import GitCommandError from git.repo import Repo from github import Github +from github.GithubException import GithubException from github.Repository import Repository -from .exceptions import GitError, InputError +from .exceptions import InputError, RepositoryClientError GITHUB_HOSTNAME = "github.com" HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") ACTIONS_USER_NAME = "actions-bot" ACTIONS_USER_EMAIL = "actions-bot@users.noreply.github.com" ACTIONS_COMMIT_MESSAGE = "migrate docs from server" -ACTIONS_PULL_REQUEST_TITLE = "[docs] Migrate charm docs" +ACTIONS_PULL_REQUEST_TITLE = "[upload-charm-docs] Migrate charm docs" ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" PR_LINK_NO_CHANGE = "" -DEFAULT_BRANCH_NAME = "upload-charm-docs" - - -def 
_configure_user(repository: Repo): - """Configure action git profile defaults.""" - config_writer = repository.config_writer() - config_writer.set_value("user", "name", ACTIONS_USER_NAME) - config_writer.set_value("user", "email", ACTIONS_USER_EMAIL) - config_writer.release() - - -def _check_branch_exists(repository: Repo, branch_name: str): - """Check if branch exists on remote. - - Args: - repository: Git-binding for the current repository. - branch_name: Branch name to check on remote. - - Returns: - True if branch already exists, False otherwise. - """ - try: - repository.git.fetch("origin", branch_name) - return True - except GitCommandError as exc: - if "couldn't find remote ref" in exc.stderr: - return False - raise exc - - -def _merge_existing_branch(repository: Repo, branch_name: str, commit_msg: str): - """Merge existing changes in current repository with specified branch with theirs strategy. - - Args: - repository: Git-binding for the current repository. - branch_name: Base branch to merge to. - commit_msg: Commit message for current changes. - dry_run: If enabled, only log the action that would be taken. - """ - logging.info("merge to existing branch %s", branch_name) - temp_branch = str(uuid4()) - head = repository.create_head(temp_branch) - head.checkout() - repository.git.add(".") - repository.git.commit("-m", f"'{commit_msg}'") - - repository.git.checkout(branch_name) - repository.git.pull() - - repository.git.merge(temp_branch, "-Xtheirs", "--squash", "--no-edit") - repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) - - repository.git.branch("-D", temp_branch) - - -def _create_branch(repository: Repo, branch_name: str, commit_msg: str): - """Create new branch with existing changes. - - Args: - repository: Current repository. - branch_name: New branch name. - commit_msg: Commit message for current changes. - """ - logging.info("create new branch %s", branch_name) - repository.git.checkout("-b", branch_name) - repository.git.add(".") - repository.git.commit("-m", f"'{commit_msg}'") - repository.git.push("-u", "origin", branch_name) - - -def _create_pull_request(github_repository: Repository, branch_name: str, base: str): - """Create a pull request. - - Args: - github_repository: Github repository client. - branch_name: Branch name from which the pull request will be created. - base: Base branch to which the pull request will be created. - - Returns: - The pull request URL. - """ - logging.info("create pull request %s", branch_name) - pull_request = github_repository.create_pull( - title=ACTIONS_PULL_REQUEST_TITLE, - body=ACTIONS_PULL_REQUEST_BODY, - base=base, - head=branch_name, - ) +BRANCH_PREFIX = "upload-charm-docs" +DEFAULT_BRANCH_NAME = f"{BRANCH_PREFIX}/migrate" + + +class RepositoryClient: + """Wrapper for git/git-server related functionalities.""" + + def __init__(self, repository: Repo, github_repository: Repository) -> None: + """Construct. + + Args: + repo: Client for interacting with local git repository. + github_repository: Client for interacting with remote github repository. + """ + self._git_repo = repository + self._github_repo = github_repository + self._configure_git_user() + + def _configure_git_user(self) -> None: + """Configure action git profile defaults. + + Configured profile appears as the git committer. 
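The identity configured here is what shows up as the committer on the migration commits. A quick way to check it with GitPython, assuming the current directory holds the checked-out repository:

    from git.repo import Repo

    repo = Repo(".")  # repository path is a placeholder
    reader = repo.config_reader()
    assert reader.get_value("user", "name") == "actions-bot"
    assert reader.get_value("user", "email") == "actions-bot@users.noreply.github.com"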
+ """ + config_writer = self._git_repo.config_writer() + config_writer.set_value("user", "name", ACTIONS_USER_NAME) + config_writer.set_value("user", "email", ACTIONS_USER_EMAIL) + # there is no context manager, config writer must be manually released. + config_writer.release() + + def _check_branch_exists(self, branch_name: str) -> bool: + """Check if branch exists on remote. + + Args: + branch_name: Branch name to check on remote. + + Returns: + True if branch already exists, False otherwise. + """ + try: + self._git_repo.git.fetch("origin", branch_name) + return True + except GitCommandError as exc: + if "couldn't find remote ref" in exc.stderr: + return False + raise RepositoryClientError( + f"Unexpected error checking existing branch. {exc=!r}" + ) from exc + + def _create_branch(self, branch_name: str, commit_msg: str) -> None: + """Create new branch with existing changes. + + Args: + branch_name: New branch name. + commit_msg: Commit message for current changes. + """ + logging.info("create new branch %s", branch_name) + try: + self._git_repo.git.checkout("-b", branch_name) + self._git_repo.git.add(".") + self._git_repo.git.commit("-m", f"'{commit_msg}'") + self._git_repo.git.push("-u", "origin", branch_name) + except GitCommandError as exc: + raise RepositoryClientError(f"Unexpected error creating new branch. {exc=!r}") from exc + + def _create_github_pull_request(self, branch_name: str, base: str) -> str: + """Create a pull request from given branch to base. + + Args: + branch_name: Branch name from which the pull request will be created. + base: Base branch to which the pull request will be created. + + Returns: + The web url to pull request page. + """ + logging.info("create pull request %s", branch_name) + try: + pull_request = self._github_repo.create_pull( + title=ACTIONS_PULL_REQUEST_TITLE, + body=ACTIONS_PULL_REQUEST_BODY, + base=base, + head=branch_name, + ) + except GithubException as exc: + raise RepositoryClientError( + f"Unexpected error creating pull request. {exc=!r}" + ) from exc + + return pull_request.html_url + + def create_pull_request( + self, + ) -> str: + """Create pull request for changes in given repository path. + + Raises: + InputError: if pull request branch name is invalid or the a branch + with same name already exists. + + Returns: + Pull request URL string. None if no pull request was created/modified. + """ + base = self._git_repo.active_branch.name + if base == DEFAULT_BRANCH_NAME: + raise InputError( + f"Pull request branch cannot be named {DEFAULT_BRANCH_NAME}." + "Please try again after changing the branch name." + ) + if not self._git_repo.is_dirty(untracked_files=True): + raise InputError("No files seem to be migrated. Please add contents upstream first.") + if self._check_branch_exists(branch_name=DEFAULT_BRANCH_NAME): + raise InputError( + f"Branch {DEFAULT_BRANCH_NAME} already exists." + f"Please try again after removing {DEFAULT_BRANCH_NAME}." + ) + + self._create_branch( + branch_name=DEFAULT_BRANCH_NAME, + commit_msg=ACTIONS_COMMIT_MESSAGE, + ) - return pull_request.html_url + return self._create_github_pull_request( + branch_name=DEFAULT_BRANCH_NAME, + base=base, + ) -def get_repository_name(remote_url: str): - """Get repository name. +def _get_repository_name_from_git_url(remote_url: str): + """Get repository name from git remote URL. Args: - remote_url: URL of remote repository. \ - e.g. https://github.com/canonical/upload-charm-docs.git + remote_url: URL of remote repository. + e.g. 
https://github.com/canonical/upload-charm-docs.git Raises: GitError if invalid remote url. @@ -131,18 +164,19 @@ def get_repository_name(remote_url: str): """ matched_repository = HTTPS_URL_PATTERN.match(remote_url) if not matched_repository: - raise GitError(f"Invalid remote repository name {remote_url=!r}") + raise InputError(f"Invalid remote repository url {remote_url=!r}") return matched_repository.group(1) -def create_github(access_token: typing.Any): +def create_repository_client(access_token: str, base_path: Path): """Create a Github instance to handle communication with Github server. Args: access_token: Access token that has permissions to open a pull request. + base_path: Path where local .git resides in. Raises: - InputError: if invalid token format input. + InputError: if invalid inputs are provided. Returns: A Github repository instance. @@ -151,66 +185,10 @@ def create_github(access_token: typing.Any): raise InputError( f"Invalid 'access_token' input, it must be non-empty, got {access_token=!r}" ) - if not isinstance(access_token, str): - raise InputError( - f"Invalid 'access_token' input, it must be a string, got {access_token=!r}" - ) - - return Github(login_or_token=access_token) - - -def create_pull_request( - repository: Repo, - github_repository: Repository, - branch_name: str | None, -) -> str: - """Create pull request for changes in given repository path. - - Args: - access_token: Github access token. - repository_path: Repository root where .git resides. - branch_name: Pull request branch name. - - Raises: - InputError: if branch name configuration is invalid. - - Returns: - Pull request URL string. None if no pull request was created/modified. - """ - branch_name = branch_name or DEFAULT_BRANCH_NAME - base = repository.active_branch.name - if base == branch_name: - raise InputError("Branch name cannot be equal to base branch.") - - if not repository.is_dirty(untracked_files=True): - return PR_LINK_NO_CHANGE - - _configure_user(repository=repository) - - if _check_branch_exists(repository=repository, branch_name=branch_name): - _merge_existing_branch( - repository=repository, - branch_name=branch_name, - commit_msg=ACTIONS_COMMIT_MESSAGE, - ) - else: - _create_branch( - repository=repository, - branch_name=branch_name, - commit_msg=ACTIONS_COMMIT_MESSAGE, - ) - repository.git.checkout(base) - - open_pulls = github_repository.get_pulls( - state="open", head=f"{github_repository.full_name}:{branch_name}" - ) - if not list(open_pulls): - pr_url = _create_pull_request( - github_repository=github_repository, - branch_name=branch_name, - base=base, - ) - else: - pr_url = open_pulls[0].html_url - return pr_url + local_repo = Repo(base_path) + github_client = Github(login_or_token=access_token) + remote_url = local_repo.remote().url + repository_fullname = _get_repository_name_from_git_url(remote_url=remote_url) + remote_repo = github_client.get_repo(repository_fullname) + return RepositoryClient(repository=local_repo, github_repository=remote_repo) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index b03760cd..b7b6e8b4 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -8,8 +8,10 @@ from pathlib import Path import pytest +from git.repo import Repo +from github.Repository import Repository -from src import index +from src import index, pull_request from src.discourse import Discourse @@ -34,3 +36,10 @@ def index_file_content(tmp_path: Path): content = "content 1" index_file.write_text(content, encoding="utf-8") return content + + 
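The repository_client fixture added below builds a RepositoryClient directly from test doubles; in the action itself the client is expected to come from create_repository_client, roughly as in this sketch (token and path values are placeholders):

    from pathlib import Path

    from src import pull_request

    repository_client = pull_request.create_repository_client(
        access_token="<github-access-token>",  # placeholder
        base_path=Path("."),  # placeholder: directory that contains .git
    )
    pr_url = repository_client.create_pull_request()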
+@pytest.fixture() +def repository_client(repository: tuple[Repo, Path], mock_github_repo: Repository): + """Get repository client.""" + (repo, _) = repository + return pull_request.RepositoryClient(repository=repo, github_repository=mock_github_repo) diff --git a/tests/unit/helpers.py b/tests/unit/helpers.py index 7797c0b6..9c602d9a 100644 --- a/tests/unit/helpers.py +++ b/tests/unit/helpers.py @@ -45,11 +45,3 @@ def path_to_markdown(path: Path) -> Path: Path with last path being a markdown file. """ return Path(f"{path}.md") - - -def create_repository_author(repo: Repo) -> None: - """Create repository author""" - writer = repo.config_writer() - writer.set_value("user", "name", "repo_user") - writer.set_value("user", "email", "repo_email") - writer.release() diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index 877fa108..ed909cd9 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -6,430 +6,295 @@ # Need access to protected functions for testing # pylint: disable=protected-access -import typing -from os.path import dirname from pathlib import Path from unittest import mock import pytest from git.exc import GitCommandError from git.repo import Repo -from github import Github +from github.GithubException import GithubException from github.PullRequest import PullRequest from github.Repository import Repository from src import pull_request -from src.exceptions import GitError, InputError +from src.exceptions import InputError, RepositoryClientError +from src.pull_request import RepositoryClient -from .helpers import assert_substrings_in_string, create_repository_author +from .helpers import assert_substrings_in_string -def test__configure_user(repository: tuple[Repo, Path]): +def test___init__(repository: tuple[Repo, Path], mock_github_repo: Repository): """ - arrange: given a git repository without profile - act: when _configure_user is called - assert: default user and email are configured as profile. + arrange: given a local git repository client and mock github repository client + act: when RepositoryClient is initialized + assert: RepositoryClient is created and git user is configured. """ (repo, _) = repository - pull_request._configure_user(repo) - - reader = repo.config_reader() - assert reader.get_value("user", "name") == pull_request.ACTIONS_USER_NAME - assert reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL - - -def test__create_pull_request(mock_pull_request: PullRequest, mock_github_repo: Repository): - """ - arrange: given a mocked github repository client and a mocked pull request - act: when _create_pull_request is called with dry_run False, - assert: a pull request link is returned. - """ - assert ( - pull_request._create_pull_request( - github_repository=mock_github_repo, - branch_name="branch-1", - base="base-1", - ) - == mock_pull_request.html_url + repository_client = pull_request.RepositoryClient( + repository=repo, github_repository=mock_github_repo ) - -@pytest.mark.parametrize( - "remote_url", - [ - pytest.param("https://gitlab.com/canonical/upload-charm-docs.git", id="non-github url"), - pytest.param("http://gitlab.com/canonical/upload-charm-docs.git", id="http url"), - pytest.param("git@github.com:yanksyoon/actionrefer.git", id="ssh url"), - ], -) -def test_get_repository_name_invalid(remote_url: str): - """ - arrange: given a non-valid remote_url - act: when _get_repository_name is called - assert: GitError is raised. 
- """ - with pytest.raises(GitError): - pull_request.get_repository_name(remote_url=remote_url) + config_reader = repository_client._git_repo.config_reader() + assert config_reader.get_value("user", "name") == pull_request.ACTIONS_USER_NAME + assert config_reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL -# Pylint doesn't understand how the walrus operator works -# pylint: disable=undefined-variable,unused-variable,too-many-locals -@pytest.mark.parametrize( - "remote_url, expected_repository_name", - [ - pytest.param( - "https://github.com/canonical/upload-charm-docs", - valid_url := "canonical/upload-charm-docs", - id="valid url", - ), - pytest.param( - "https://github.com/canonical/upload-charm-docs.git", - valid_url, - id="valid git url", - ), - ], -) -# pylint: enable=undefined-variable,unused-variable -def test_get_repository_name(remote_url: str, expected_repository_name: str): +def test__check_branch_exists_error( + monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient +): """ - arrange: given a non-valid remote_url - act: when _get_repository_name is called - assert: GitError is raised. + arrange: given RepositoryClient with a mocked local git repository client that raises an + exception + act: when _check_branch_exists is called + assert: RepositoryClientError is raised from GitCommandError. """ - assert pull_request.get_repository_name(remote_url=remote_url) == expected_repository_name + err_str = "mocked error" + mock_git_repository = mock.MagicMock(spec=Repo) + mock_git_repository.git.fetch.side_effect = [GitCommandError(err_str)] + monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) + with pytest.raises(RepositoryClientError) as exc: + repository_client._check_branch_exists("branchname-1") -def test__check_branch_exists_error(tmp_path: Path): - """ - arrange: given an invalid repository with no origin upstream - act: when _check_branch_exists is called with a branch_name that doesn't exist - assert: a GitCommandError is raised. - """ - branch_name = "branch_name" - repo = Repo.init(tmp_path) - with pytest.raises(GitCommandError): - pull_request._check_branch_exists(repo, branch_name) + assert_substrings_in_string( + ("unexpected error checking existing branch", err_str), str(exc.value).lower() + ) -def test__check_branch_exists_not_exist(repository: tuple[Repo, Path]): +def test__check_branch_not_exists(repository_client: RepositoryClient): """ - arrange: given a git repository - act: when _check_branch_exists is called with a branch_name that does not exist + arrange: given RepositoryClient with an upstream repository + act: when _check_branch_exists is called assert: False is returned. """ - (repo, _) = repository - branch_name = "no-such-branchname" - assert not pull_request._check_branch_exists(repo, branch_name) + assert not repository_client._check_branch_exists("no-such-branchname") def test__check_branch_exists( - upstream_repository: tuple[Repo, Path], repository: tuple[Repo, Path] + repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] ): """ - arrange: given a local git repository and an upstream repository with a branch - act: when _check_branch_exists is called with a branch_name that exists + arrange: given RepositoryClient with an upstream repository with check-branch-exists branch + act: when _check_branch_exists is called assert: True is returned. 
""" - branch_name = "branch_name" - (upstream_repo, _) = upstream_repository - upstream_repo.create_head(branch_name) - (repo, _) = repository - assert pull_request._check_branch_exists(repo, branch_name) + (upstream_repo, upstream_path) = upstream_repository + branch_name = "check-branch-exists" + head = upstream_repo.create_head(branch_name) + head.checkout() + (upstream_path / "filler-file").touch() + upstream_repo.git.add(".") + upstream_repo.git.commit("-m", "test") + assert repository_client._check_branch_exists(branch_name) -@pytest.mark.parametrize( - "existing_files, new_files, expected_files", - [ - pytest.param( - [original_file := (Path("text.txt"), "original")], - [test_file := (Path("test.txt"), "test")], - [ - original_file, - test_file, - ], - id="simple merge", - ), - pytest.param( - [original_file], - [updated_file := (Path("text.txt"), "update")], - [updated_file], - id="merge incoming", - ), - ], -) -def test__merge_existing_branch( - existing_files: list[tuple[Path, str]], - new_files: list[tuple[Path, str]], - expected_files: list[tuple[Path, str]], - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], + upstream_repo.git.checkout("main") + upstream_repo.git.branch("-D", branch_name) + + +def test__create_branch_error( + monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ - arrange: given a local git repository with changes and \ - a remote repository with existing branch with existing files - act: when _merge_existing_branch is called with existing branch name - assert: files are merged with expected content upstream. + arrange: given RepositoryClient with a mocked local git repository that raises an exception + act: when _create_branch is called + assert: RepositoryClientError is raised. 
""" - branch_name = "test_branch" - commit_message = "test_message" - (upstream, upstream_path) = upstream_repository - upstream_head = upstream.create_head(branch_name) - upstream_head.checkout() - for (file, content) in existing_files: - (upstream_path / file).touch() - (upstream_path / file).write_text(content, encoding="utf-8") - upstream.git.add(".") - upstream.git.commit("-m", "'add upstream'") - upstream.git.checkout("main") - (repo, repo_path) = repository - for (file, content) in new_files: - (repo_path / file).touch() - (repo_path / file).write_text(content, encoding="utf-8") - repo.git.fetch("origin", branch_name) - create_repository_author(repo) - - pull_request._merge_existing_branch( - repository=repo, branch_name=branch_name, commit_msg=commit_message - ) + err_str = "mocked error" + mock_git_repository = mock.MagicMock(spec=Repo) + mock_git_repository.git.fetch.side_effect = [GitCommandError(err_str)] + monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) - upstream.git.checkout(branch_name) - for (file, content) in expected_files: - assert (upstream_path / file).is_file() - assert (upstream_path / file).read_text(encoding="utf-8") == content + with pytest.raises(RepositoryClientError) as exc: + repository_client._create_branch(branch_name="test-create-branch", commit_msg="commit-1") + + assert_substrings_in_string( + ("unexpected error checking existing branch", err_str), str(exc.value).lower() + ) -@pytest.mark.parametrize( - "new_files", - [ - pytest.param([test_file], id="single file"), - pytest.param( - [test_file, nested_file := (Path("nested/file.txt"), "nested file content")], - id="nested file", - ), - ], -) def test__create_branch( - new_files: list[tuple[Path, str]], - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], + repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] ): """ - arrange: given a local git repository with new files - act: when _create_branch is called with new branch name - assert: new files are created upstream. + arrange: given RepositoryClient + act: when _create_branch is called + assert: a new branch is successfully created upstream. """ - branch_name = "test_branch" - (upstream, upstream_path) = upstream_repository - (repo, repo_path) = repository - for (file, content) in new_files: - Path(dirname(repo_path / file)).mkdir(parents=True, exist_ok=True) - (repo_path / file).touch() - (repo_path / file).write_text(content, encoding="utf-8") - create_repository_author(repo) + (upstream_repo, _) = upstream_repository + branch_name = "test-create-branch" - pull_request._create_branch(repository=repo, branch_name=branch_name, commit_msg="test_commit") + repository_client._create_branch(branch_name=branch_name, commit_msg="commit-1") - upstream.git.checkout(branch_name) - for (file, content) in new_files: - assert (upstream_path / file).is_file() + assert any(branch for branch in upstream_repo.branches if branch.name == branch_name) -@pytest.mark.parametrize( - "access_token, expected_error_msg_contents", - [ - pytest.param( - "", - (err_strs := ("invalid", "access_token", "input", "must be non-empty")), - id="No access token", - ), - pytest.param( - {}, - err_strs, - id="Invalid access token type(empty)", - ), - pytest.param( - 1234, - ("invalid", "access_token", "input", "must be a string"), - id="invalid access token type(numeric)", - ), - ], -) -def test_create_github_instance_error( - access_token: typing.Any, expected_error_msg_contents: tuple[str, ...] 
+def test__create_github_pull_request_error( + monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ - arrange: Given an invalid access token input - act: when create_github_repository_instance is called - assert: InputError is raised with invalid access token info. + arrange: given RepositoryClient with a mocked github repository client that raises an exception + act: when _create_github_pull_request is called + assert: RepositoryClientError is raised. """ - with pytest.raises(InputError) as exc_info: - pull_request.create_github(access_token=access_token) + mock_github_repository = mock.MagicMock(spec=Repository) + mock_github_repository.create_pull.fetch.side_effect = [GithubException] + monkeypatch.setattr(repository_client, "_github_repo", mock_github_repository) + + with pytest.raises(RepositoryClientError) as exc: + repository_client._create_github_pull_request( + branch_name="branchname-1", base="base-branchname" + ) - assert_substrings_in_string(expected_error_msg_contents, str(exc_info.value).lower()) + assert_substrings_in_string( + ("unexpected error creating pull request", "githubexception"), str(exc.value).lower() + ) -def test_create_github_instance(): +def test__create_github_pull_request( + repository_client: RepositoryClient, mock_pull_request: PullRequest +): """ - arrange: Given a valid access token - act: when create_github_repository_instance is called - assert: valid Github instance is returned. + arrange: given RepositoryClient with a mocked github client that returns a mocked pull request + act: when _create_github_pull_request is called + assert: a pull request's page link is returned. """ - # bandit will not let hardcoded passwords pass - access_token = "valid-access-token" # nosec - assert isinstance(pull_request.create_github(access_token=access_token), Github) + returned_url = repository_client._create_github_pull_request("branchname-1", "base-branchname") + + assert returned_url == mock_pull_request.html_url -def test_create_pull_request_invalid_branch(tmp_path: Path, mock_github_repo: Repository): +def test_create_pull_request_on_default_branchname( + monkeypatch: pytest.MonkeyPatch, + repository_client: RepositoryClient, +): """ - arrange: given a repository and a mocked github repository and a branch_name that is equal - to the base branch + arrange: given RepositoryClient with a mocked local git client that is on default branchname act: when create_pull_request is called - assert: InputError is raised with error message. + assert: InputError is raised. """ - branch_name = "test-branch" - # Setting up an exiting branch requires a head in an empty repository. - # Committing an empty file allows so. 
- repo = Repo.init(tmp_path) - create_repository_author(repo) - (tmp_path / "test.txt").touch() - repo.git.add(".") - repo.git.commit("-m", "test commit") - current_branch = repo.create_head(branch_name) - current_branch.checkout() - - with pytest.raises(InputError) as exc_info: - pull_request.create_pull_request( - repository=repo, - github_repository=mock_github_repo, - branch_name=branch_name, - ) + mock_git_repository = mock.MagicMock(spec=Repo) + mock_git_repository.active_branch_name = pull_request.DEFAULT_BRANCH_NAME + monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) - assert_substrings_in_string( - ("branch name", "cannot be equal", "base branch"), str(exc_info.value).lower() - ) + with pytest.raises(InputError): + repository_client.create_pull_request() -def test_create_pull_request_no_change( - repository: tuple[Repo, Path], mock_github_repo: Repository +def test_create_pull_request_no_dirty_files( + repository_client: RepositoryClient, ): """ - arrange: given a repository and a mocked github repository with no changed file + arrange: given RepositoryClient with no dirty files act: when create_pull_request is called - assert: Nothing is returned. + assert: InputError is raised. """ - branch_name = "test_branch_name" - (repo, _) = repository - - returned_pr = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name - ) - - assert returned_pr == pull_request.PR_LINK_NO_CHANGE + with pytest.raises(InputError): + repository_client.create_pull_request() def test_create_pull_request_existing_branch( - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], - mock_github_repo: Repository, + repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] ): """ - arrange: given a mocked repository with a new file and a mocked github repository \ - with an existing branch and no existing pull request + arrange: given RepositoryClient and an upstream repository that already has migration branch act: when create_pull_request is called - assert: a github PR link is returned. + assert: InputError is raised. 
""" - branch_name = "test_branch_name" - (repo, repo_path) = repository - test_file = "file.md" - (repo_path / test_file).touch() - (upstream, upstream_path) = upstream_repository - upstream.create_head(branch_name) - mock_github_repo = mock.MagicMock(spec=Repository) - - pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name - ) + (upstream_repo, upstream_path) = upstream_repository + branch_name = pull_request.DEFAULT_BRANCH_NAME + head = upstream_repo.create_head(branch_name) + head.checkout() + (upstream_path / "filler-file").touch() + upstream_repo.git.add(".") + upstream_repo.git.commit("-m", "test") - upstream.git.checkout(branch_name) - (upstream_path / test_file).is_file() - assert pr_link is not None - mock_github_repo.get_pulls.assert_called_once_with( - state="open", - head=f"{mock_github_repo.full_name}:{branch_name}", - ) - mock_github_repo.create_pull.assert_called_once_with( - title=pull_request.ACTIONS_PULL_REQUEST_TITLE, - body=pull_request.ACTIONS_PULL_REQUEST_BODY, - base="main", - head=branch_name, - ) + with pytest.raises(InputError): + repository_client.create_pull_request() + + upstream_repo.git.checkout("main") + upstream_repo.git.branch("-D", branch_name) def test_create_pull_request( - repository: tuple[Repo, Path], + repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path], - mock_github_repo: mock.MagicMock, + repository: tuple[Repo, Path], + mock_pull_request: PullRequest, ): """ - arrange: given a mocked repository with a new file and a mocked github repository \ - and no existing pull request + arrange: given RepositoryClient and a repository with changed files act: when create_pull_request is called - assert: a github PR link is returned. + assert: changes are pushed to default branch and pull request link is returned. 
""" - branch_name = "test_branch_name" - (repo, repo_path) = repository - test_file = "file.md" - (repo_path / test_file).touch() + (_, repo_path) = repository + filler_filename = "filler-file" + filler_file = repo_path / filler_filename + filler_text = "filler-text" + filler_file.write_text(filler_text) - pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name - ) + returned_pr_link = repository_client.create_pull_request() - (upstream, upstream_path) = upstream_repository - upstream.git.checkout(branch_name) - (upstream_path / test_file).is_file() - assert pr_link is not None - mock_github_repo.get_pulls.assert_called_once_with( - state="open", - head=f"{mock_github_repo.full_name}:{branch_name}", - ) - mock_github_repo.create_pull.assert_called_once_with( - title=pull_request.ACTIONS_PULL_REQUEST_TITLE, - body=pull_request.ACTIONS_PULL_REQUEST_BODY, - base="main", - head=branch_name, - ) + (upstream_repo, upstream_path) = upstream_repository + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + assert returned_pr_link == mock_pull_request.html_url + assert (upstream_path / filler_filename).read_text() == filler_text -def test_create_pull_request_existing_pr( - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], - mock_github_repo: mock.MagicMock, - mock_pull_request: PullRequest, -): +@pytest.mark.parametrize( + "remote_url", + [ + pytest.param("https://gitlab.com/canonical/upload-charm-docs.git", id="non-github url"), + pytest.param("http://gitlab.com/canonical/upload-charm-docs.git", id="http url"), + pytest.param("git@github.com:yanksyoon/actionrefer.git", id="ssh url"), + ], +) +def test_get_repository_name_invalid(remote_url: str): """ - arrange: given a mocked repository with a new file and a mocked github repository \ - and no existing pull request - act: when create_pull_request is called - assert: a github PR link is returned. + arrange: given a non-valid remote_url + act: when _get_repository_name is called + assert: InputError is raised. """ - branch_name = "test_branch_name" - (repo, repo_path) = repository - create_repository_author(repo) - test_file = "file.md" - (repo_path / test_file).touch() - mock_github_repo.get_pulls.side_effect = [[mock_pull_request]] - - pr_link = pull_request.create_pull_request( - repository=repo, github_repository=mock_github_repo, branch_name=branch_name - ) + with pytest.raises(InputError): + pull_request._get_repository_name_from_git_url(remote_url=remote_url) + - (upstream, upstream_path) = upstream_repository - upstream.git.checkout(branch_name) - (upstream_path / test_file).is_file() - assert pr_link == mock_pull_request.html_url - mock_github_repo.get_pulls.assert_called_once_with( - state="open", - head=f"{mock_github_repo.full_name}:{branch_name}", +@pytest.mark.parametrize( + "remote_url, expected_repository_name", + [ + pytest.param( + "https://github.com/canonical/upload-charm-docs", + "canonical/upload-charm-docs", + id="valid url", + ), + pytest.param( + "https://github.com/canonical/upload-charm-docs.git", + "canonical/upload-charm-docs", + id="valid git url", + ), + ], +) +def test_get_repository_name(remote_url: str, expected_repository_name: str): + """ + arrange: given a non-valid remote_url + act: when _get_repository_name is called + assert: GitError is raised. 
+ """ + assert ( + pull_request._get_repository_name_from_git_url(remote_url=remote_url) + == expected_repository_name ) + + +def test_create_repository_client_no_token(repository: tuple[Repo, Path]): + """ + arrange: given valid repository path and empty access_token + act: when create_repository_client_no_token is called + assert: InputError is raised. + """ + (_, repo_path) = repository + test_token = "" + + with pytest.raises(InputError): + pull_request.create_repository_client(access_token=test_token, base_path=repo_path) From 57ca99b6d1e5f361209977e2b7e80bb19698c1cd Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 5 Jan 2023 21:33:34 +0800 Subject: [PATCH 057/107] refactor pull_request module to repository client --- main.py | 18 +-- src/__init__.py | 52 ++------ src/migration.py | 2 +- src/pull_request.py | 92 ++++++++----- src/types_.py | 29 +++++ tests/conftest.py | 38 +++--- tests/factories.py | 15 +++ tests/integration/test___init__.py | 203 ++++++++++------------------- tests/unit/conftest.py | 17 +-- tests/unit/helpers.py | 2 - tests/unit/test___init__.py | 71 +++------- tests/unit/test_pull_request.py | 138 +++++++++++++++----- 12 files changed, 340 insertions(+), 337 deletions(-) diff --git a/main.py b/main.py index 74e296c2..b6fca908 100755 --- a/main.py +++ b/main.py @@ -11,14 +11,12 @@ import pathlib from functools import partial -from git.repo import Repo - -from src import run +from src import run, types_ from src.discourse import create_discourse # pylint: disable=too-many-locals -def main(): +def main() -> None: """Execute the action.""" logging.basicConfig(level=logging.INFO) @@ -30,7 +28,6 @@ def main(): discourse_api_username = os.getenv("INPUT_DISCOURSE_API_USERNAME") discourse_api_key = os.getenv("INPUT_DISCOURSE_API_KEY") github_access_token = os.getenv("INPUT_GITHUB_TOKEN") - branch_name = os.getenv("INPUT_BRANCH_NAME") # Execute action create_discourse_kwargs = { @@ -41,19 +38,16 @@ def main(): } base_path = pathlib.Path() discourse = create_discourse(**create_discourse_kwargs) - repo = Repo(path=base_path) urls_with_actions_dict = run( base_path=base_path, discourse=discourse, - dry_run=dry_run, - delete_pages=delete_topics, - repo=repo, - github_access_token=github_access_token, - branch_name=branch_name, + user_inputs=types_.UserInputs( + dry_run=dry_run, delete_pages=delete_topics, github_access_token=github_access_token + ), ) # Write output - github_output = pathlib.Path(os.getenv("GITHUB_OUTPUT")) + github_output = pathlib.Path(os.getenv("GITHUB_OUTPUT", "")) compact_json = partial(json.dumps, separators=(",", ":")) urls_with_actions = compact_json(urls_with_actions_dict) if urls_with_actions_dict: diff --git a/src/__init__.py b/src/__init__.py index 0a84658a..d650c0a2 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -5,9 +5,6 @@ from pathlib import Path -from git.repo import Repo -from github.Repository import Repository - from .action import DRY_RUN_NAVLINK_LINK, FAIL_NAVLINK_LINK from .action import run_all as run_all_actions from .discourse import Discourse @@ -20,9 +17,9 @@ from .migration import assert_migration_success, get_docs_metadata from .migration import run as run_migrate from .navigation_table import from_page as navigation_table_from_page -from .pull_request import create_github, create_pull_request, get_repository_name +from .pull_request import RepositoryClient, create_pull_request, create_repository_client from .reconcile import run as run_reconcile -from .types_ import ActionResult, Metadata +from .types_ import 
ActionResult, Metadata, UserInputs GETTING_STARTED = ( "To get started with upload-charm-docs, " @@ -74,12 +71,7 @@ def _run_reconcile( # pylint: disable=too-many-arguments def _run_migrate( - base_path: Path, - metadata: Metadata, - discourse: Discourse, - repo: Repo, - github_repo: Repository, - branch_name: str | None, + base_path: Path, metadata: Metadata, discourse: Discourse, repository: RepositoryClient ) -> dict[str, str]: """Migrate existing docs from charmhub to local repository. @@ -87,9 +79,7 @@ def _run_migrate( base_path: The base path to look for the metadata file in. metadata: A metadata file with a link to the docs url. discourse: A client to the documentation server. - repo: A git-binding for the current repository. - github_repo: A client for communicating with github. - branch_name: The branch name to base the pull request from. + repository: Repository client for managing both local and remote git repositories. Returns: A Pull Request link to the Github repository. @@ -108,32 +98,18 @@ def _run_migrate( ) assert_migration_success(migration_results=migration_results) - pr_link = create_pull_request( - repository=repo, github_repository=github_repo, branch_name=branch_name - ) + pr_link = create_pull_request(repository=repository) return {pr_link: ActionResult.SUCCESS} -def run( - base_path: Path, - discourse: Discourse, - dry_run: bool, - delete_pages: bool, - repo: Repo, - github_access_token: str | None, - branch_name: str | None, -) -> dict[str, str]: +def run(base_path: Path, discourse: Discourse, user_inputs: UserInputs) -> dict[str, str]: """Interact with charmhub to upload documentation or migrate to local repository. Args: base_path: The base path to look for the metadata file in. discourse: A client to the documentation server. - dry_run: If enabled, only log the action that would be taken. - delete_pages: Whether to delete pages that are no longer needed. - repo: A git-binding client for current repository. - github_access_token: A Personal Access Token(PAT) or access token with repository access. - branch_name: A branch name for creating a Pull Request. + user_inputs: Configurable inputs for running upload-charm-docs. Returns: All the URLs that had an action with the result of that action. 
@@ -141,23 +117,21 @@ def run( metadata = get_metadata(base_path) has_docs_dir = has_docs_directory(base_path=base_path) if metadata.docs and not has_docs_dir: - repository = get_repository_name(repo.remote().url) - github = create_github(access_token=github_access_token) - github_repo = github.get_repo(repository) + repository = create_repository_client( + access_token=user_inputs.github_access_token, base_path=base_path + ) return _run_migrate( base_path=base_path, metadata=metadata, discourse=discourse, - repo=repo, - github_repo=github_repo, - branch_name=branch_name, + repository=repository, ) if has_docs_dir: return _run_reconcile( base_path=base_path, metadata=metadata, discourse=discourse, - dry_run=dry_run, - delete_pages=delete_pages, + dry_run=user_inputs.dry_run, + delete_pages=user_inputs.delete_pages, ) raise InputError(GETTING_STARTED) diff --git a/src/migration.py b/src/migration.py index 22d59e5d..a398d55f 100644 --- a/src/migration.py +++ b/src/migration.py @@ -13,7 +13,7 @@ from .docs_directory import calculate_table_path EMPTY_DIR_REASON = "" -GITKEEP_FILE = ".gitkeep" +GITKEEP_FILENAME = ".gitkeep" def _validate_row_levels(table_rows: list[types_.TableRow]): diff --git a/src/pull_request.py b/src/pull_request.py index 0900b763..4ce7dd18 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -34,7 +34,7 @@ def __init__(self, repository: Repo, github_repository: Repository) -> None: """Construct. Args: - repo: Client for interacting with local git repository. + repository: Client for interacting with local git repository. github_repository: Client for interacting with remote github repository. """ self._git_repo = repository @@ -52,7 +52,7 @@ def _configure_git_user(self) -> None: # there is no context manager, config writer must be manually released. config_writer.release() - def _check_branch_exists(self, branch_name: str) -> bool: + def check_branch_exists(self, branch_name: str) -> bool: """Check if branch exists on remote. Args: @@ -71,7 +71,7 @@ def _check_branch_exists(self, branch_name: str) -> bool: f"Unexpected error checking existing branch. {exc=!r}" ) from exc - def _create_branch(self, branch_name: str, commit_msg: str) -> None: + def create_branch(self, branch_name: str, commit_msg: str) -> None: """Create new branch with existing changes. Args: @@ -87,7 +87,7 @@ def _create_branch(self, branch_name: str, commit_msg: str) -> None: except GitCommandError as exc: raise RepositoryClientError(f"Unexpected error creating new branch. {exc=!r}") from exc - def _create_github_pull_request(self, branch_name: str, base: str) -> str: + def create_github_pull_request(self, branch_name: str, base: str) -> str: """Create a pull request from given branch to base. Args: @@ -112,44 +112,68 @@ def _create_github_pull_request(self, branch_name: str, base: str) -> str: return pull_request.html_url - def create_pull_request( - self, - ) -> str: - """Create pull request for changes in given repository path. + def is_dirty(self) -> bool: + """Check if repository path has any changes including new files. - Raises: - InputError: if pull request branch name is invalid or the a branch - with same name already exists. + Returns: + True if any changes have occurred. + """ + return self._git_repo.is_dirty(untracked_files=True) + + def get_active_branch(self) -> str: + """Get name of currently active branch on local git repository. Returns: - Pull request URL string. None if no pull request was created/modified. + Name of currently active branch. 
""" - base = self._git_repo.active_branch.name - if base == DEFAULT_BRANCH_NAME: - raise InputError( - f"Pull request branch cannot be named {DEFAULT_BRANCH_NAME}." - "Please try again after changing the branch name." - ) - if not self._git_repo.is_dirty(untracked_files=True): - raise InputError("No files seem to be migrated. Please add contents upstream first.") - if self._check_branch_exists(branch_name=DEFAULT_BRANCH_NAME): - raise InputError( - f"Branch {DEFAULT_BRANCH_NAME} already exists." - f"Please try again after removing {DEFAULT_BRANCH_NAME}." - ) + return self._git_repo.active_branch.name - self._create_branch( - branch_name=DEFAULT_BRANCH_NAME, - commit_msg=ACTIONS_COMMIT_MESSAGE, - ) + def set_active_branch(self, branch_name: str) -> None: + """Set current active branch to an given branch that already exists.""" + self._git_repo.git.checkout(branch_name) + + +def create_pull_request(repository: RepositoryClient) -> str: + """Create pull request for changes in given repository path. + + Raises: + InputError: if pull request branch name is invalid or the a branch + with same name already exists. - return self._create_github_pull_request( - branch_name=DEFAULT_BRANCH_NAME, - base=base, + Returns: + Pull request URL string. None if no pull request was created/modified. + """ + base = repository.get_active_branch() + if base == DEFAULT_BRANCH_NAME: + raise InputError( + f"Pull request branch cannot be named {DEFAULT_BRANCH_NAME}." + "Please try again after changing the branch name." ) + if not repository.is_dirty(): + raise InputError("No files seem to be migrated. Please add contents upstream first.") + if repository.check_branch_exists(branch_name=DEFAULT_BRANCH_NAME): + raise InputError( + f"Branch {DEFAULT_BRANCH_NAME} already exists." + f"Please try again after removing {DEFAULT_BRANCH_NAME}." + ) + + repository.create_branch( + branch_name=DEFAULT_BRANCH_NAME, + commit_msg=ACTIONS_COMMIT_MESSAGE, + ) + pull_request_web_link = repository.create_github_pull_request( + branch_name=DEFAULT_BRANCH_NAME, + base=base, + ) + + # reset active branch back to original branch to ensure following actions + # do not run on an newly created branch + repository.set_active_branch(branch_name=base) + + return pull_request_web_link -def _get_repository_name_from_git_url(remote_url: str): +def _get_repository_name_from_git_url(remote_url: str) -> str: """Get repository name from git remote URL. Args: @@ -168,7 +192,7 @@ def _get_repository_name_from_git_url(remote_url: str): return matched_repository.group(1) -def create_repository_client(access_token: str, base_path: Path): +def create_repository_client(access_token: str | None, base_path: Path) -> RepositoryClient: """Create a Github instance to handle communication with Github server. Args: diff --git a/src/types_.py b/src/types_.py index d7cf2671..33bd0164 100644 --- a/src/types_.py +++ b/src/types_.py @@ -10,6 +10,35 @@ from urllib.parse import urlparse +@dataclasses.dataclass +class MigrationInputs: + """Configurable parameters for migration mode. + + Attrs: + github_access_token: A Personal Access Token(PAT) or access token with repository access. + """ + + github_access_token: str | None + + +@dataclasses.dataclass +class ReconcileInputs: + """Configurable parameters for reconcile mode. + + Attrs: + dry_run: If enabled, only log the action that would be taken. + delete_pages: Whether to delete pages that are no longer needed. 
+ """ + + dry_run: bool + delete_pages: bool + + +@dataclasses.dataclass +class UserInputs(ReconcileInputs, MigrationInputs): + """Parsed user input values used to run upload-charm-docs.""" + + class Metadata(typing.NamedTuple): """Information within metadata file. Refer to: https://juju.is/docs/sdk/metadata-yaml. diff --git a/tests/conftest.py b/tests/conftest.py index de9f9c9b..b1febc06 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,6 @@ """Fixtures for all tests.""" -import typing from pathlib import Path from unittest import mock @@ -15,6 +14,7 @@ from github.Requester import Requester import src +from src import pull_request @pytest.fixture(name="upstream_repository") @@ -62,7 +62,7 @@ def fixture_mock_pull_request() -> PullRequest: @pytest.fixture(name="mock_github_repo") -def fixture_mock_github_repo(mock_pull_request: PullRequest): +def fixture_mock_github_repo(mock_pull_request: PullRequest) -> Repository: """Create a mock github repository instance.""" mocked_repo = mock.MagicMock(spec=Repository) mocked_repo.create_pull.return_value = mock_pull_request @@ -71,29 +71,33 @@ def fixture_mock_github_repo(mock_pull_request: PullRequest): @pytest.fixture(name="mock_github") -def fixture_mock_github(mock_github_repo: Repository): +def fixture_mock_github(mock_github_repo: Repository) -> Github: """Create a mock github instance.""" mocked_github = mock.MagicMock(spec=Github) mocked_github.get_repo.return_value = mock_github_repo return mocked_github -@pytest.fixture(name="patch_get_repository_name") -def fixture_patch_get_repository_name(monkeypatch: pytest.MonkeyPatch): - """Replace get_repository_name operation to pass.""" +@pytest.fixture(name="repository_client") +def fixture_repository_client( + repository: tuple[Repo, Path], mock_github_repo: Repository +) -> pull_request.RepositoryClient: + """Get repository client.""" + (repo, _) = repository + return pull_request.RepositoryClient(repository=repo, github_repository=mock_github_repo) - def mock_get_repository_name(remote_url: str): - return remote_url - monkeypatch.setattr(src, "get_repository_name", mock_get_repository_name) +@pytest.fixture(name="patch_create_repository_client") +def fixture_patch_create_repository_client( + monkeypatch: pytest.MonkeyPatch, repository_client: pull_request.RepositoryClient +) -> None: + """Patch create_repository_client to return a mocked RepositoryClient.""" - -@pytest.fixture(name="patch_create_github") -def fixture_patch_create_github(monkeypatch: pytest.MonkeyPatch, mock_github: Github): - """Replace create_github operation to return a mocked github client.""" - - def mock_create_github(access_token: typing.Any): + def mock_create_repository_client(access_token: str | None, base_path: Path): + # to accept keywords as arguments del access_token - return mock_github + del base_path + + return repository_client - monkeypatch.setattr(src, "create_github", mock_create_github) + monkeypatch.setattr(src, "create_repository_client", mock_create_repository_client) diff --git a/tests/factories.py b/tests/factories.py index 9369b7a3..7d02c55c 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -70,3 +70,18 @@ class Meta: title = factory.Sequence(lambda n: f"Content title {n}") content = factory.Sequence(lambda n: f"Content {n}") + + +class UserInputFactory(factory.Factory): + """Generate user input tuple.""" + + class Meta: + """Configuration for factory.""" + + model = types_.UserInputs + abstract = False + + # the following token is a test variable for testing. 
+ github_access_token = "test-token" # nosec + dry_run = False + delete_pages = False diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index 3d7c0cd3..fe845259 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -34,7 +34,7 @@ @pytest.mark.asyncio -@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") +@pytest.mark.usefixtures("patch_create_repository_client") async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, @@ -58,9 +58,6 @@ async def test_run( 12. with the nested directory removed 13. with the documentation file removed 14. with the index file removed - 15. with no docs dir and no custom branchname provided - 16. with no docs dir and custom branchname provided - 17. with no changes applied after migration assert: then: 1. an index page is created with an empty navigation table 2. an index page is not updated @@ -76,13 +73,8 @@ async def test_run( 12. the nested directory is removed from the navigation table 13. the documentation page is deleted 14. an index page is not updated - 15. the documentation files are pushed to default branch - 16. the documentation files are pushed to custom branch - 17. no operations are taken place """ - (repo, repo_path) = repository - # this is an access token string for testing purposes. - test_access_token = "test-access-token" # nosec + (_, repo_path) = repository document_name = "name 1" caplog.set_level(logging.INFO) create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) @@ -92,11 +84,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert str(exc_info.value) == GETTING_STARTED @@ -117,11 +108,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=True, + delete_pages=True, + ), ) assert tuple(urls_with_actions) == (index_url,) @@ -135,11 +125,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert tuple(urls_with_actions) == (index_url,) @@ -155,11 +144,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=True, + delete_pages=True, + ), ) assert tuple(urls_with_actions) == (index_url,) @@ -173,11 +161,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert len(urls_with_actions) == 2 @@ -199,11 +186,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - 
github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=True, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -219,11 +205,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -244,11 +229,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -269,11 +253,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert len(urls_with_actions) == 3 @@ -298,11 +281,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=True, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=True, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, nested_dir_doc_url, index_url) @@ -321,11 +303,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=False, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=False, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, nested_dir_doc_url, index_url) @@ -344,11 +325,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -365,11 +345,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (doc_url, index_url) @@ -388,11 +367,10 @@ async def test_run( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory( + dry_run=False, + delete_pages=True, + ), ) assert (urls := tuple(urls_with_actions)) == (index_url,) @@ -402,8 +380,9 @@ async def test_run( @pytest.mark.asyncio -@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") +@pytest.mark.usefixtures("patch_create_repository_client") async def test_run_migrate( + discourse_hostname: str, discourse_api: Discourse, caplog: pytest.LogCaptureFixture, 
repository: tuple[Repo, Path], @@ -422,11 +401,9 @@ async def test_run_migrate( 3. no operations are taken place """ document_name = "migration name 1" - discourse_prefix = "http://discourse" + discourse_prefix = f"http://{discourse_hostname}" (repo, repo_path) = repository (upstream_repo, upstream_repo_path) = upstream_repository - # this is an access token string for testing purposes. - test_access_token = "test-access-token" # nosec content_page_1 = factories.ContentPageFactory() content_page_1_url = discourse_api.create_topic( title=content_page_1.title, @@ -469,7 +446,7 @@ async def test_run_migrate( content=index_page_content, ) - # 1. with no docs dir and no custom branchname provided + # 1. with no docs dir and a metadata.yaml with docs key caplog.clear() create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", @@ -479,57 +456,16 @@ async def test_run_migrate( urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + user_inputs=factories.UserInputFactory(), ) upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME assert tuple(urls_with_actions) == (mock_pull_request.html_url,) assert ((group_1_path := upstream_doc_dir / "group-1")).is_dir() - assert ((group_1_gitkeep_path := group_1_path / migration.GITKEEP_FILE)).is_file() + assert (group_1_path / migration.GITKEEP_FILENAME).is_file() assert ((group_2_path := upstream_doc_dir / "group-2")).is_dir() - assert ((group_2_content_1_path := group_2_path / "content-1.md")).read_text( - encoding="utf-8" - ) == content_page_1.content - assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content - assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() - assert ((group_4_path := group_3_path / "group-4")).is_dir() - assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content - assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content - assert (group_5_path := upstream_doc_dir / "group-5").is_dir() - assert group_5_path.is_dir() - - # 2. 
with no docs dir and custom branchname provided - caplog.clear() - upstream_repo.git.checkout("main") - repo.git.checkout("main") - create_metadata_yaml( - content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=repo_path, - ) - custom_branchname = "branchname-1" - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=custom_branchname, - ) - - upstream_repo.git.checkout(custom_branchname) - repo.git.checkout(custom_branchname) - assert tuple(urls_with_actions) == (mock_pull_request.html_url,) - assert group_1_path.is_dir() - assert group_1_gitkeep_path.is_file() - assert group_2_path.is_dir() - assert group_2_content_1_path.read_text(encoding="utf-8") == content_page_1.content + assert (group_2_path / "content-1.md").read_text(encoding="utf-8") == content_page_1.content assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() assert ((group_4_path := group_3_path / "group-4")).is_dir() @@ -538,17 +474,14 @@ async def test_run_migrate( assert (group_5_path := upstream_doc_dir / "group-5").is_dir() assert group_5_path.is_dir() - # 3. with no changes applied after migration + # 2. with no changes applied after migration caplog.clear() + repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) urls_with_actions = run( base_path=repo_path, discourse=discourse_api, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=custom_branchname, + user_inputs=factories.UserInputFactory(), ) assert_substrings_in_string( diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index b7b6e8b4..46c71ee3 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -8,27 +8,25 @@ from pathlib import Path import pytest -from git.repo import Repo -from github.Repository import Repository -from src import index, pull_request +from src import index from src.discourse import Discourse @pytest.fixture(scope="module") -def base_path(): +def base_path() -> str: """Get the base path for discourse.""" return "http://discourse" @pytest.fixture() -def discourse(base_path: str): +def discourse(base_path: str) -> Discourse: """Get the discourse client.""" return Discourse(base_path=base_path, api_username="", api_key="", category_id=0) @pytest.fixture() -def index_file_content(tmp_path: Path): +def index_file_content(tmp_path: Path) -> str: """Create index file.""" docs_directory = tmp_path / index.DOCUMENTATION_FOLDER_NAME docs_directory.mkdir() @@ -36,10 +34,3 @@ def index_file_content(tmp_path: Path): content = "content 1" index_file.write_text(content, encoding="utf-8") return content - - -@pytest.fixture() -def repository_client(repository: tuple[Repo, Path], mock_github_repo: Repository): - """Get repository client.""" - (repo, _) = repository - return pull_request.RepositoryClient(repository=repo, github_repository=mock_github_repo) diff --git a/tests/unit/helpers.py b/tests/unit/helpers.py index 9c602d9a..0747a542 100644 --- a/tests/unit/helpers.py +++ b/tests/unit/helpers.py @@ -6,8 +6,6 @@ import typing from pathlib import Path -from git.repo import Repo - from src import metadata diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 3b0e0b77..5498f255 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -12,7 +12,6 @@ import pytest from 
git.repo import Repo from github.PullRequest import PullRequest -from github.Repository import Repository from src import ( DOCUMENTATION_FOLDER_NAME, @@ -29,6 +28,7 @@ types_, ) +from .. import factories from .helpers import create_metadata_yaml @@ -150,7 +150,9 @@ def test__run_reconcile_local_empty_server_error(tmp_path: Path): assert not returned_page_interactions -def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, Path]): +def test__run_migrate_server_error_index( + tmp_path: Path, repository_client: pull_request.RepositoryClient +): """ arrange: given metadata with name and docs but no docs directory and mocked discourse that raises an exception during index file fetching @@ -160,17 +162,13 @@ def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [exceptions.DiscourseError] - mocked_github_repo = mock.MagicMock(spec=Repository) - (repo, _) = repository with pytest.raises(exceptions.ServerError) as exc: _run_migrate( base_path=tmp_path, metadata=meta, discourse=mocked_discourse, - repo=repo, - github_repo=mocked_github_repo, - branch_name=None, + repository=repository_client, ) assert "Index page retrieval failed" == str(exc.value) @@ -178,7 +176,7 @@ def test__run_migrate_server_error_index(tmp_path: Path, repository: tuple[Repo, def test__run_migrate_server_error_topic( repository: tuple[Repo, Path], - mock_github_repo: Repository, + repository_client: pull_request.RepositoryClient, ): """ arrange: given metadata with name and docs but no docs directory and mocked discourse @@ -200,16 +198,14 @@ def test__run_migrate_server_error_topic( meta = types_.Metadata(name="name 1", docs=index_url) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] - (repo, repo_path) = repository + (_, repo_path) = repository with pytest.raises(exceptions.MigrationError): _run_migrate( base_path=repo_path, metadata=meta, discourse=mocked_discourse, - repo=repo, - github_repo=mock_github_repo, - branch_name=None, + repository=repository_client, ) @@ -217,8 +213,8 @@ def test__run_migrate_server_error_topic( def test__run_migrate( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], + repository_client: pull_request.RepositoryClient, mock_pull_request: PullRequest, - mock_github_repo: Repository, ): """ arrange: given metadata with name and docs but no docs directory and mocked discourse @@ -237,15 +233,13 @@ def test__run_migrate( index_page, (link_content := "link 1 content"), ] - (repo, repo_path) = repository + (_, repo_path) = repository returned_migration_reports = _run_migrate( base_path=repo_path, metadata=meta, discourse=mocked_discourse, - repo=repo, - github_repo=mock_github_repo, - branch_name=None, + repository=repository_client, ) (upstream_repo, upstream_path) = upstream_repository @@ -267,24 +261,15 @@ def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): act: when run is called assert: InputError is raised with a guide to getting started. """ - (repo, repo_path) = repository - # this is an access token string for testing purposes. 
- test_access_token = "test-access-token" # nosec + (_, repo_path) = repository create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + user_input = factories.UserInputFactory() with pytest.raises(exceptions.InputError) as exc: # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code - _ = run( - base_path=repo_path, - discourse=mocked_discourse, - dry_run=False, - delete_pages=False, - repo=repo, - github_access_token=test_access_token, - branch_name=None, - ) + _ = run(base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input) assert str(exc.value) == GETTING_STARTED @@ -296,24 +281,17 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): act: when run is called assert: then an index page is created with empty navigation table. """ - (repo, repo_path) = repository - # this is an access token string for testing purposes. - test_access_token = "test-access-token" # nosec + (_, repo_path) = repository create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) (repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.create_topic.return_value = (url := "url 1") + user_input = factories.UserInputFactory() # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code returned_page_interactions = run( - base_path=repo_path, - discourse=mocked_discourse, - dry_run=False, - delete_pages=True, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input ) mocked_discourse.create_topic.assert_called_once_with( @@ -324,7 +302,7 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): # pylint: disable=too-many-locals -@pytest.mark.usefixtures("patch_get_repository_name", "patch_create_github") +@pytest.mark.usefixtures("patch_create_repository_client") def test_run_no_docs_dir( repository: tuple[Repo, Path], upstream_repository: tuple[Repo, Path], @@ -337,9 +315,7 @@ def test_run_no_docs_dir( assert: then docs from the server is migrated into local docs path and the files created are return as the result. """ - (repo, repo_path) = repository - # this is an access token string for testing purposes. 
- test_access_token = "test-access-token" # nosec + (_, repo_path) = repository create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n" f"{metadata.METADATA_DOCS_KEY}: docsUrl", path=repo_path, @@ -354,17 +330,12 @@ def test_run_no_docs_dir( navlink_page = "file-navlink-content" mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_page, navlink_page] + user_input = factories.UserInputFactory() # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code returned_migration_reports = run( - base_path=repo_path, - discourse=mocked_discourse, - dry_run=False, - delete_pages=False, - repo=repo, - github_access_token=test_access_token, - branch_name=None, + base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input ) # pylint: enable=duplicate-code diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index ed909cd9..e4207323 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -12,6 +12,7 @@ import pytest from git.exc import GitCommandError from git.repo import Repo +from github import Github from github.GithubException import GithubException from github.PullRequest import PullRequest from github.Repository import Repository @@ -55,7 +56,7 @@ def test__check_branch_exists_error( monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) with pytest.raises(RepositoryClientError) as exc: - repository_client._check_branch_exists("branchname-1") + repository_client.check_branch_exists("branchname-1") assert_substrings_in_string( ("unexpected error checking existing branch", err_str), str(exc.value).lower() @@ -68,7 +69,7 @@ def test__check_branch_not_exists(repository_client: RepositoryClient): act: when _check_branch_exists is called assert: False is returned. 
""" - assert not repository_client._check_branch_exists("no-such-branchname") + assert not repository_client.check_branch_exists("no-such-branchname") def test__check_branch_exists( @@ -87,10 +88,7 @@ def test__check_branch_exists( upstream_repo.git.add(".") upstream_repo.git.commit("-m", "test") - assert repository_client._check_branch_exists(branch_name) - - upstream_repo.git.checkout("main") - upstream_repo.git.branch("-D", branch_name) + assert repository_client.check_branch_exists(branch_name) def test__create_branch_error( @@ -103,31 +101,40 @@ def test__create_branch_error( """ err_str = "mocked error" mock_git_repository = mock.MagicMock(spec=Repo) - mock_git_repository.git.fetch.side_effect = [GitCommandError(err_str)] + mock_git_repository.git.commit.side_effect = [GitCommandError(err_str)] monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) with pytest.raises(RepositoryClientError) as exc: - repository_client._create_branch(branch_name="test-create-branch", commit_msg="commit-1") + repository_client.create_branch(branch_name="test-create-branch", commit_msg="commit-1") assert_substrings_in_string( - ("unexpected error checking existing branch", err_str), str(exc.value).lower() + ("unexpected error creating new branch", err_str), str(exc.value).lower() ) def test__create_branch( - repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] + repository_client: RepositoryClient, + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], ): """ - arrange: given RepositoryClient + arrange: given RepositoryClient and newly created files in repo directory act: when _create_branch is called assert: a new branch is successfully created upstream. """ + (_, repo_path) = repository + testfile = "testfile.txt" + testfile_content = "test" + (repo_path / testfile).write_text(testfile_content) (upstream_repo, _) = upstream_repository branch_name = "test-create-branch" - repository_client._create_branch(branch_name=branch_name, commit_msg="commit-1") + repository_client.create_branch(branch_name=branch_name, commit_msg="commit-1") - assert any(branch for branch in upstream_repo.branches if branch.name == branch_name) + # mypy false positive in lib due to getter/setter not being next to each other. + assert any( + branch for branch in upstream_repo.branches if branch.name == branch_name # type: ignore + ) def test__create_github_pull_request_error( @@ -139,11 +146,13 @@ def test__create_github_pull_request_error( assert: RepositoryClientError is raised. """ mock_github_repository = mock.MagicMock(spec=Repository) - mock_github_repository.create_pull.fetch.side_effect = [GithubException] + mock_github_repository.create_pull.side_effect = [ + GithubException(status=500, data="Internal Server Error", headers=None) + ] monkeypatch.setattr(repository_client, "_github_repo", mock_github_repository) with pytest.raises(RepositoryClientError) as exc: - repository_client._create_github_pull_request( + repository_client.create_github_pull_request( branch_name="branchname-1", base="base-branchname" ) @@ -160,13 +169,13 @@ def test__create_github_pull_request( act: when _create_github_pull_request is called assert: a pull request's page link is returned. 
""" - returned_url = repository_client._create_github_pull_request("branchname-1", "base-branchname") + returned_url = repository_client.create_github_pull_request("branchname-1", "base-branchname") assert returned_url == mock_pull_request.html_url def test_create_pull_request_on_default_branchname( - monkeypatch: pytest.MonkeyPatch, + repository: tuple[Repo, Path], repository_client: RepositoryClient, ): """ @@ -174,12 +183,21 @@ def test_create_pull_request_on_default_branchname( act: when create_pull_request is called assert: InputError is raised. """ - mock_git_repository = mock.MagicMock(spec=Repo) - mock_git_repository.active_branch_name = pull_request.DEFAULT_BRANCH_NAME - monkeypatch.setattr(repository_client, "_git_repo", mock_git_repository) + (repo, _) = repository + head = repo.create_head(pull_request.DEFAULT_BRANCH_NAME) + head.checkout() - with pytest.raises(InputError): - repository_client.create_pull_request() + with pytest.raises(InputError) as exc: + pull_request.create_pull_request(repository=repository_client) + + assert_substrings_in_string( + ( + "pull request branch cannot be named", + "please try again after changing the branch name.", + pull_request.DEFAULT_BRANCH_NAME, + ), + str(exc.value).lower(), + ) def test_create_pull_request_no_dirty_files( @@ -190,18 +208,27 @@ def test_create_pull_request_no_dirty_files( act: when create_pull_request is called assert: InputError is raised. """ - with pytest.raises(InputError): - repository_client.create_pull_request() + with pytest.raises(InputError) as exc: + pull_request.create_pull_request(repository=repository_client) + + assert_substrings_in_string( + ("no files seem to be migrated. please add contents upstream first."), + str(exc.value).lower(), + ) def test_create_pull_request_existing_branch( - repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] + repository_client: RepositoryClient, + upstream_repository: tuple[Repo, Path], + repository: tuple[Repo, Path], ): """ arrange: given RepositoryClient and an upstream repository that already has migration branch act: when create_pull_request is called assert: InputError is raised. """ + (_, repo_path) = repository + (repo_path / "filler-file").write_text("filler-content") (upstream_repo, upstream_path) = upstream_repository branch_name = pull_request.DEFAULT_BRANCH_NAME head = upstream_repo.create_head(branch_name) @@ -210,11 +237,18 @@ def test_create_pull_request_existing_branch( upstream_repo.git.add(".") upstream_repo.git.commit("-m", "test") - with pytest.raises(InputError): - repository_client.create_pull_request() + with pytest.raises(InputError) as exc: + pull_request.create_pull_request(repository=repository_client) - upstream_repo.git.checkout("main") - upstream_repo.git.branch("-D", branch_name) + assert_substrings_in_string( + ( + "branch", + "already exists", + "please try again after removing", + pull_request.DEFAULT_BRANCH_NAME, + ), + str(exc.value).lower(), + ) def test_create_pull_request( @@ -234,7 +268,7 @@ def test_create_pull_request( filler_text = "filler-text" filler_file.write_text(filler_text) - returned_pr_link = repository_client.create_pull_request() + returned_pr_link = pull_request.create_pull_request(repository=repository_client) (upstream_repo, upstream_path) = upstream_repository upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) @@ -256,9 +290,14 @@ def test_get_repository_name_invalid(remote_url: str): act: when _get_repository_name is called assert: InputError is raised. 
""" - with pytest.raises(InputError): + with pytest.raises(InputError) as exc: pull_request._get_repository_name_from_git_url(remote_url=remote_url) + assert_substrings_in_string( + ("invalid remote repository url"), + str(exc.value).lower(), + ) + @pytest.mark.parametrize( "remote_url, expected_repository_name", @@ -290,11 +329,42 @@ def test_get_repository_name(remote_url: str, expected_repository_name: str): def test_create_repository_client_no_token(repository: tuple[Repo, Path]): """ arrange: given valid repository path and empty access_token - act: when create_repository_client_no_token is called + act: when create_repository_client is called assert: InputError is raised. """ (_, repo_path) = repository - test_token = "" + # the following token is for testing purposes only. + test_token = "" # nosec - with pytest.raises(InputError): + with pytest.raises(InputError) as exc: pull_request.create_repository_client(access_token=test_token, base_path=repo_path) + + assert_substrings_in_string( + ("invalid", "access_token", "input", "it must be", "non-empty"), + str(exc.value).lower(), + ) + + +def test_create_repository_client( + monkeypatch: pytest.MonkeyPatch, repository: tuple[Repo, Path], mock_github_repo: Repository +): + """ + arrange: given valid repository path and a valid access_token and a mocked github client + act: when create_repository_client is called + assert: RepositoryClient is returned. + """ + (repo, repo_path) = repository + origin = repo.remote("origin") + repo.delete_remote(origin) + repo.create_remote("origin", "https://github.com/test-user/test-repo.git") + # the following token is for testing purposes only. + test_token = "testing-token" # nosec + mock_github_client = mock.MagicMock(spec=Github) + mock_github_client.get_repo.returns = mock_github_repo + monkeypatch.setattr(pull_request, "Github", mock_github_client) + + returned_client = pull_request.create_repository_client( + access_token=test_token, base_path=repo_path + ) + + assert isinstance(returned_client, pull_request.RepositoryClient) From 24d55400967de48d051ccbef3b7a32e379025c8c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 5 Jan 2023 21:33:49 +0800 Subject: [PATCH 058/107] unpin patch versions --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index bd12996e..c3ec0207 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ pydiscourse>=1.3,<1.4 PyYAML>=6.0,<6.1 requests>=2.28,<2.29 -GitPython>=3.1.28,<3.1.30 -PyGithub>=1.57,<1.58 \ No newline at end of file +GitPython>=3.1,<3.1 +PyGithub>=1.57,<1.58 From eb2d41b2235548890b9b48564b070e32e4e99ba4 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 5 Jan 2023 21:34:29 +0800 Subject: [PATCH 059/107] remove unused dependencies in fmt run --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index e34089c9..4a1c9303 100644 --- a/tox.ini +++ b/tox.ini @@ -25,7 +25,6 @@ passenv = [testenv:fmt] description = Apply coding style standards to code deps = - -r{toxinidir}/requirements.txt black isort commands = From 9a6e4d5e23d9bce137e3964fcee6547a85fc6623 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 5 Jan 2023 21:35:18 +0800 Subject: [PATCH 060/107] add missing types --- discourse_check_cleanup.py | 2 +- src/docs_directory.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/discourse_check_cleanup.py b/discourse_check_cleanup.py index f4e82583..5c97f6cb 100755 --- a/discourse_check_cleanup.py +++ 
b/discourse_check_cleanup.py @@ -34,7 +34,7 @@ class Action(str, Enum): CLEANUP = "cleanup" -def main(): +def main() -> None: """Clean up created Discourse pages.""" logging.basicConfig(level=logging.INFO) diff --git a/src/docs_directory.py b/src/docs_directory.py index 45480521..ec9612cf 100644 --- a/src/docs_directory.py +++ b/src/docs_directory.py @@ -137,7 +137,7 @@ def read(docs_path: Path) -> typing.Iterator[types_.PathInfo]: ) -def has_docs_directory(base_path: Path): +def has_docs_directory(base_path: Path) -> bool: """Return existence of docs directory from base path. Args: From da950fa0a0382f0367e5de709618db793570e632 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 14:55:08 +0800 Subject: [PATCH 061/107] fix wrong GitPython pin version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c3ec0207..cba923b9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ pydiscourse>=1.3,<1.4 PyYAML>=6.0,<6.1 requests>=2.28,<2.29 -GitPython>=3.1,<3.1 +GitPython>=3.1,<3.2 PyGithub>=1.57,<1.58 From 95996b94050e0aef0e804bba7b0e3a6bd49f7e7d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 14:58:15 +0800 Subject: [PATCH 062/107] merge MirationReport and ActionReport --- src/__init__.py | 8 +++---- src/action.py | 34 +++++++++++++++++------------ src/types_.py | 23 ++++---------------- tests/factories.py | 9 ++++---- tests/unit/test_action.py | 46 +++++++++++++++++++-------------------- 5 files changed, 56 insertions(+), 64 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index d650c0a2..4b953f21 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -61,11 +61,11 @@ def _run_reconcile( delete_pages=delete_pages, ) return { - report.url: report.result + str(report.location): report.result for report in reports - if report.url is not None - and report.url != DRY_RUN_NAVLINK_LINK - and report.url != FAIL_NAVLINK_LINK + if report.location is not None + and report.location != DRY_RUN_NAVLINK_LINK + and report.location != FAIL_NAVLINK_LINK } diff --git a/src/action.py b/src/action.py index 8d3c66be..2bfa3262 100644 --- a/src/action.py +++ b/src/action.py @@ -72,7 +72,7 @@ def _create( path=action.path, navlink=types_.Navlink(title=action.navlink_title, link=url), ) - return types_.ActionReport(table_row=table_row, url=url, result=result, reason=reason) + return types_.ActionReport(table_row=table_row, location=url, result=result, reason=reason) def _noop(action: types_.NoopAction, discourse: Discourse) -> types_.ActionReport: @@ -90,7 +90,7 @@ def _noop(action: types_.NoopAction, discourse: Discourse) -> types_.ActionRepor table_row = types_.TableRow(level=action.level, path=action.path, navlink=action.navlink) return types_.ActionReport( table_row=table_row, - url=_absolute_url(table_row.navlink.link, discourse=discourse), + location=_absolute_url(table_row.navlink.link, discourse=discourse), result=types_.ActionResult.SUCCESS, reason=None, ) @@ -150,7 +150,7 @@ def _update( table_row = types_.TableRow( level=action.level, path=action.path, navlink=action.navlink_change.new ) - return types_.ActionReport(table_row=table_row, url=url, result=result, reason=reason) + return types_.ActionReport(table_row=table_row, location=url, result=result, reason=reason) def _delete( @@ -176,15 +176,15 @@ def _delete( is_group = action.navlink.link is None if dry_run: return types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.SKIP, reason=DRY_RUN_REASON + 
table_row=None, location=url, result=types_.ActionResult.SKIP, reason=DRY_RUN_REASON ) if not delete_pages and not is_group: return types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.SKIP, reason=NOT_DELETE_REASON + table_row=None, location=url, result=types_.ActionResult.SKIP, reason=NOT_DELETE_REASON ) if is_group: return types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.SUCCESS, reason=None + table_row=None, location=url, result=types_.ActionResult.SUCCESS, reason=None ) try: @@ -196,11 +196,11 @@ def _delete( discourse.delete_topic(url=action.navlink.link) return types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.SUCCESS, reason=None + table_row=None, location=url, result=types_.ActionResult.SUCCESS, reason=None ) except exceptions.DiscourseError as exc: return types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.FAIL, reason=str(exc) + table_row=None, location=url, result=types_.ActionResult.FAIL, reason=str(exc) ) @@ -276,7 +276,7 @@ def _run_index( if dry_run: report = types_.ActionReport( table_row=None, - url=( + location=( DRY_RUN_NAVLINK_LINK if isinstance(action, types_.CreateIndexAction) else action.url @@ -295,32 +295,38 @@ def _run_index( assert isinstance(action, types_.CreateIndexAction) # nosec url = discourse.create_topic(title=action.title, content=action.content) report = types_.ActionReport( - table_row=None, url=url, result=types_.ActionResult.SUCCESS, reason=None + table_row=None, location=url, result=types_.ActionResult.SUCCESS, reason=None ) except exceptions.DiscourseError as exc: report = types_.ActionReport( table_row=None, - url=FAIL_NAVLINK_LINK, + location=FAIL_NAVLINK_LINK, result=types_.ActionResult.FAIL, reason=str(exc), ) case types_.NoopIndexAction: assert isinstance(action, types_.NoopIndexAction) # nosec report = types_.ActionReport( - table_row=None, url=action.url, result=types_.ActionResult.SUCCESS, reason=None + table_row=None, + location=action.url, + result=types_.ActionResult.SUCCESS, + reason=None, ) case types_.UpdateIndexAction: try: assert isinstance(action, types_.UpdateIndexAction) # nosec discourse.update_topic(url=action.url, content=action.content_change.new) report = types_.ActionReport( - table_row=None, url=action.url, result=types_.ActionResult.SUCCESS, reason=None + table_row=None, + location=action.url, + result=types_.ActionResult.SUCCESS, + reason=None, ) except exceptions.DiscourseError as exc: assert isinstance(action, types_.UpdateIndexAction) # nosec report = types_.ActionReport( table_row=None, - url=action.url, + location=action.url, result=types_.ActionResult.FAIL, reason=str(exc), ) diff --git a/src/types_.py b/src/types_.py index 33bd0164..86ec080c 100644 --- a/src/types_.py +++ b/src/types_.py @@ -335,14 +335,15 @@ class ActionReport(typing.NamedTuple): Attrs: table_row: The navigation table entry, None for delete or index actions. - url: The URL that the action operated on, None for groups or if a create action was - skipped. + location: The URL that the action operated on, None for groups or if a create action was + skipped, if running in reconcile mode. + Path to migrated file, if running in migration mode. None on action failure. result: The action execution result. reason: The reason, None for success reports. 
""" table_row: TableRow | None - url: Url | None + location: Url | Path | None result: ActionResult reason: str | None @@ -387,19 +388,3 @@ class IndexDocumentMeta(MigrationFileMeta): """ content: str - - -class MigrationReport(typing.NamedTuple): - """Post execution report for an action. - - Attrs: - table_row: The navigation table entry. None if index file. - path: Path the file was written to. None if failed. - result: The action execution result. - reason: The reason, None for success reports. - """ - - table_row: TableRow | None - path: Path | None - result: ActionResult - reason: str | None diff --git a/tests/factories.py b/tests/factories.py index 7d02c55c..d774afc2 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -31,13 +31,13 @@ class Meta: alphabetical_rank = factory.Sequence(lambda n: n) -class MigrationReportFactory(factory.Factory): - """Generate Migration reports.""" +class ActionReportFactory(factory.Factory): + """Generate Action reports.""" class Meta: """Configuration for factory.""" - model = types_.MigrationReport + model = types_.ActionReport abstract = False class Params: @@ -46,6 +46,7 @@ class Params: is_success = factory.Trait(result=types_.ActionResult.SUCCESS, reason=None) is_skipped = factory.Trait(result=types_.ActionResult.SKIP, reason="skipped") is_failed = factory.Trait(result=types_.ActionResult.FAIL, reason="failed") + is_migrate = factory.Trait(location=factory.Sequence(lambda n: Path(f"path-{n}"))) table_row = factory.Sequence( lambda n: types_.TableRow( @@ -54,7 +55,7 @@ class Params: navlink=types_.Navlink(title=f"title {n}", link=f"link {n}"), ) ) - path = factory.Sequence(lambda n: Path(f"dir{n}")) + location = factory.Sequence(lambda n: types_.Url(f"link-{n}")) result = None reason = None diff --git a/tests/unit/test_action.py b/tests/unit/test_action.py index 99f0e061..c40376cd 100644 --- a/tests/unit/test_action.py +++ b/tests/unit/test_action.py @@ -50,7 +50,7 @@ def test__create_directory(dry_run: bool, caplog: pytest.LogCaptureFixture): assert returned_report.table_row.path == path assert returned_report.table_row.navlink.title == navlink_title assert returned_report.table_row.navlink.link is None - assert returned_report.url is None + assert returned_report.location is None assert ( returned_report.result == src_types.ActionResult.SKIP if dry_run @@ -86,7 +86,7 @@ def test__create_file_dry_run(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.path == path assert returned_report.table_row.navlink.title == navlink_title assert returned_report.table_row.navlink.link == action.DRY_RUN_NAVLINK_LINK - assert returned_report.url == action.DRY_RUN_NAVLINK_LINK + assert returned_report.location == action.DRY_RUN_NAVLINK_LINK assert returned_report.result == src_types.ActionResult.SKIP assert returned_report.reason == action.DRY_RUN_REASON @@ -121,7 +121,7 @@ def test__create_file_fail(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.path == path assert returned_report.table_row.navlink.title == navlink_title assert returned_report.table_row.navlink.link == action.FAIL_NAVLINK_LINK - assert returned_report.url == action.FAIL_NAVLINK_LINK + assert returned_report.location == action.FAIL_NAVLINK_LINK assert returned_report.result == src_types.ActionResult.FAIL assert returned_report.reason == str(error) @@ -156,7 +156,7 @@ def test__create_file(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.path == path assert returned_report.table_row.navlink.title == navlink_title assert 
returned_report.table_row.navlink.link == url - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -208,7 +208,7 @@ def test__noop( assert str(noop_action) in caplog.text assert returned_report.table_row == expected_table_row - assert returned_report.url == ( + assert returned_report.location == ( absolute_url if expected_table_row.navlink.link is not None else None ) assert returned_report.result == src_types.ActionResult.SUCCESS @@ -251,7 +251,7 @@ def test__update_directory(dry_run: bool, caplog: pytest.LogCaptureFixture): assert returned_report.table_row.level == level assert returned_report.table_row.path == path assert returned_report.table_row.navlink == update_action.navlink_change.new - assert returned_report.url is None + assert returned_report.location is None assert ( returned_report.result == src_types.ActionResult.SKIP if dry_run @@ -291,7 +291,7 @@ def test__update_file_dry_run(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.level == level assert returned_report.table_row.path == path assert returned_report.table_row.navlink == update_action.navlink_change.new - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SKIP assert returned_report.reason == action.DRY_RUN_REASON @@ -328,7 +328,7 @@ def test__update_file_navlink_title_change(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.level == level assert returned_report.table_row.path == path assert returned_report.table_row.navlink == update_action.navlink_change.new - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -369,7 +369,7 @@ def test__update_file_navlink_content_change_discourse_error(caplog: pytest.LogC assert returned_report.table_row.level == level assert returned_report.table_row.path == path assert returned_report.table_row.navlink == update_action.navlink_change.new - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.FAIL assert returned_report.reason == str(error) @@ -408,7 +408,7 @@ def test__update_file_navlink_content_change(caplog: pytest.LogCaptureFixture): assert returned_report.table_row.level == level assert returned_report.table_row.path == path assert returned_report.table_row.navlink == update_action.navlink_change.new - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -501,7 +501,7 @@ def test__delete_not_delete( assert f"delete pages: {delete_pages}" in caplog.text mocked_discourse.delete_topic.assert_not_called() assert returned_report.table_row is None - assert returned_report.url == (url if navlink_link else None) + assert returned_report.location == (url if navlink_link else None) assert returned_report.result == expected_result assert returned_report.reason == expected_reason @@ -536,7 +536,7 @@ def test__delete_error(caplog: pytest.LogCaptureFixture): assert f"delete pages: {True}" in caplog.text mocked_discourse.delete_topic.assert_called_once_with(url=link) assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == 
src_types.ActionResult.FAIL assert returned_report.reason == str(error) @@ -570,7 +570,7 @@ def test__delete(caplog: pytest.LogCaptureFixture): assert f"delete pages: {True}" in caplog.text mocked_discourse.delete_topic.assert_called_once_with(url=link) assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -694,7 +694,7 @@ def test__run_index_dry_run( mocked_discourse.create_topic.assert_not_called() mocked_discourse.update_topic.assert_not_called() assert returned_report.table_row is None - assert returned_report.url == expected_url + assert returned_report.location == expected_url assert returned_report.result == src_types.ActionResult.SKIP assert returned_report.reason == action.DRY_RUN_REASON @@ -722,7 +722,7 @@ def test__run_index_create_error(caplog: pytest.LogCaptureFixture): assert f"report: {returned_report}" in caplog.text mocked_discourse.create_topic.assert_called_once_with(title=title, content=content) assert returned_report.table_row is None - assert returned_report.url == action.FAIL_NAVLINK_LINK + assert returned_report.location == action.FAIL_NAVLINK_LINK assert returned_report.result == src_types.ActionResult.FAIL assert returned_report.reason == str(error) @@ -750,7 +750,7 @@ def test__run_index_create(caplog: pytest.LogCaptureFixture): assert f"report: {returned_report}" in caplog.text mocked_discourse.create_topic.assert_called_once_with(title=title, content=content) assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -776,7 +776,7 @@ def test__run_index_noop(caplog: pytest.LogCaptureFixture): mocked_discourse.create_topic.assert_not_called() mocked_discourse.update_topic.assert_not_called() assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -804,7 +804,7 @@ def test__run_index_update_error(caplog: pytest.LogCaptureFixture): assert f"report: {returned_report}" in caplog.text mocked_discourse.update_topic.assert_called_once_with(url=url, content=content) assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.FAIL assert returned_report.reason == str(error) @@ -831,7 +831,7 @@ def test__run_index_update(caplog: pytest.LogCaptureFixture): assert f"report: {returned_report}" in caplog.text mocked_discourse.update_topic.assert_called_once_with(url=url, content=content) assert returned_report.table_row is None - assert returned_report.url == url + assert returned_report.location == url assert returned_report.result == src_types.ActionResult.SUCCESS assert returned_report.reason is None @@ -856,7 +856,7 @@ def test__run_index_update(caplog: pytest.LogCaptureFixture): [ src_types.ActionReport( table_row=src_types.TableRow(level=level, path=path, navlink=navlink), - url=link, + location=link, result=src_types.ActionResult.SUCCESS, reason=None, ) @@ -885,13 +885,13 @@ def test__run_index_update(caplog: pytest.LogCaptureFixture): [ src_types.ActionReport( table_row=src_types.TableRow(level=level_1, path=path_1, navlink=navlink_1), - url=link_1, + 
location=link_1, result=src_types.ActionResult.SUCCESS, reason=None, ), src_types.ActionReport( table_row=src_types.TableRow(level=level_2, path=path_2, navlink=navlink_2), - url=link_2, + location=link_2, result=src_types.ActionResult.SUCCESS, reason=None, ), @@ -926,7 +926,7 @@ def test_run_all( expected_reports.append( src_types.ActionReport( - table_row=None, url=url, result=src_types.ActionResult.SUCCESS, reason=None + table_row=None, location=url, result=src_types.ActionResult.SUCCESS, reason=None ) ) assert returned_reports == expected_reports From 1738d1d6ec6b278f6bb0630c5d5e7242662834ae Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 14:58:53 +0800 Subject: [PATCH 063/107] separate out migration integration test --- tests/integration/test___init__.py | 124 +--------------- tests/integration/test___init__run_migrate.py | 135 ++++++++++++++++++ 2 files changed, 137 insertions(+), 122 deletions(-) create mode 100644 tests/integration/test___init__run_migrate.py diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__.py index fe845259..657ed9c1 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__.py @@ -13,18 +13,8 @@ import pytest from git.repo import Repo -from github.PullRequest import PullRequest - -from src import ( - GETTING_STARTED, - exceptions, - index, - metadata, - migration, - pull_request, - reconcile, - run, -) + +from src import GETTING_STARTED, exceptions, index, metadata, reconcile, run from src.discourse import Discourse from .. import factories @@ -377,113 +367,3 @@ async def test_run( assert_substrings_in_string(chain(urls, ("Update", "'success'")), caplog.text) index_topic = discourse_api.retrieve_topic(url=index_url) assert index_content not in index_topic - - -@pytest.mark.asyncio -@pytest.mark.usefixtures("patch_create_repository_client") -async def test_run_migrate( - discourse_hostname: str, - discourse_api: Discourse, - caplog: pytest.LogCaptureFixture, - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], - mock_pull_request: PullRequest, -): - """ - arrange: given running discourse server - act: when run is called with: - 1. with no docs dir and no custom branchname provided - 2. with no docs dir and custom branchname provided - 3. with no changes applied after migration - assert: then: - 1. the documentation files are pushed to default branch - 2. the documentation files are pushed to custom branch - 3. no operations are taken place - """ - document_name = "migration name 1" - discourse_prefix = f"http://{discourse_hostname}" - (repo, repo_path) = repository - (upstream_repo, upstream_repo_path) = upstream_repository - content_page_1 = factories.ContentPageFactory() - content_page_1_url = discourse_api.create_topic( - title=content_page_1.title, - content=content_page_1.content, - ).removeprefix(discourse_prefix) - content_page_2 = factories.ContentPageFactory() - content_page_2_url = discourse_api.create_topic( - title=content_page_2.title, - content=content_page_2.content, - ).removeprefix(discourse_prefix) - content_page_3 = factories.ContentPageFactory() - content_page_3_url = discourse_api.create_topic( - title=content_page_3.title, - content=content_page_3.content, - ).removeprefix(discourse_prefix) - content_page_4 = factories.ContentPageFactory() - content_page_4_url = discourse_api.create_topic( - title=content_page_4.title, - content=content_page_4.content, - ).removeprefix(discourse_prefix) - index_page_content = f"""Testing index page. 
- - Testing index page content. - - # Navigation - - | Level | Path | Navlink | - | -- | -- | -- | - | 1 | group-1 | [Group 1]() | - | 1 | group-2 | [Group 2]() | - | 2 | group-2-content-1 | [Content Link 1]({content_page_1_url}) | - | 2 | group-2-content-2 | [Content Link 2]({content_page_2_url}) | - | 1 | group-3 | [Group 3]() | - | 2 | group-3-group-4 | [Group 4]() | - | 3 | group-3-group-4-content-3 | [Content Link 3]({content_page_3_url}) | - | 2 | group-3-content-4 | [Content Link 4]({content_page_4_url}) | - | 1 | group-5 | [Group 5]() |""" - index_url = discourse_api.create_topic( - title=f"{document_name.replace('-', ' ').title()} Documentation Overview", - content=index_page_content, - ) - - # 1. with no docs dir and a metadata.yaml with docs key - caplog.clear() - create_metadata_yaml( - content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=repo_path, - ) - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - user_inputs=factories.UserInputFactory(), - ) - - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME - assert tuple(urls_with_actions) == (mock_pull_request.html_url,) - assert ((group_1_path := upstream_doc_dir / "group-1")).is_dir() - assert (group_1_path / migration.GITKEEP_FILENAME).is_file() - assert ((group_2_path := upstream_doc_dir / "group-2")).is_dir() - assert (group_2_path / "content-1.md").read_text(encoding="utf-8") == content_page_1.content - assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content - assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() - assert ((group_4_path := group_3_path / "group-4")).is_dir() - assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content - assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content - assert (group_5_path := upstream_doc_dir / "group-5").is_dir() - assert group_5_path.is_dir() - - # 2. with no changes applied after migration - caplog.clear() - repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - user_inputs=factories.UserInputFactory(), - ) - - assert_substrings_in_string( - chain(urls_with_actions, ("Noop", "Noop", "Noop", "'success'")), caplog.text - ) diff --git a/tests/integration/test___init__run_migrate.py b/tests/integration/test___init__run_migrate.py new file mode 100644 index 00000000..2a37f382 --- /dev/null +++ b/tests/integration/test___init__run_migrate.py @@ -0,0 +1,135 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Integration tests for running the migrate action.""" + +# This test is fairly complex as it simulates sequential action runs +# pylint: disable=too-many-arguments,too-many-locals,too-many-statements + +import logging +from itertools import chain +from pathlib import Path + +import pytest +from git.repo import Repo +from github.PullRequest import PullRequest + +from src import index, metadata, migration, pull_request, run +from src.discourse import Discourse + +from .. 
import factories +from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml + +pytestmark = pytest.mark.init_migrate + + +@pytest.mark.init_run_migrate +@pytest.mark.asyncio +@pytest.mark.usefixtures("patch_create_repository_client") +async def test_run_migrate( + discourse_hostname: str, + discourse_api: Discourse, + caplog: pytest.LogCaptureFixture, + repository: tuple[Repo, Path], + upstream_repository: tuple[Repo, Path], + mock_pull_request: PullRequest, +): + """ + arrange: given running discourse server + act: when run is called with: + 1. with no docs dir and no custom branchname provided + 2. with no docs dir and custom branchname provided + 3. with no changes applied after migration + assert: then: + 1. the documentation files are pushed to default branch + 2. the documentation files are pushed to custom branch + 3. no operations are taken place + """ + caplog.set_level(logging.INFO) + document_name = "migration name 1" + discourse_prefix = f"http://{discourse_hostname}" + (repo, repo_path) = repository + (upstream_repo, upstream_repo_path) = upstream_repository + content_page_1 = factories.ContentPageFactory() + content_page_1_url = discourse_api.create_topic( + title=content_page_1.title, + content=content_page_1.content, + ).removeprefix(discourse_prefix) + content_page_2 = factories.ContentPageFactory() + content_page_2_url = discourse_api.create_topic( + title=content_page_2.title, + content=content_page_2.content, + ).removeprefix(discourse_prefix) + content_page_3 = factories.ContentPageFactory() + content_page_3_url = discourse_api.create_topic( + title=content_page_3.title, + content=content_page_3.content, + ).removeprefix(discourse_prefix) + content_page_4 = factories.ContentPageFactory() + content_page_4_url = discourse_api.create_topic( + title=content_page_4.title, + content=content_page_4.content, + ).removeprefix(discourse_prefix) + index_page_content = f"""Testing index page. + + Testing index page content. + + # Navigation + + | Level | Path | Navlink | + | -- | -- | -- | + | 1 | group-1 | [Group 1]() | + | 1 | group-2 | [Group 2]() | + | 2 | group-2-content-1 | [Content Link 1]({content_page_1_url}) | + | 2 | group-2-content-2 | [Content Link 2]({content_page_2_url}) | + | 1 | group-3 | [Group 3]() | + | 2 | group-3-group-4 | [Group 4]() | + | 3 | group-3-group-4-content-3 | [Content Link 3]({content_page_3_url}) | + | 2 | group-3-content-4 | [Content Link 4]({content_page_4_url}) | + | 1 | group-5 | [Group 5]() |""" + index_url = discourse_api.create_topic( + title=f"{document_name.replace('-', ' ').title()} Documentation Overview", + content=index_page_content, + ) + + # 1. 
with no docs dir and a metadata.yaml with docs key + caplog.clear() + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", + path=repo_path, + ) + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + user_inputs=factories.UserInputFactory(), + ) + + upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME + assert tuple(urls_with_actions) == (mock_pull_request.html_url,) + assert ((group_1_path := upstream_doc_dir / "group-1")).is_dir() + assert (group_1_path / migration.GITKEEP_FILENAME).is_file() + assert ((group_2_path := upstream_doc_dir / "group-2")).is_dir() + assert (group_2_path / "content-1.md").read_text(encoding="utf-8") == content_page_1.content + assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content + assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() + assert ((group_4_path := group_3_path / "group-4")).is_dir() + assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content + assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content + assert (group_5_path := upstream_doc_dir / "group-5").is_dir() + assert group_5_path.is_dir() + + # 2. with no changes applied after migration + caplog.clear() + repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + + urls_with_actions = run( + base_path=repo_path, + discourse=discourse_api, + user_inputs=factories.UserInputFactory(), + ) + + assert_substrings_in_string( + chain(urls_with_actions, ("Noop", "Noop", "Noop", "'success'")), caplog.text + ) From 8fec15c15782382d965dc50441141d492836b62f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 14:59:40 +0800 Subject: [PATCH 064/107] refactor migration module --- src/__init__.py | 11 +- src/migration.py | 367 +++++++----- tests/factories.py | 26 + tests/integration/conftest.py | 2 + tests/unit/test_migration.py | 1037 +++++++++++++++++++-------------- 5 files changed, 859 insertions(+), 584 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 4b953f21..ced8fdae 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -14,8 +14,7 @@ from .index import DOCUMENTATION_FOLDER_NAME, contents_from_page from .index import get as get_index from .metadata import get as get_metadata -from .migration import assert_migration_success, get_docs_metadata -from .migration import run as run_migrate +from .migration import run as migrate_contents from .navigation_table import from_page as navigation_table_from_page from .pull_request import RepositoryClient, create_pull_request, create_repository_client from .reconcile import run as run_reconcile @@ -69,7 +68,6 @@ def _run_reconcile( } -# pylint: disable=too-many-arguments def _run_migrate( base_path: Path, metadata: Metadata, discourse: Discourse, repository: RepositoryClient ) -> dict[str, str]: @@ -90,13 +88,12 @@ def _run_migrate( ) index_content = contents_from_page(server_content) table_rows = navigation_table_from_page(page=server_content, discourse=discourse) - file_metadata = get_docs_metadata(table_rows=table_rows, index_content=index_content) - migration_results = run_migrate( - documents=file_metadata, + migrate_contents( + table_rows=table_rows, + index_content=index_content, discourse=discourse, docs_path=base_path / DOCUMENTATION_FOLDER_NAME, ) - assert_migration_success(migration_results=migration_results) pr_link = 
create_pull_request(repository=repository) diff --git a/src/migration.py b/src/migration.py index a398d55f..be061c06 100644 --- a/src/migration.py +++ b/src/migration.py @@ -1,4 +1,4 @@ -# Copyright 2022 Canonical Ltd. +# Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. """Module for transforming index table rows into local files.""" @@ -16,33 +16,205 @@ GITKEEP_FILENAME = ".gitkeep" -def _validate_row_levels(table_rows: list[types_.TableRow]): - """Check for invalid row levels. +def _extract_name_from_paths(current_path: Path, table_path: types_.TablePath) -> str: + """Extract name given a current working directory and table path. + + If there is a matching prefix in table path's prefix generated from the current directory, + the prefix is removed and the remaining segment is returned as the extracted name. + + Args: + current_path: current path of the file relative to the directory. + table_path: table path of the file from the index file, of format path-to-file-filename. + + Returns: + The filename derived by removing the directory path from given table path of the file. + """ + return table_path.removeprefix(f"{calculate_table_path(current_path)}-") + + +def _assert_valid_row(group_depth: int, row: types_.TableRow, is_first_row: bool) -> None: + """Chekcs validity of the row with respect to group level. Args: - table_rows: Table rows from the index file. + group_depth: Group depth in which the previous row was evaluated in. + row: Current row to be evaluated. + is_first_row: True if current row is the first row in table. Raises: - InvalidRow exception if invalid row level is encountered. + InputError on invalid row level or invalid row level sequence. """ - level = 0 - for i, row in enumerate(table_rows): - if row.level <= 0: - raise exceptions.InvalidTableRowError(f"Invalid level {row.level} in {row!=row.level}") - # Level increase of more than 1 is not possible. - if row.level > level and (difference := row.level - level) > 1: - raise exceptions.InvalidTableRowError( - f"Level difference of {difference} encountered in {row=!r}" + if is_first_row: + if row.level != 1: + raise exceptions.InputError( + "Invalid starting row level. A table row must start with level value 1. " + "Please fix the upstream first and re-run." + f"Row: {row=!r}" ) - # Subdirectory but previous row is not a file. - if row.level > level and i > 0 and table_rows[i - 1].navlink.link: - raise exceptions.InvalidTableRowError(f"Invalid parent row for {row=!r}") + if row.level < 1: + raise exceptions.InputError( + f"Invalid row level: {row.level=!r}." + "Zero or negative level value is invalid." + f"Row: {row=!r}" + ) + if row.level > group_depth + 1: + raise exceptions.InputError( + "Invalid row level value sequence. Level sequence jumps of more than 1 is invalid." + f"Did you mean level {group_depth+1}?" + f"Row: {row=!r}" + ) + + +def _get_next_group_info( + row: types_.TableRow, group_path: Path, group_depth: int +) -> tuple[Path, int]: + """Get next directory path representation of a group with it's depth. + + Algorithm: + 1. Set target group depth as one above current row level. + 2. While current group depth is not equal to target group depth + 2.1. If current group depth is lower than target, + should not be possible since it should have been caught during validation step. + target_group_depth being bigger than group_depth means traversing more than 1 level + at a given step. + 2.2. If current group depth is higher than target, decrement depth and adjust path by + moving to parent path. 
+ 3. If row is a group row, increment depth and adjust path by appending extracted row name. + + Args: + row: Table row in which to move the path to. + group_path: Path representation of current group. + group_depth: Current group depth. - # Level decrease or same level is fine. - level = row.level + Returns: + A tuple consisting of next directory path representation of group and next group depth. + """ + target_group_depth = row.level - 1 + + while group_depth != target_group_depth: + group_depth -= 1 + group_path = group_path.parent + + if row.is_group: + group_depth += 1 + group_path = group_path / _extract_name_from_paths( + current_path=group_path, table_path=row.path + ) + + return (group_path, group_depth) + + +def _should_yield_gitkeep(row: types_.TableRow, next_depth: int, depth: int) -> bool: + """Determine whether to yield a gitkeep file depending on depth traversal. + + It is important to note that the previous row must have been an empty a group row. + + Args: + row: Current table row to evaluate whether a gitkeep should be yielded first. + next_depth: Incoming group depth of current table row. + depth: Current depth being evaluated. + + Returns: + True if gitkeep file should be yielded first before processing the row further. + """ + return (row.is_group and next_depth <= depth) or (not row.is_group and next_depth < depth) -def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path): +def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMeta: + """Create document meta file for migration from table row. + + Args: + row: Row containing link to document and path information. + path: Relative path to where the document should reside. + """ + # this is to help mypy understand that link is not None. + # this case cannot be possible since this is called for group rows only. + if not row.navlink.link: # pragma: no cover + raise exceptions.MigrationError( + "Internal error, no implementation for creating document meta with missing link in row." + ) + name = _extract_name_from_paths(current_path=path, table_path=row.path) + return types_.DocumentMeta(path=path / f"{name}.md", link=row.navlink.link, table_row=row) + + +def _create_gitkeep_meta(row: types_.TableRow, path: Path) -> types_.GitkeepMeta: + """Create a representation of an empty grouping through a .gitkeep file metadata. + + Args: + row: An empty group row. + path: Relative path to where the document should reside. + """ + return types_.GitkeepMeta(path=path / GITKEEP_FILENAME, table_row=row) + + +def _extract_docs_from_table_rows( + table_rows: typing.Iterable[types_.TableRow], +) -> typing.Generator[types_.MigrationFileMeta, None, None]: + """Extract necessary migration documents to build docs directory from server. + + Algorithm: + 1. For each row: + 1.1. Check if the row is valid with respect to current group depth. + 1.2. Calculate next group depth and next group path from row. + 1.3. If previous row was a group and + the current row is a document and we're traversing up the path OR + the current row is a folder and we're in the in the same path or above, + yield a gitkeep meta. + 1.4. Update current group depth and current group path. + 1.5. If current row is a document, yield document meta. + 2. If last row was a group, yield gitkeep meta. + + Args: + table_rows: Table rows from the index file in the order of group hierarchy. + + Raises: + InputError if invalid row level or invalid sequence of row level is found. + + Yields: + Migration documents with navlink to content. 
.gitkeep file if empty group. + """ + group_depth = 0 + current_path = Path() + previous_row: types_.TableRow | None = None + + for row in table_rows: + _assert_valid_row(group_depth=group_depth, row=row, is_first_row=previous_row is None) + (next_group_path, next_group_depth) = _get_next_group_info( + group_path=current_path, row=row, group_depth=group_depth + ) + # if previously processed row was a group and it had nothing in it + # we should yield a .gitkeep file to denote empty group. + if ( + previous_row + and previous_row.is_group + and _should_yield_gitkeep(row=row, next_depth=next_group_depth, depth=group_depth) + ): + yield _create_gitkeep_meta(row=previous_row, path=current_path) + + group_depth = next_group_depth + current_path = next_group_path + if not row.is_group: + yield _create_document_meta(row=row, path=current_path) + + previous_row = row + + # last group without documents yields gitkeep meta. + if previous_row is not None and previous_row.is_group: + yield _create_gitkeep_meta(row=previous_row, path=current_path) + + +def _index_file_from_content(content: str) -> types_.IndexDocumentMeta: + """Get index file document metadata. + + Args: + content: Index file content. + + Returns: + Index file document metadata. + """ + return types_.IndexDocumentMeta(path=Path("index.md"), content=content) + + +def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types_.ActionReport: """Write gitkeep file to docs directory. Args: @@ -57,15 +229,17 @@ def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path): path = docs_path / gitkeep_meta.path path.parent.mkdir(parents=True, exist_ok=True) path.touch() - return types_.MigrationReport( + return types_.ActionReport( table_row=gitkeep_meta.table_row, result=types_.ActionResult.SUCCESS, - path=path, + location=path, reason=EMPTY_DIR_REASON, ) -def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, docs_path: Path): +def _migrate_document( + document_meta: types_.DocumentMeta, discourse: Discourse, docs_path: Path +) -> types_.ActionReport: """Write document file with content to docs directory. Args: @@ -81,24 +255,24 @@ def _migrate_document(document_meta: types_.DocumentMeta, discourse: Discourse, try: content = discourse.retrieve_topic(url=document_meta.link) except exceptions.DiscourseError as exc: - return types_.MigrationReport( + return types_.ActionReport( table_row=document_meta.table_row, result=types_.ActionResult.FAIL, - path=None, + location=None, reason=str(exc), ) path = docs_path / document_meta.path path.parent.mkdir(parents=True, exist_ok=True) path.write_text(content, encoding="utf-8") - return types_.MigrationReport( + return types_.ActionReport( table_row=document_meta.table_row, result=types_.ActionResult.SUCCESS, - path=path, + location=path, reason=None, ) -def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path): +def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path) -> types_.ActionReport: """Write index document to docs repository. 
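As a rough usage sketch of the document writer above (the discourse client is stubbed with a mock and the path and link values are illustrative):

from pathlib import Path
from unittest import mock

from src import migration, types_

meta = types_.DocumentMeta(
    path=Path("group-1/doc-1.md"),
    link="/t/doc-topic/42",
    table_row=types_.TableRow(
        level=2,
        path="group-1-doc-1",
        navlink=types_.Navlink(title="Doc 1", link="/t/doc-topic/42"),
    ),
)
discourse_client = mock.MagicMock()
discourse_client.retrieve_topic.return_value = "topic content"

report = migration._migrate_document(
    document_meta=meta, discourse=discourse_client, docs_path=Path("docs")
)
# The topic content is written to docs/group-1/doc-1.md and reported as the location.
assert report.location == Path("docs/group-1/doc-1.md")
assert report.result is types_.ActionResult.SUCCESS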
Args: @@ -113,17 +287,17 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path): path = docs_path / index_meta.path path.parent.mkdir(parents=True, exist_ok=True) path.write_text(index_meta.content, encoding="utf-8") - return types_.MigrationReport( + return types_.ActionReport( table_row=None, result=types_.ActionResult.SUCCESS, - path=path, + location=path, reason=None, ) def _run_one( file_meta: types_.MigrationFileMeta, discourse: Discourse, docs_path: Path -) -> types_.MigrationReport: +) -> types_.ActionReport: """Write document content relative to docs directory. Args: @@ -150,96 +324,14 @@ def _run_one( # Edge case that should not be possible. case _: # pragma: no cover raise exceptions.MigrationError( - f"internal error, no implementation for migration file, {file_meta=!r}" + f"Internal error, no implementation for migration file, {file_meta=!r}" ) logging.info("report: %s", report) return report -def _calculate_file_name(current_directory: Path, table_path: types_.TablePath) -> str: - """Calculate file name given table path from the index file and current path \ - relative to the docs directory. - - Args: - current_directory: current directory of the file relative to the docs directory. - table_path: table path of the file from the index file, of format path-to-file-filename. - - Returns: - The filename derived by removing the directory path from given table path of the file. - """ - return table_path.removeprefix(f"{calculate_table_path(current_directory)}-") - - -def _extract_docs_from_table_rows( - table_rows: typing.Iterable[types_.TableRow], -) -> typing.Iterable[types_.MigrationFileMeta]: - """Extract necessary migration documents to build docs directory from server. - - Algorithm: - 1. For each table row: - 1.1. If row level is smaller than current working level: - 1.1.1. Yield GitkeepMeta if last working directory was empty. - 1.1.2. Navigate to parent directory based on current level and row level. - 1.2. If row is a directory: - 1.2.1. Create a virtual directory with given path - 1.2.2. Set created virtual directory as working directory. - 1.3. If row is a file: Yield DocumentMeta - 2. If last table row was a directory and yielded no DocumentMeta, yield GitkeepMeta. - - Args: - table_rows: Table rows from the index file in the order of directory hierarchy. - - Returns: - Migration documents with navlink to content.\ - .gitkeep file with no content if empty directory. - """ - table_rows = list(table_rows) - _validate_row_levels(table_rows=table_rows) - - level = 0 - last_dir_has_file = True # Assume root dir is not empty. - last_dir_row: types_.TableRow | None = None - cwd = Path() - for row in table_rows: - # Next set of hierarchies, change cwd path - if row.level <= level: - if not last_dir_has_file and last_dir_row is not None: - yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE, table_row=last_dir_row) - while row.level <= level: - level -= 1 - cwd = cwd.parent - - # if row is directory, move cwd - if not row.navlink.link: - last_dir_has_file = False - last_dir_row = row - cwd = cwd / row.path - level = row.level - else: - last_dir_has_file = True - file_name = _calculate_file_name(cwd, row.path) - yield types_.DocumentMeta( - path=cwd / f"{file_name}.md", link=row.navlink.link, table_row=row - ) - - if not last_dir_has_file and last_dir_row: - yield types_.GitkeepMeta(path=cwd / GITKEEP_FILE, table_row=last_dir_row) - - -def _index_file_from_content(content: str): - """Get index file document metadata. - - Args: - content: Index file content. 
- - Returns: - Index file document metadata. - """ - return types_.IndexDocumentMeta(path=Path("index.md"), content=content) - - -def get_docs_metadata( +def _get_docs_metadata( table_rows: typing.Iterable[types_.TableRow], index_content: str ) -> typing.Iterable[types_.MigrationFileMeta]: """Get metadata for documents to be migrated. @@ -256,35 +348,42 @@ def get_docs_metadata( return itertools.chain([index_doc], table_docs) -def run( - documents: typing.Iterable[types_.MigrationFileMeta], discourse: Discourse, docs_path: Path -) -> typing.Iterable[types_.MigrationReport]: - """Write document content to docs_path. +def _assert_migration_success(migration_results: typing.Iterable[types_.ActionReport]) -> None: + """Assert all documents have been successfully migrated. Args: - documents: metadata about a file to be migrated to local docs directory. - discourse: Client to the documentation server. - docs_path: The path to the docs directory containing all the documentation. + migration_results: Migration results from server to local. Returns: - Migration result reports containing action result and failure reason if any. + None if success, raises MigrationError otherwise. """ - return [ - _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) - for document in documents - ] + if any(result for result in migration_results if result.result is types_.ActionResult.FAIL): + raise exceptions.MigrationError( + "Error migrating the docs, please check the logs for more detail." + ) -def assert_migration_success(migration_results: typing.Iterable[types_.MigrationReport]) -> None: - """Assert all documents have been successfully migrated. +def run( + table_rows: typing.Iterable[types_.TableRow], + index_content: str, + discourse: Discourse, + docs_path: Path, +) -> None: + """Write document content to docs_path. Args: - migration_results: Migration results from server to local. + documents: metadata about a file to be migrated to local docs directory. + discourse: Client to the documentation server. + docs_path: The path to the docs directory containing all the documentation. + + Raises: + MigrationError if any migration error occurred during migration. Returns: - None if success, raises MigrationError otherwise. + Migration result reports containing action result and failure reason if any. """ - if [result for result in migration_results if result.result is types_.ActionResult.FAIL]: - raise exceptions.MigrationError( - "Error migrating the docs, please check the logs for more detail." 
- ) + migration_reports = ( + _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) + for document in _get_docs_metadata(table_rows=table_rows, index_content=index_content) + ) + _assert_migration_success(migration_results=migration_reports) diff --git a/tests/factories.py b/tests/factories.py index d774afc2..54a0b5e9 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -86,3 +86,29 @@ class Meta: github_access_token = "test-token" # nosec dry_run = False delete_pages = False + + +class TableRowFactory(factory.Factory): + """Generate table row.""" + + class Meta: + """Configuration for factory.""" + + model = types_.TableRow + abstract = False + + class Params: + """Variable factory params for generating different type of table row.""" + + is_group = factory.Trait( + navlink=factory.Sequence(lambda n: types_.Navlink(f"navlink-title-{n}", link=None)) + ) + is_document = factory.Trait( + navlink=factory.Sequence( + lambda n: types_.Navlink(f"navlink-title-{n}", link=f"navlink-{n}") + ) + ) + + level = factory.Sequence(lambda n: n) + path = factory.Sequence(lambda n: f"path-{n}") + navlink = factory.Sequence(lambda n: types_.Navlink(f"navlink-title-{n}", link=f"navlink-{n}")) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 4c778627..9b830589 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -126,6 +126,8 @@ async def discourse(ops_test: OpsTest, discourse_hostname: str): async def get_discourse_status(): """Get the status of discourse.""" + # to help mypy understand model is not None. + assert ops_test.model # nosec return (await ops_test.model.get_status())["applications"]["discourse-k8s"].status[ "status" ] diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 19ae6fcc..8daabb0c 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -1,4 +1,4 @@ -# Copyright 2022 Canonical Ltd. +# Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. """Unit tests for migration module.""" @@ -15,477 +15,571 @@ from src import discourse, exceptions, migration, types_ from .. 
import factories -from .helpers import path_to_markdown +from .helpers import assert_substrings_in_string -# Pylint diesn't understand how the walrus operator works -# pylint: disable=undefined-variable,unused-variable @pytest.mark.parametrize( - "table_rows, expected_error_msg_contents", + "path, table_path, expected", [ + pytest.param(Path(""), types_.TablePath("test"), "test", id="table path only"), pytest.param( - [ - types_.TableRow( - level=-1, - path=(test_path := "path 1"), - navlink=(directory_navlink := types_.Navlink(title="title 1", link=None)), - ) - ], - (invalid_msg := "invalid level"), - id="negative table row level", - ), - pytest.param( - [ - types_.TableRow( - level=0, - path=(test_path), - navlink=directory_navlink, - ) - ], - invalid_msg, - id="zero table row level", - ), - pytest.param( - [ - types_.TableRow( - level=2, - path=(test_path), - navlink=directory_navlink, - ) - ], - (level_difference_msg := "level difference"), - id="invalid starting table row level", - ), - pytest.param( - [ - types_.TableRow( - level=1, - path=(test_path), - navlink=directory_navlink, - ), - types_.TableRow( - level=3, - path=(test_path), - navlink=directory_navlink, - ), - ], - level_difference_msg, - id="invalid table row level change", + Path("group-1"), types_.TablePath("group-1-test"), "test", id="test in group" ), pytest.param( - [ - types_.TableRow( - level=1, - path=(test_path), - navlink=(file_navlink := types_.Navlink(title="title 1", link="link 1")), - ), - types_.TableRow( - level=2, - path=(test_path), - navlink=(file_navlink := types_.Navlink(title="title 1", link="link 1")), - ), - ], - "invalid parent row", - id="invalid parent directory", + Path("group-1/nested/path"), + types_.TablePath("group-1-nested-path-test"), + "test", + id="test in group", + ), + pytest.param( + Path("not/matching/group"), types_.TablePath("test"), "test", id="non-prefix path" ), ], ) -def test__validate_row_levels_invalid_rows( - table_rows: list[types_.TableRow], expected_error_msg_contents: str -): +def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expected: str): """ - arrange: given table rows with invalid levels - act: when _validate_row_levels is called - assert: InvalidRow exception is raised with expected error message contents. + arrange: given a path and table path composed from groups + act: when _extract_name_from_paths is called + assert: the name part is extracted from table path. 
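Concretely, a small sketch of the prefix stripping the helper performs (mirroring the cases above):

from pathlib import Path

from src import migration

# The table path is the hyphen-joined directory path plus the name; stripping the
# directory prefix leaves the file or group name.
assert migration._extract_name_from_paths(current_path=Path(""), table_path="test") == "test"
assert (
    migration._extract_name_from_paths(current_path=Path("group-1"), table_path="group-1-test")
    == "test"
)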
""" - with pytest.raises(exceptions.InvalidTableRowError) as exc_info: - migration._validate_row_levels(table_rows=table_rows) - - exc_str = str(exc_info.value).lower() - assert expected_error_msg_contents in exc_str + assert migration._extract_name_from_paths(current_path=path, table_path=table_path) == expected @pytest.mark.parametrize( - "table_rows", + "depth, row, is_first_row, expected_message_contents", [ pytest.param( - [ - types_.TableRow( - level=1, - path=("path 1"), - navlink=(types_.Navlink(title="title 1", link=None)), - ), - ], - id="valid level", + 0, + factories.TableRowFactory(level=2), + True, + ( + "invalid starting row level", + "a table row must start with level value 1", + "please fix the upstream first and re-run", + ), + id="Invalid starting row", ), pytest.param( - [ - types_.TableRow( - level=1, - path=("path 1"), - navlink=(types_.Navlink(title="title 1", link=None)), - ), - types_.TableRow( - level=2, - path=("path 2"), - navlink=(types_.Navlink(title="title 2", link="link")), - ), - ], - id="increasing levels", + 1, + factories.TableRowFactory(level=0), + False, + ("invalid row level", "zero or negative level value is invalid."), + id="Invalid level(0)", ), pytest.param( - [ - types_.TableRow( - level=1, - path=("path 1"), - navlink=(types_.Navlink(title="title 1", link=None)), - ), - types_.TableRow( - level=2, - path=("path 2"), - navlink=(types_.Navlink(title="title 2", link="link 1")), - ), - types_.TableRow( - level=1, - path=("path 3"), - navlink=(types_.Navlink(title="title 3", link="link 2")), - ), - ], - id="descend one level", + 1, + factories.TableRowFactory(level=-1), + False, + ("invalid row level", "zero or negative level value is invalid."), + id="Invalid level(negative value)", ), pytest.param( - [ - types_.TableRow( - level=1, - path=("path 1"), - navlink=(types_.Navlink(title="title 1", link=None)), - ), - types_.TableRow( - level=2, - path=("path 2"), - navlink=(types_.Navlink(title="title 2", link=None)), - ), - types_.TableRow( - level=3, - path=("path 3"), - navlink=(types_.Navlink(title="title 3", link="link 2")), - ), - types_.TableRow( - level=1, - path=("path 4"), - navlink=(types_.Navlink(title="title 4", link="link 3")), - ), - ], - id="descend multiple levels", + 1, + factories.TableRowFactory(level=3), + False, + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), + id="Invalid level sequence jump", ), ], ) -def test__validate_row_levels(table_rows: list[types_.TableRow]): +def test__assert_valid_row_error( + depth: int, row: types_.TableRow, is_first_row: bool, expected_message_contents: Iterable[str] +): """ - arrange: given table rows with valid levels - act: when __validate_row_levels is called - assert: no exceptions are raised. + arrange: given an invalid group depth(level), table row and is_first_row combinations + act: when _assert_valid_row is called + assert: InputError is raised with expected error message contents. 
""" - migration._validate_row_levels(table_rows=table_rows) + with pytest.raises(exceptions.InputError) as exc: + migration._assert_valid_row(group_depth=depth, row=row, is_first_row=is_first_row) + + assert_substrings_in_string(expected_message_contents, str(exc.value).lower()) @pytest.mark.parametrize( - "table_rows, expected_files", + "depth, row, is_first_row", [ pytest.param( - [ - root_dir_row := types_.TableRow( - level=1, - path="root path 1", - navlink=(dir_navlink := types_.Navlink(title="title 1", link=None)), - ) - ], - [ - root_dir_gitkeep := types_.GitkeepMeta( - path=Path(root_dir_row.path) / (gitkeep_file := Path(".gitkeep")), - table_row=root_dir_row, - ) - ], - id="table row no navlink", - ), - pytest.param( - [ - root_dir_row, - ( - root_dir_row_2 := types_.TableRow( - level=1, - path="root path 2", - navlink=dir_navlink, - ) - ), - ], - [ - root_dir_gitkeep, - root_dir_2_gitkeep := types_.GitkeepMeta( - path=Path(root_dir_row_2.path) / gitkeep_file, table_row=root_dir_row_2 - ), - ], - id="multiple empty directories", + 0, + factories.TableRowFactory(level=1), + True, + id="Valid starting row", ), pytest.param( - [ - root_dir_row, - sub_dir_row := types_.TableRow( - level=2, - path="sub path 1", - navlink=(dir_navlink), - ), - ], - [ - types_.GitkeepMeta( - path=Path(root_dir_row.path) / Path(sub_dir_row.path) / gitkeep_file, - table_row=sub_dir_row, - ), - ], - id="nested empty directories", + 1, + factories.TableRowFactory(level=2), + False, + id="Valid row sequence(increase)", + ), + pytest.param( + 3, + factories.TableRowFactory(level=2), + False, + id="Valid row sequence(decrease)", + ), + pytest.param( + 3, + factories.TableRowFactory(level=1), + False, + id="Valid row sequence(decrease multi)", ), ], ) -def test_extract_docs__from_table_rows_empty_directory_rows( - table_rows: Iterable[types_.TableRow], - expected_files: list[types_.MigrationFileMeta], -): +def test__assert_valid_row(depth: int, row: types_.TableRow, is_first_row: bool): """ - arrange: given valid table rows with no navlink(only directories) - act: when migrate is called - assert: .gitkeep files metadata with respective directories are returned. + arrange: given a valid group depth(level), table row and is_first_row combinations + act: when _assert_valid_row is called + assert: No exceptions are raised. """ - assert list(migration._extract_docs_from_table_rows(table_rows=table_rows)) == expected_files - - -def test__index_file_from_content(): - """ - arrange: given content to write to index file - act: when _index_file_from_content is called - assert: index file metadata is returned. 
- """ - content = "content 1" - - assert migration._index_file_from_content(content=content) == types_.IndexDocumentMeta( - path=Path("index.md"), content=content - ) + migration._assert_valid_row(group_depth=depth, row=row, is_first_row=is_first_row) @pytest.mark.parametrize( - "table_rows, index_content, expected_migration_metadata", + "table_rows", [ pytest.param( - [], - content := "content 1", - [index_meta := types_.IndexDocumentMeta(path=Path("index.md"), content=content)], - id="no table rows", + ( + factories.TableRowFactory(level=1, is_document=True), + factories.TableRowFactory(level=2, is_document=True), + ), + id="document sequence level increase(no group)", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_document=True), + factories.TableRowFactory(level=3, is_document=True), + ), + id="document sequence level increase(skip level)", ), pytest.param( - [root_dir_row, root_dir_row_2], - content, - [index_meta, root_dir_gitkeep, root_dir_2_gitkeep], - id="multiple table_rows", + ( + factories.TableRowFactory(level=1, is_group=True), + factories.TableRowFactory(level=3, is_group=True), + ), + id="group sequence level increase(skip level)", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_document=True), + factories.TableRowFactory(level=2, is_group=True), + ), + id="document group sequence level increase(no group)", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_group=True), + factories.TableRowFactory(level=2, is_document=True), + factories.TableRowFactory(level=3, is_group=True), + ), + id="document group sequence level increase(doc doesn't increase group depth)", ), ], ) -def test_get_docs_metadata( - table_rows: list[types_.TableRow], - index_content: str, - expected_migration_metadata: list[types_.MigrationFileMeta], -): +def test__extract_docs_from_table_rows_invalid_sequence(table_rows: Iterable[types_.TableRow]): """ - arrange: given document table rows and index file content - act: when get_docs_metadata is called - assert: expected metadata are returned. + arrange: given an invalid table row sequence + act: when _extract_docs_from_table_rows is called + assert: InputError is raised with invalid level value sequence error message. 
""" - assert ( - list(migration.get_docs_metadata(table_rows=table_rows, index_content=index_content)) - == expected_migration_metadata + with pytest.raises(exceptions.InputError) as exc: + all(migration._extract_docs_from_table_rows(table_rows=table_rows)) + + assert_substrings_in_string( + ("invalid row level value sequence", "level sequence jumps of more than 1 is invalid"), + str(exc.value).lower(), ) +# Pylint doesn't understand how the walrus operator works +# pylint: disable=undefined-variable,unused-variable @pytest.mark.parametrize( - "table_rows, expected_files", + "table_rows, expected_metas", [ pytest.param( - [ - root_file_row := types_.TableRow( - level=1, - path="root file 1", - navlink=( - file_navlink := types_.Navlink( - title="title 1", link=(link_str := "link 1") - ) - ), - ) - ], - [ + (doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True),), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_file_row.path)), - link=link_str, - table_row=root_file_row, - ) - ], - id="single file", - ), - pytest.param( - [ - root_dir_row, - sub_file_row := types_.TableRow( - level=2, - path="sub file 1", - navlink=file_navlink, + path=Path("doc-1.md"), link=doc_row_1.navlink.link, table_row=doc_row_1 ), - ], - [ + ), + id="single initial document", + ), + pytest.param( + (group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True),), + (types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1),), + id="single initial group", + ), + pytest.param( + ( + doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True), + doc_row_2 := factories.TableRowFactory(level=1, path="doc-2", is_document=True), + ), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), - link=link_str, - table_row=sub_file_row, - ) - ], - id="single file in directory", - ), - pytest.param( - [ - root_file_row, - root_file_row_2 := types_.TableRow( - level=1, - path="root file 2", - navlink=file_navlink, + path=Path("doc-1.md"), link=doc_row_1.navlink.link, table_row=doc_row_1 ), - ], - [ types_.DocumentMeta( - path=path_to_markdown(Path(root_file_row.path)), - link=link_str, - table_row=root_file_row, + path=Path("doc-2.md"), link=doc_row_2.navlink.link, table_row=doc_row_2 ), + ), + id="two documents", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + group_row_2 := factories.TableRowFactory(level=1, path="group-2", is_group=True), + ), + ( + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1), + types_.GitkeepMeta(path=Path("group-2/.gitkeep"), table_row=group_row_2), + ), + id="distinct two groups", + ), + pytest.param( + ( + doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True), + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + ), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_file_row_2.path)), - link=link_str, - table_row=root_file_row_2, + path=Path("doc-1.md"), link=doc_row_1.navlink.link, table_row=doc_row_1 ), - ], - id="multiple files", + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1), + ), + id="distinct document and group", ), pytest.param( - [ - root_dir_row, - sub_file_row, - sub_file_row_2 := types_.TableRow( - level=2, - path="sub file 2", - navlink=(file_navlink), - ), - ], - [ + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + doc_row_1 := 
factories.TableRowFactory(level=1, path="doc-1", is_document=True), + ), + ( + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1), types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), - link=link_str, - table_row=sub_file_row, + path=Path("doc-1.md"), link=doc_row_1.navlink.link, table_row=doc_row_1 ), + ), + id="distinct group and document", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + doc_row_1 := factories.TableRowFactory(level=2, path="doc-1", is_document=True), + ), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row_2.path)), - link=link_str, - table_row=sub_file_row_2, + path=Path("group-1/doc-1.md"), + link=doc_row_1.navlink.link, + table_row=doc_row_1, ), - ], - id="multiple files in directory", - ), - pytest.param( - [ - root_dir_row, - sub_file_row, - sub_file_row_2, - root_dir_row_2, - sub_file_row, - sub_file_row_2, - ], - [ + ), + id="nested document in group", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + group_row_2 := factories.TableRowFactory(level=2, path="group-2", is_group=True), + ), + (types_.GitkeepMeta(path=Path("group-1/group-2/.gitkeep"), table_row=group_row_2),), + id="nested group in group", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + group_row_2 := factories.TableRowFactory(level=1, path="group-2", is_group=True), + group_row_3 := factories.TableRowFactory(level=1, path="group-3", is_group=True), + ), + ( + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1), + types_.GitkeepMeta(path=Path("group-2/.gitkeep"), table_row=group_row_2), + types_.GitkeepMeta(path=Path("group-3/.gitkeep"), table_row=group_row_3), + ), + id="distinct rows(group, group, group)", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True), + group_row_2 := factories.TableRowFactory(level=1, path="group-2", is_group=True), + ), + ( + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row_1), types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row.path)), - link=link_str, - table_row=sub_file_row, + path=Path("doc-1.md"), + link=doc_row_1.navlink.link, + table_row=doc_row_1, ), - types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row.path) / Path(sub_file_row_2.path)), - link=link_str, - table_row=sub_file_row_2, + types_.GitkeepMeta(path=Path("group-2/.gitkeep"), table_row=group_row_2), + ), + id="distinct rows(group, doc, group)", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + nested_doc_row_1 := factories.TableRowFactory( + level=2, path="group-1-doc-1", is_document=True ), + group_row_2 := factories.TableRowFactory(level=1, path="group-2", is_group=True), + ), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row_2.path) / Path(sub_file_row.path)), - link=link_str, - table_row=sub_file_row, + path=Path("group-1/doc-1.md"), + link=nested_doc_row_1.navlink.link, + table_row=nested_doc_row_1, ), + types_.GitkeepMeta(path=Path("group-2/.gitkeep"), table_row=group_row_2), + ), + id="multi rows 1 nested(group, nested-doc, group)", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, 
path="group-1", is_group=True), + nested_doc_row_1 := factories.TableRowFactory( + level=2, path="group-1-doc-1", is_document=True + ), + nested_group_row_1 := factories.TableRowFactory( + level=2, path="group-1-group-2", is_group=True + ), + ), + ( types_.DocumentMeta( - path=path_to_markdown(Path(root_dir_row_2.path) / Path(sub_file_row_2.path)), - link=link_str, - table_row=sub_file_row_2, + path=Path("group-1/doc-1.md"), + link=nested_doc_row_1.navlink.link, + table_row=nested_doc_row_1, ), - ], - id="multiple files in multiple directory", - ), - pytest.param( - [ - root_dir_row, - sub_dir_row, - ( - nested_file_row := types_.TableRow( - level=3, - path="path 3", - navlink=(file_navlink), - ) + types_.GitkeepMeta( + path=Path("group-1/group-2/.gitkeep"), table_row=nested_group_row_1 + ), + ), + id="multi rows 2 separately nested(group, nested-group, nested-doc)", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + nested_group_row_1 := factories.TableRowFactory( + level=2, path="group-1-group-2", is_group=True + ), + nested_doc_row_1 := factories.TableRowFactory( + level=3, path="group-1-group-2-doc-1", is_document=True ), - ], - [ + ), + ( types_.DocumentMeta( - path=path_to_markdown( - Path(root_dir_row.path) - / Path(sub_dir_row.path) - / Path(nested_file_row.path) - ), - link=link_str, - table_row=nested_file_row, + path=Path("group-1/group-2/doc-1.md"), + link=nested_doc_row_1.navlink.link, + table_row=nested_doc_row_1, ), - ], - id="nested directory file", + ), + id="multi rows nested(group, nested-group, nested-group-nested-doc)", ), ], ) -def test_extract_docs( - table_rows: Iterable[types_.TableRow], - expected_files: list[types_.MigrationFileMeta], +def test__extract_docs_from_table_rows( + table_rows: Iterable[types_.TableRow], expected_metas: Iterable[types_.DocumentMeta] ): """ - arrange: given valid table rows - act: when migrate is called - assert: document file with correct paths are returned. + arrange: given an valid table row sequences + act: when _extract_docs_from_table_rows is called + assert: expected document metadatas are yielded. 
""" - assert list(migration._extract_docs_from_table_rows(table_rows=table_rows)) == expected_files + assert ( + tuple(row for row in migration._extract_docs_from_table_rows(table_rows=table_rows)) + == expected_metas + ) -def test__migrate_gitkeep(tmp_path: Path): +@pytest.mark.parametrize( + "row, group_path, group_depth, expected_path_depth_pair", + [ + pytest.param( + factories.TableRowFactory(level=1, path="test-1", is_document=True), + Path(), + 0, + (Path(), 0), + id="single initial document", + ), + pytest.param( + factories.TableRowFactory(level=1, path="group-1", is_group=True), + Path(), + 0, + (Path("group-1"), 1), + id="single initial group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), + Path("group-1"), + 1, + (Path("group-1"), 1), + id="document in group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-group-2", is_group=True), + Path("group-1"), + 1, + (Path("group-1/group-2"), 2), + id="group in group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), + Path("group-1/group-2"), + 2, + (Path("group-1"), 1), + id="document in same level group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-group-4", is_group=True), + Path("group-1/group-2"), + 2, + (Path("group-1/group-4"), 2), + id="group in same level group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), + Path("group-1/group-2/group-3"), + 3, + (Path("group-1"), 1), + id="document in lower level group", + ), + pytest.param( + factories.TableRowFactory(level=2, path="group-1-group-4", is_group=True), + Path("group-1/group-2/group-3"), + 3, + (Path("group-1/group-4"), 2), + id="group in lower level group", + ), + ], +) +def test__get_next_group_info( + row: types_.TableRow, + group_path: Path, + group_depth: int, + expected_path_depth_pair: tuple[Path, int], +): """ - arrange: given valid gitkeep metadata - act: when _migrate_gitkeep is called - assert: migration report is created with responsible table row, written path \ - and reason. + arrange: given table row, group path and group depth + act: when _get_next_group_info is called + assert: expected path with corresponding depth is returned. 
""" - path = Path("empty/docs/dir/.gitkeep") - table_row = types_.TableRow( - level=1, path="empty-directory", navlink=types_.Navlink(title="title 1", link=None) + assert ( + migration._get_next_group_info(row=row, group_path=group_path, group_depth=group_depth) + == expected_path_depth_pair ) - gitkeep_meta = types_.GitkeepMeta(path=path, table_row=table_row) - migration_report = migration._migrate_gitkeep(gitkeep_meta=gitkeep_meta, docs_path=tmp_path) - assert (file_path := tmp_path / path).is_file() - assert file_path.read_text(encoding="utf-8") == "" - assert migration_report.table_row == table_row - assert migration_report.result == types_.ActionResult.SUCCESS - assert migration_report.reason is not None - assert "created due to empty directory" in migration_report.reason +@pytest.mark.parametrize( + "row, path, expected_meta", + [ + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="doc-1"), + Path(), + types_.DocumentMeta( + path=Path("doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="single doc file", + ), + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="group-1-doc-1"), + Path("group-1"), + types_.DocumentMeta( + path=Path("group-1/doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="nested doc file", + ), + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="group-2-doc-1"), + Path("group-1"), + types_.DocumentMeta( + path=Path("group-1/group-2-doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="typo in nested doc file path", + ), + ], +) +def test__create_document_meta( + row: types_.TableRow, path: Path, expected_meta: types_.DocumentMeta +): + """ + arrange: given a document table row + act: when _create_document_meta is called + assert: document meta with path to file is returned. + """ + assert migration._create_document_meta(row=row, path=path) == expected_meta + + +@pytest.mark.parametrize( + "row, path, expected_meta", + [ + pytest.param( + group_row := factories.TableRowFactory(is_group=True, path="group-1"), + Path("group-1"), + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row), + id="single group row", + ), + pytest.param( + group_row := factories.TableRowFactory(is_group=True, path="group-1-group-2"), + Path("group-1/group-2"), + types_.GitkeepMeta(path=Path("group-1/group-2/.gitkeep"), table_row=group_row), + id="nested group row with correct current path", + ), + ], +) +def test__create_gitkeep_meta(row: types_.TableRow, path: Path, expected_meta: types_.GitkeepMeta): + """ + arrange: given a empty group table row + act: when _create_gitkeep_meta is called + assert: gitkeep meta denoting empty group is returned. + """ + assert migration._create_gitkeep_meta(row=row, path=path) == expected_meta + + +@pytest.mark.parametrize( + "content, expected_meta", + [ + pytest.param( + content := "content-1", + types_.IndexDocumentMeta(path=Path("index.md"), content=content), + ), + ], +) +def test__index_file_from_content(content: str, expected_meta: types_.IndexDocumentMeta): + """ + arrange: given an index file content + act: when _index_file_from_content is called + assert: expected index document metadata is returned. 
+ """ + assert migration._index_file_from_content(content) == expected_meta + + +@pytest.mark.parametrize( + "meta", + [ + pytest.param( + types_.GitkeepMeta(path=Path(".gitkeep"), table_row=factories.TableRowFactory()), + id="single .gitkeep", + ), + pytest.param( + types_.GitkeepMeta( + path=Path("nested-dir/.gitkeep"), table_row=factories.TableRowFactory() + ), + id="nested .gitkeep", + ), + ], +) +def test__migrate_gitkeep(meta: types_.GitkeepMeta, tmp_path: Path): + """ + arrange: given a gitkeep file metadata and a temporary path denoting docs directory + act: when _migrate_gitkeep is called + assert: Successful action report is returned and gitkeep file is created. + """ + returned_report = migration._migrate_gitkeep(gitkeep_meta=meta, docs_path=tmp_path) + assert returned_report.table_row == meta.table_row + assert returned_report.result == types_.ActionResult.SUCCESS + assert returned_report.location == tmp_path / meta.path + assert returned_report.reason == migration.EMPTY_DIR_REASON + assert (tmp_path / meta.path).is_file() def test__migrate_document_fail(tmp_path: Path): @@ -498,7 +592,7 @@ def test__migrate_document_fail(tmp_path: Path): mocked_discourse.retrieve_topic.side_effect = (error := exceptions.DiscourseError("fail")) table_row = types_.TableRow( level=(level := 1), - path=(path_str := "empty-directory"), + path=(path_str := "empty-group-path"), navlink=types_.Navlink(title=(navlink_title := "title 1"), link=(link_str := "link 1")), ) document_meta = types_.DocumentMeta( @@ -568,7 +662,7 @@ def test__migrate_index(tmp_path: Path): assert file_path.read_text(encoding="utf-8") == content assert returned_report.table_row is None assert returned_report.result == types_.ActionResult.SUCCESS - assert returned_report.path == tmp_path / path + assert returned_report.location == tmp_path / path assert returned_report.reason is None @@ -586,9 +680,9 @@ def test__migrate_index(tmp_path: Path): ) ), ), - gitkeep_report := types_.MigrationReport( + gitkeep_report := types_.ActionReport( table_row=table_row_sample, - path=gitkeep_path, + location=gitkeep_path, result=types_.ActionResult.SUCCESS, reason=migration.EMPTY_DIR_REASON, ), @@ -600,9 +694,9 @@ def test__migrate_index(tmp_path: Path): table_row=table_row_sample, link="samplelink", ), - document_report := types_.MigrationReport( + document_report := types_.ActionReport( table_row=table_row_sample, - path=document_path, + location=document_path, result=types_.ActionResult.SUCCESS, reason=None, ), @@ -612,9 +706,9 @@ def test__migrate_index(tmp_path: Path): types_.IndexDocumentMeta( path=(index_path := Path("index.md")), content="index content" ), - types_.MigrationReport( + types_.ActionReport( table_row=None, - path=index_path, + location=index_path, result=types_.ActionResult.SUCCESS, reason=None, ), @@ -623,7 +717,7 @@ def test__migrate_index(tmp_path: Path): ], ) def test__run_one( - file_meta: types_.MigrationFileMeta, expected_report: types_.MigrationReport, tmp_path: Path + file_meta: types_.MigrationFileMeta, expected_report: types_.ActionReport, tmp_path: Path ): """ arrange: given a migration metadata and mocked discourse @@ -637,100 +731,157 @@ def test__run_one( file_meta=file_meta, discourse=mocked_discourse, docs_path=tmp_path ) - assert returned_report.path is not None - assert returned_report.path.is_file() - assert expected_report.path is not None - assert returned_report.path == tmp_path / expected_report.path + assert isinstance(returned_report.location, Path) + assert returned_report.location.is_file() + 
assert isinstance(expected_report.location, Path) + assert returned_report.location == tmp_path / expected_report.location assert returned_report.result == expected_report.result assert returned_report.reason == expected_report.reason assert returned_report.table_row == expected_report.table_row +def test__get_docs_metadata(): + """ + arrange: given table rows from index table and the index_content from index file + act: when _get_docs_metadata is called + assert: an iterable starting with index migration metadata is returned. + """ + table_rows = (factories.TableRowFactory(level=1),) + index_content = "index-content-1" + + returned_docs_metadata = tuple( + meta + for meta in migration._get_docs_metadata( + table_rows=table_rows, + index_content=index_content, + ) + ) + + assert len(returned_docs_metadata) == 2 + assert isinstance(returned_docs_metadata[0], types_.IndexDocumentMeta) + assert isinstance(returned_docs_metadata[1], types_.MigrationFileMeta) + + @pytest.mark.parametrize( - "migration_metas, expected_results", + "migration_results", [ - pytest.param([document_meta], [document_report], id="single"), pytest.param( - [document_meta, gitkeep_meta], [document_report, gitkeep_report], id="multiple" + (factories.ActionReportFactory(is_failed=True, is_migrate=True),), + id="single failed result", + ), + pytest.param( + ( + factories.ActionReportFactory(is_success=True, is_migrate=True), + factories.ActionReportFactory(is_failed=True, is_migrate=True), + ), + id="single failed result in successful result", + ), + pytest.param( + ( + factories.ActionReportFactory(is_skipped=True, is_migrate=True), + factories.ActionReportFactory(is_failed=True, is_migrate=True), + ), + id="single failed result in skipped result", + ), + pytest.param( + ( + factories.ActionReportFactory(is_success=True, is_migrate=True), + factories.ActionReportFactory(is_failed=True, is_migrate=True), + factories.ActionReportFactory(is_skipped=True, is_migrate=True), + factories.ActionReportFactory(is_failed=True, is_migrate=True), + ), + id="multiple failed results in multiple result types", ), ], ) -def test_run( - migration_metas: list[types_.MigrationFileMeta], - expected_results: list[types_.MigrationReport], - tmp_path: Path, -): +def test__assert_migration_success_failed_result(migration_results: Iterable[types_.ActionReport]): """ - arrange: given migration metadata and mocked discourse - act: when run is called - assert: migration reports are returned and files are created. + arrange: given an migration results iterable with a failed result + act: when _assert_migration_success is called + assert: Migration error is raised. 
""" - mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_discourse.retrieve_topic.side_effect = "content" - - returned_reports = migration.run( - documents=migration_metas, discourse=mocked_discourse, docs_path=tmp_path - ) - - for returned, expected in zip(returned_reports, expected_results): - assert returned.path is not None - assert returned.path.is_file() - assert expected.path is not None - assert returned.path == tmp_path / expected.path - assert returned.result == expected.result - assert returned.reason == expected.reason - assert returned.table_row == expected.table_row + with pytest.raises(exceptions.MigrationError): + migration._assert_migration_success(migration_results=migration_results) @pytest.mark.parametrize( "migration_results", [ pytest.param( - [failed_result := factories.MigrationReportFactory(is_failed=True)], id="failed result" + (factories.ActionReportFactory(is_success=True, is_migrate=True),), + id="successful result", ), pytest.param( - [success_result := factories.MigrationReportFactory(is_success=True), failed_result], - id="mixed result", + (factories.ActionReportFactory(is_skipped=True, is_migrate=True),), id="skipped result" + ), + pytest.param( + ( + factories.ActionReportFactory(is_success=True, is_migrate=True), + factories.ActionReportFactory(is_skipped=True, is_migrate=True), + ), + id="non-failed results", ), ], ) -def test_assert_migration_fail(migration_results: list[types_.MigrationReport]): +def test__assert_migration_success(migration_results: Iterable[types_.ActionReport]): """ - arrange: given at least one failed result in migration results - act: when assert_migration_success is called - assert: MigrationError exception is raised. - + arrange: given an migration results iterable with no failed result + act: when _assert_migration_success is called + assert: No exceptions are raised. """ - with pytest.raises(exceptions.MigrationError): - migration.assert_migration_success(migration_results=migration_results) + migration._assert_migration_success(migration_results=migration_results) @pytest.mark.parametrize( - "migration_results", + "table_rows, index_content, expected_files", [ pytest.param( - [success_result], - id="success result", + (factories.TableRowFactory(is_document=True, path="doc-1", level=1),), + "content-1", + (Path("doc-1.md"),), + id="single doc", ), pytest.param( - [skipped_result := factories.MigrationReportFactory(is_skipped=True)], - id="skipped result", + ( + factories.TableRowFactory(is_group=True, path="group-1", level=1), + factories.TableRowFactory(is_document=True, path="doc-1", level=2), + ), + "content-1", + (Path("group-1/doc-1.md"),), + id="nested doc", ), - pytest.param([success_result, success_result], id="success results"), - pytest.param([skipped_result, skipped_result], id="skipped results"), pytest.param( - [ - success_result, - skipped_result, - ], - id="mixed results", + ( + factories.TableRowFactory(is_group=True, path="group-1", level=1), + factories.TableRowFactory(is_group=True, path="group-2", level=2), + ), + "content-1", + (Path("group-1/group-2/.gitkeep"),), + id="nested group no docs", ), ], ) -def test_assert_migration_success(migration_results: list[types_.MigrationReport]): +def test_run( + table_rows: Iterable[types_.TableRow], + index_content: str, + tmp_path: Path, + expected_files: Iterable[Path], +): """ - arrange: given successful migration results - act: when assert_migration_success is called - assert: no exceptions are raised. 
+ arrange: given table rows, index content, mocked discourse and a temporary docs path + act: when run is called + assert: table rows are successfully migrated """ - migration.assert_migration_success(migration_results=migration_results) + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.return_value = "document-content" + + migration.run( + table_rows=table_rows, + index_content=index_content, + discourse=mocked_discourse, + docs_path=tmp_path, + ) + + assert (tmp_path / "index.md").read_text() == index_content + for path in expected_files: + assert (tmp_path / path).is_file() From d8b4f19e09a3ae5508eab028bd160c7a6f29202d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 14:59:52 +0800 Subject: [PATCH 065/107] fix iterable string --- tests/unit/test_pull_request.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index e4207323..da13fddd 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -212,7 +212,7 @@ def test_create_pull_request_no_dirty_files( pull_request.create_pull_request(repository=repository_client) assert_substrings_in_string( - ("no files seem to be migrated. please add contents upstream first."), + ("no files seem to be migrated. please add contents upstream first.",), str(exc.value).lower(), ) @@ -294,7 +294,7 @@ def test_get_repository_name_invalid(remote_url: str): pull_request._get_repository_name_from_git_url(remote_url=remote_url) assert_substrings_in_string( - ("invalid remote repository url"), + ("invalid remote repository url",), str(exc.value).lower(), ) From 42c33e6a41c5c108873fdff8f4d833703216f8ee Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 15:58:40 +0800 Subject: [PATCH 066/107] pin git version --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6dc91d64..44198287 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10-slim -RUN apt-get update && apt-get install -y --no-install-recommends git +RUN apt-get update && apt-get install -y --no-install-recommends git=2.34 RUN mkdir /usr/src/app WORKDIR /usr/src/app From 2ce737555fb3f501a09f1414029481e1368f7edd Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 15:58:57 +0800 Subject: [PATCH 067/107] remove newline --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index cd40f003..a9e9f986 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,6 @@ charmhub. There is a nice parameter, `dry_run`, which will do everything except make changes on discourse and log what would have happened. This will help you see what the action would have done. - 6. Check the logs for the URL to the index topic that the action created. This is also available under the `index_url` output of the action. This needs to be added to the `metadata.yaml` under the `docs` key. From 60fe7d6e099271c6e829c86436c3713fed228dfd Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:12:08 +0800 Subject: [PATCH 068/107] remove support for custom branch names --- README.md | 3 ++- action.yaml | 6 ------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index a9e9f986..e8901b45 100644 --- a/README.md +++ b/README.md @@ -103,7 +103,8 @@ charmhub. 
run: echo '${{ steps.publishDocumentation.outputs.index_url }}' ``` - additional branch_name input can be specified to create a pull request from a specific branch name. + a branch name with `upload-charm-docs/migrate` will be created and a pull request named `[upload-charm-docs] Migrate charm docs` will be created towards the working branch the workflow was triggered with. + In order to ensure that the branches can be created successfully, please make sure that there are no existing branches clashing with the name above. The action will now compare the discourse topics with the files and directories under the `docs` directory and make any changes based on differences. diff --git a/action.yaml b/action.yaml index 48e47eb9..7cb246b2 100644 --- a/action.yaml +++ b/action.yaml @@ -40,12 +40,6 @@ inputs: Required if running in migration mode. required: false type: string - branch_name: - description: | - Branch name to create pull request branch. Defaults to - upload-charm-docs/migrate. - required: false - type: string outputs: urls_with_actions: description: | From b8f95577e5a19baa2935f67a35eb633fd9504ca1 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:13:54 +0800 Subject: [PATCH 069/107] add dry_run mode in migration description --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index e8901b45..1f535c92 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,7 @@ charmhub. a branch name with `upload-charm-docs/migrate` will be created and a pull request named `[upload-charm-docs] Migrate charm docs` will be created towards the working branch the workflow was triggered with. In order to ensure that the branches can be created successfully, please make sure that there are no existing branches clashing with the name above. + Please note that `dry_run` parameter has no effect on migrate mode. The action will now compare the discourse topics with the files and directories under the `docs` directory and make any changes based on differences. From 3aeba0ce20fafdffc19e2cf90df00c246e2e4dd4 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:30:29 +0800 Subject: [PATCH 070/107] merge InvalidTableRowError with InputError --- src/exceptions.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/exceptions.py b/src/exceptions.py index 002d75f4..6867ee0e 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -36,10 +36,6 @@ class ActionError(BaseError): """A problem with the taking an action occurred.""" -class InvalidTableRowError(BaseError): - """A problematic table row is encountered.""" - - class MigrationError(BaseError): """A problem with migration occurred.""" From bfaa3214c6b0787e34499ed4453add2113fec09c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:36:15 +0800 Subject: [PATCH 071/107] update docstrings related to metadata --- src/__init__.py | 8 +++++--- src/index.py | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index ced8fdae..f64b80b9 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -36,7 +36,8 @@ def _run_reconcile( """Upload the documentation to charmhub. Args: - base_path: The base path to look for the metadata file in. + base_path: The base path of the repository. + metadata: Information about the charm. discourse: A client to the documentation server. dry_run: If enabled, only log the action that would be taken. delete_pages: Whether to delete pages that are no longer needed. 
@@ -75,12 +76,13 @@ def _run_migrate( Args: base_path: The base path to look for the metadata file in. - metadata: A metadata file with a link to the docs url. + metadata: Information about the charm. discourse: A client to the documentation server. repository: Repository client for managing both local and remote git repositories. Returns: - A Pull Request link to the Github repository. + A single key-value pair dictionary containing a link to the Pull Request containing + migrated documentation as key and sucessful action result as value. """ index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) server_content = ( diff --git a/src/index.py b/src/index.py index 9f94c8e7..88756c20 100644 --- a/src/index.py +++ b/src/index.py @@ -36,7 +36,7 @@ def get(metadata: Metadata, base_path: Path, server_client: Discourse) -> Index: """Retrieve the local and server index information. Args: - metadata: Parsed Metadata.yaml contents + metadata: Information about the charm. base_path: The base path to look for the metadata file in. server_client: A client to the documentation server. From 2fc549f5b5a2a8f4588a728bcfe0c04579fecd95 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:38:52 +0800 Subject: [PATCH 072/107] rename term group depth to group level --- src/migration.py | 36 ++++++++++++++++++------------------ tests/unit/test_migration.py | 10 +++++----- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/migration.py b/src/migration.py index be061c06..3db975d3 100644 --- a/src/migration.py +++ b/src/migration.py @@ -32,11 +32,11 @@ def _extract_name_from_paths(current_path: Path, table_path: types_.TablePath) - return table_path.removeprefix(f"{calculate_table_path(current_path)}-") -def _assert_valid_row(group_depth: int, row: types_.TableRow, is_first_row: bool) -> None: +def _assert_valid_row(group_level: int, row: types_.TableRow, is_first_row: bool) -> None: """Chekcs validity of the row with respect to group level. Args: - group_depth: Group depth in which the previous row was evaluated in. + group_level: Group level in which the previous row was evaluated in. row: Current row to be evaluated. is_first_row: True if current row is the first row in table. @@ -56,16 +56,16 @@ def _assert_valid_row(group_depth: int, row: types_.TableRow, is_first_row: bool "Zero or negative level value is invalid." f"Row: {row=!r}" ) - if row.level > group_depth + 1: + if row.level > group_level + 1: raise exceptions.InputError( "Invalid row level value sequence. Level sequence jumps of more than 1 is invalid." - f"Did you mean level {group_depth+1}?" + f"Did you mean level {group_level+1}?" f"Row: {row=!r}" ) def _get_next_group_info( - row: types_.TableRow, group_path: Path, group_depth: int + row: types_.TableRow, group_path: Path, group_level: int ) -> tuple[Path, int]: """Get next directory path representation of a group with it's depth. @@ -74,7 +74,7 @@ def _get_next_group_info( 2. While current group depth is not equal to target group depth 2.1. If current group depth is lower than target, should not be possible since it should have been caught during validation step. - target_group_depth being bigger than group_depth means traversing more than 1 level + target_group_level being bigger than group_level means traversing more than 1 level at a given step. 2.2. If current group depth is higher than target, decrement depth and adjust path by moving to parent path. 
@@ -83,24 +83,24 @@ def _get_next_group_info( Args: row: Table row in which to move the path to. group_path: Path representation of current group. - group_depth: Current group depth. + group_level: Current group level. Returns: A tuple consisting of next directory path representation of group and next group depth. """ - target_group_depth = row.level - 1 + target_group_level = row.level - 1 - while group_depth != target_group_depth: - group_depth -= 1 + while group_level != target_group_level: + group_level -= 1 group_path = group_path.parent if row.is_group: - group_depth += 1 + group_level += 1 group_path = group_path / _extract_name_from_paths( current_path=group_path, table_path=row.path ) - return (group_path, group_depth) + return (group_path, group_level) def _should_yield_gitkeep(row: types_.TableRow, next_depth: int, depth: int) -> bool: @@ -172,25 +172,25 @@ def _extract_docs_from_table_rows( Yields: Migration documents with navlink to content. .gitkeep file if empty group. """ - group_depth = 0 + group_level = 0 current_path = Path() previous_row: types_.TableRow | None = None for row in table_rows: - _assert_valid_row(group_depth=group_depth, row=row, is_first_row=previous_row is None) - (next_group_path, next_group_depth) = _get_next_group_info( - group_path=current_path, row=row, group_depth=group_depth + _assert_valid_row(group_level=group_level, row=row, is_first_row=previous_row is None) + (next_group_path, next_group_level) = _get_next_group_info( + group_path=current_path, row=row, group_level=group_level ) # if previously processed row was a group and it had nothing in it # we should yield a .gitkeep file to denote empty group. if ( previous_row and previous_row.is_group - and _should_yield_gitkeep(row=row, next_depth=next_group_depth, depth=group_depth) + and _should_yield_gitkeep(row=row, next_depth=next_group_level, depth=group_level) ): yield _create_gitkeep_meta(row=previous_row, path=current_path) - group_depth = next_group_depth + group_level = next_group_level current_path = next_group_path if not row.is_group: yield _create_document_meta(row=row, path=current_path) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 8daabb0c..44981759 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -94,7 +94,7 @@ def test__assert_valid_row_error( assert: InputError is raised with expected error message contents. """ with pytest.raises(exceptions.InputError) as exc: - migration._assert_valid_row(group_depth=depth, row=row, is_first_row=is_first_row) + migration._assert_valid_row(group_level=depth, row=row, is_first_row=is_first_row) assert_substrings_in_string(expected_message_contents, str(exc.value).lower()) @@ -134,7 +134,7 @@ def test__assert_valid_row(depth: int, row: types_.TableRow, is_first_row: bool) act: when _assert_valid_row is called assert: No exceptions are raised. 
""" - migration._assert_valid_row(group_depth=depth, row=row, is_first_row=is_first_row) + migration._assert_valid_row(group_level=depth, row=row, is_first_row=is_first_row) @pytest.mark.parametrize( @@ -392,7 +392,7 @@ def test__extract_docs_from_table_rows( @pytest.mark.parametrize( - "row, group_path, group_depth, expected_path_depth_pair", + "row, group_path, group_level, expected_path_depth_pair", [ pytest.param( factories.TableRowFactory(level=1, path="test-1", is_document=True), @@ -455,7 +455,7 @@ def test__extract_docs_from_table_rows( def test__get_next_group_info( row: types_.TableRow, group_path: Path, - group_depth: int, + group_level: int, expected_path_depth_pair: tuple[Path, int], ): """ @@ -464,7 +464,7 @@ def test__get_next_group_info( assert: expected path with corresponding depth is returned. """ assert ( - migration._get_next_group_info(row=row, group_path=group_path, group_depth=group_depth) + migration._get_next_group_info(row=row, group_path=group_path, group_level=group_level) == expected_path_depth_pair ) From bc3174da77a72c42811dea66afc2fe5453b9c8cb Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 16:39:00 +0800 Subject: [PATCH 073/107] fix typo --- src/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/__init__.py b/src/__init__.py index f64b80b9..e082a50d 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -82,7 +82,7 @@ def _run_migrate( Returns: A single key-value pair dictionary containing a link to the Pull Request containing - migrated documentation as key and sucessful action result as value. + migrated documentation as key and successful action result as value. """ index = get_index(metadata=metadata, base_path=base_path, server_client=discourse) server_content = ( From 2d3619a76660d31faae23596c4aa8bfaae09bbe2 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 17:11:56 +0800 Subject: [PATCH 074/107] rename depth to level --- src/migration.py | 89 ++++++++++++++++++++---------------- tests/unit/test_migration.py | 32 ++++++------- 2 files changed, 66 insertions(+), 55 deletions(-) diff --git a/src/migration.py b/src/migration.py index 3db975d3..96035fc4 100644 --- a/src/migration.py +++ b/src/migration.py @@ -67,18 +67,18 @@ def _assert_valid_row(group_level: int, row: types_.TableRow, is_first_row: bool def _get_next_group_info( row: types_.TableRow, group_path: Path, group_level: int ) -> tuple[Path, int]: - """Get next directory path representation of a group with it's depth. + """Get next directory path representation of a group with it's level. Algorithm: - 1. Set target group depth as one above current row level. - 2. While current group depth is not equal to target group depth - 2.1. If current group depth is lower than target, + 1. Set target group level as one above current row level. + 2. While current group level is not equal to target group level + 2.1. If current group level is lower than target, should not be possible since it should have been caught during validation step. target_group_level being bigger than group_level means traversing more than 1 level at a given step. - 2.2. If current group depth is higher than target, decrement depth and adjust path by + 2.2. If current group level is higher than target, decrement level and adjust path by moving to parent path. - 3. If row is a group row, increment depth and adjust path by appending extracted row name. + 3. If row is a group row, increment level and adjust path by appending extracted row name. 
Args: row: Table row in which to move the path to. @@ -86,7 +86,7 @@ def _get_next_group_info( group_level: Current group level. Returns: - A tuple consisting of next directory path representation of group and next group depth. + A tuple consisting of next directory path representation of group and next group level. """ target_group_level = row.level - 1 @@ -103,20 +103,20 @@ def _get_next_group_info( return (group_path, group_level) -def _should_yield_gitkeep(row: types_.TableRow, next_depth: int, depth: int) -> bool: - """Determine whether to yield a gitkeep file depending on depth traversal. +def _should_yield_gitkeep(row: types_.TableRow, next_level: int, level: int) -> bool: + """Determine whether to yield a gitkeep file depending on level traversal. It is important to note that the previous row must have been an empty a group row. Args: row: Current table row to evaluate whether a gitkeep should be yielded first. - next_depth: Incoming group depth of current table row. - depth: Current depth being evaluated. + next_level: Incoming group level of current table row. + level: Current level being evaluated. Returns: True if gitkeep file should be yielded first before processing the row further. """ - return (row.is_group and next_depth <= depth) or (not row.is_group and next_depth < depth) + return (row.is_group and next_level <= level) or (not row.is_group and next_level < level) def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMeta: @@ -153,13 +153,13 @@ def _extract_docs_from_table_rows( Algorithm: 1. For each row: - 1.1. Check if the row is valid with respect to current group depth. - 1.2. Calculate next group depth and next group path from row. + 1.1. Check if the row is valid with respect to current group level. + 1.2. Calculate next group level and next group path from row. 1.3. If previous row was a group and the current row is a document and we're traversing up the path OR the current row is a folder and we're in the in the same path or above, yield a gitkeep meta. - 1.4. Update current group depth and current group path. + 1.4. Update current group level and current group path. 1.5. If current row is a document, yield document meta. 2. If last row was a group, yield gitkeep meta. @@ -182,11 +182,11 @@ def _extract_docs_from_table_rows( group_path=current_path, row=row, group_level=group_level ) # if previously processed row was a group and it had nothing in it - # we should yield a .gitkeep file to denote empty group. + # it should yield a .gitkeep file to denote empty group. if ( previous_row and previous_row.is_group - and _should_yield_gitkeep(row=row, next_depth=next_group_level, depth=group_level) + and _should_yield_gitkeep(row=row, next_level=next_group_level, level=group_level) ): yield _create_gitkeep_meta(row=previous_row, path=current_path) @@ -214,11 +214,26 @@ def _index_file_from_content(content: str) -> types_.IndexDocumentMeta: return types_.IndexDocumentMeta(path=Path("index.md"), content=content) +def _build_path(docs_path: Path, document_meta: types_.MigrationFileMeta) -> Path: + """Construct path leading to document to be created. + + Args: + docs_path: Path to documentation directory. + document_meta: Information about document to be migrated. + + Returns: + Full path to document to be migrated. 
+ """ + path = docs_path / document_meta.path + path.parent.mkdir(parents=True, exist_ok=True) + return path + + def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types_.ActionReport: """Write gitkeep file to docs directory. Args: - gitkeep_meta: Gitkeep metadata from empty directory table row. + gitkeep_meta: Information about gitkeep file to be migrated. docs_path: Documentation folder path. Returns: @@ -226,13 +241,11 @@ def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types """ logging.info("migrate meta: %s", gitkeep_meta) - path = docs_path / gitkeep_meta.path - path.parent.mkdir(parents=True, exist_ok=True) - path.touch() + full_path = _build_path(docs_path=docs_path, document_meta=gitkeep_meta) return types_.ActionReport( table_row=gitkeep_meta.table_row, result=types_.ActionResult.SUCCESS, - location=path, + location=full_path, reason=EMPTY_DIR_REASON, ) @@ -243,7 +256,7 @@ def _migrate_document( """Write document file with content to docs directory. Args: - document_meta: Document metadata from directory table row with link. + document_meta: Information about document file to be migrated. discourse: Client to the documentation server. docs_path: The path to the docs directory to migrate all the documentation. @@ -261,13 +274,12 @@ def _migrate_document( location=None, reason=str(exc), ) - path = docs_path / document_meta.path - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(content, encoding="utf-8") + full_path = _build_path(docs_path=docs_path, document_meta=document_meta) + full_path.write_text(content, encoding="utf-8") return types_.ActionReport( table_row=document_meta.table_row, result=types_.ActionResult.SUCCESS, - location=path, + location=full_path, reason=None, ) @@ -276,7 +288,7 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path) -> typ """Write index document to docs repository. Args: - index_meta: Index file metadata. + index_meta: Information about index file to be migrated. docs_path: The path to the docs directory to migrate all the documentation. Returns: @@ -284,13 +296,12 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path) -> typ """ logging.info("migrate meta: %s", index_meta) - path = docs_path / index_meta.path - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(index_meta.content, encoding="utf-8") + full_path = _build_path(docs_path=docs_path, document_meta=index_meta) + full_path.write_text(index_meta.content, encoding="utf-8") return types_.ActionReport( table_row=None, result=types_.ActionResult.SUCCESS, - location=path, + location=full_path, reason=None, ) @@ -301,7 +312,7 @@ def _run_one( """Write document content relative to docs directory. Args: - file_meta: Migration file metadata corresponding to a row in index table. + file_meta: Information about migration file corresponding to a row in index table. discourse: Client to the documentation server. docs_path: The path to the docs directory to migrate all the documentation. 
@@ -345,19 +356,19 @@ def _get_docs_metadata( """ index_doc = _index_file_from_content(content=index_content) table_docs = _extract_docs_from_table_rows(table_rows=table_rows) - return itertools.chain([index_doc], table_docs) + return itertools.chain((index_doc,), table_docs) -def _assert_migration_success(migration_results: typing.Iterable[types_.ActionReport]) -> None: +def _assert_migration_success(migration_reports: typing.Iterable[types_.ActionReport]) -> None: """Assert all documents have been successfully migrated. Args: - migration_results: Migration results from server to local. + migration_results: Report containing migration details from server to local repository. Returns: None if success, raises MigrationError otherwise. """ - if any(result for result in migration_results if result.result is types_.ActionResult.FAIL): + if any(result for result in migration_reports if result.result is types_.ActionResult.FAIL): raise exceptions.MigrationError( "Error migrating the docs, please check the logs for more detail." ) @@ -372,7 +383,7 @@ def run( """Write document content to docs_path. Args: - documents: metadata about a file to be migrated to local docs directory. + table_rows: Iterable sequence of documentation structure to be migrated. discourse: Client to the documentation server. docs_path: The path to the docs directory containing all the documentation. @@ -386,4 +397,4 @@ def run( _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) for document in _get_docs_metadata(table_rows=table_rows, index_content=index_content) ) - _assert_migration_success(migration_results=migration_reports) + _assert_migration_success(migration_reports=migration_reports) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 44981759..6d62bf1b 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -46,7 +46,7 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe @pytest.mark.parametrize( - "depth, row, is_first_row, expected_message_contents", + "level, row, is_first_row, expected_message_contents", [ pytest.param( 0, @@ -86,21 +86,21 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe ], ) def test__assert_valid_row_error( - depth: int, row: types_.TableRow, is_first_row: bool, expected_message_contents: Iterable[str] + level: int, row: types_.TableRow, is_first_row: bool, expected_message_contents: Iterable[str] ): """ - arrange: given an invalid group depth(level), table row and is_first_row combinations + arrange: given an invalid group level, table row and is_first_row combinations act: when _assert_valid_row is called assert: InputError is raised with expected error message contents. 
""" with pytest.raises(exceptions.InputError) as exc: - migration._assert_valid_row(group_level=depth, row=row, is_first_row=is_first_row) + migration._assert_valid_row(group_level=level, row=row, is_first_row=is_first_row) assert_substrings_in_string(expected_message_contents, str(exc.value).lower()) @pytest.mark.parametrize( - "depth, row, is_first_row", + "level, row, is_first_row", [ pytest.param( 0, @@ -128,13 +128,13 @@ def test__assert_valid_row_error( ), ], ) -def test__assert_valid_row(depth: int, row: types_.TableRow, is_first_row: bool): +def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool): """ - arrange: given a valid group depth(level), table row and is_first_row combinations + arrange: given a valid group level, table row and is_first_row combinations act: when _assert_valid_row is called assert: No exceptions are raised. """ - migration._assert_valid_row(group_level=depth, row=row, is_first_row=is_first_row) + migration._assert_valid_row(group_level=level, row=row, is_first_row=is_first_row) @pytest.mark.parametrize( @@ -174,7 +174,7 @@ def test__assert_valid_row(depth: int, row: types_.TableRow, is_first_row: bool) factories.TableRowFactory(level=2, is_document=True), factories.TableRowFactory(level=3, is_group=True), ), - id="document group sequence level increase(doc doesn't increase group depth)", + id="document group sequence level increase(doc doesn't increase group level)", ), ], ) @@ -392,7 +392,7 @@ def test__extract_docs_from_table_rows( @pytest.mark.parametrize( - "row, group_path, group_level, expected_path_depth_pair", + "row, group_path, group_level, expected_path_level_pair", [ pytest.param( factories.TableRowFactory(level=1, path="test-1", is_document=True), @@ -456,16 +456,16 @@ def test__get_next_group_info( row: types_.TableRow, group_path: Path, group_level: int, - expected_path_depth_pair: tuple[Path, int], + expected_path_level_pair: tuple[Path, int], ): """ - arrange: given table row, group path and group depth + arrange: given table row, group path and group level act: when _get_next_group_info is called - assert: expected path with corresponding depth is returned. + assert: expected path with corresponding level is returned. """ assert ( migration._get_next_group_info(row=row, group_path=group_path, group_level=group_level) - == expected_path_depth_pair + == expected_path_level_pair ) @@ -801,7 +801,7 @@ def test__assert_migration_success_failed_result(migration_results: Iterable[typ assert: Migration error is raised. """ with pytest.raises(exceptions.MigrationError): - migration._assert_migration_success(migration_results=migration_results) + migration._assert_migration_success(migration_reports=migration_results) @pytest.mark.parametrize( @@ -829,7 +829,7 @@ def test__assert_migration_success(migration_results: Iterable[types_.ActionRepo act: when _assert_migration_success is called assert: No exceptions are raised. 
""" - migration._assert_migration_success(migration_results=migration_results) + migration._assert_migration_success(migration_reports=migration_results) @pytest.mark.parametrize( From f71153fdf273d29ac5668eb9b6f9cbe847d726cd Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 17:20:00 +0800 Subject: [PATCH 075/107] update to specific messages --- src/pull_request.py | 9 ++++++--- src/types_.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/pull_request.py b/src/pull_request.py index 4ce7dd18..8f3eaef2 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -17,11 +17,14 @@ GITHUB_HOSTNAME = "github.com" HTTPS_URL_PATTERN = re.compile(rf"^https?:\/\/.*@?{GITHUB_HOSTNAME}\/(.+\/.+?)(.git)?$") -ACTIONS_USER_NAME = "actions-bot" -ACTIONS_USER_EMAIL = "actions-bot@users.noreply.github.com" +ACTIONS_USER_NAME = "upload-charms-docs-bot" +ACTIONS_USER_EMAIL = "upload-charms-docs-bot@users.noreply.github.com" ACTIONS_COMMIT_MESSAGE = "migrate docs from server" ACTIONS_PULL_REQUEST_TITLE = "[upload-charm-docs] Migrate charm docs" -ACTIONS_PULL_REQUEST_BODY = "This pull request was autogenerated by upload-charm-docs" +ACTIONS_PULL_REQUEST_BODY = ( + "This pull request was autogenerated by upload-charm-docs to migrate " + "existing documentation from server to the git repository." +) PR_LINK_NO_CHANGE = "" BRANCH_PREFIX = "upload-charm-docs" DEFAULT_BRANCH_NAME = f"{BRANCH_PREFIX}/migrate" diff --git a/src/types_.py b/src/types_.py index 86ec080c..ae730d05 100644 --- a/src/types_.py +++ b/src/types_.py @@ -36,7 +36,7 @@ class ReconcileInputs: @dataclasses.dataclass class UserInputs(ReconcileInputs, MigrationInputs): - """Parsed user input values used to run upload-charm-docs.""" + """Configurable user input values used to run upload-charm-docs.""" class Metadata(typing.NamedTuple): From 8097c5b986a4aa2fe14365e460e9a527de18a6fc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 6 Jan 2023 17:27:18 +0800 Subject: [PATCH 076/107] add create gitkeep file --- src/migration.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/migration.py b/src/migration.py index 96035fc4..0602ff3e 100644 --- a/src/migration.py +++ b/src/migration.py @@ -242,6 +242,7 @@ def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types logging.info("migrate meta: %s", gitkeep_meta) full_path = _build_path(docs_path=docs_path, document_meta=gitkeep_meta) + full_path.touch() return types_.ActionReport( table_row=gitkeep_meta.table_row, result=types_.ActionResult.SUCCESS, From 8ce53936d2a6b77fcb4176bb33edbff38af470bf Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sat, 7 Jan 2023 00:49:33 +0800 Subject: [PATCH 077/107] add missing docstrings --- src/__init__.py | 3 +++ src/migration.py | 30 +++++++++++++++++------------- src/pull_request.py | 22 +++++++++++++++++++--- src/types_.py | 8 ++++++-- tests/conftest.py | 3 ++- tests/factories.py | 20 ++++++++++---------- tests/types.py | 7 ++++++- 7 files changed, 63 insertions(+), 30 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index e082a50d..bbf30988 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -110,6 +110,9 @@ def run(base_path: Path, discourse: Discourse, user_inputs: UserInputs) -> dict[ discourse: A client to the documentation server. user_inputs: Configurable inputs for running upload-charm-docs. + Raises: + InputError: if no valid running condition is matched. + Returns: All the URLs that had an action with the result of that action. 
""" diff --git a/src/migration.py b/src/migration.py index 0602ff3e..f309ee11 100644 --- a/src/migration.py +++ b/src/migration.py @@ -41,7 +41,7 @@ def _assert_valid_row(group_level: int, row: types_.TableRow, is_first_row: bool is_first_row: True if current row is the first row in table. Raises: - InputError on invalid row level or invalid row level sequence. + InputError: on invalid row level or invalid row level sequence. """ if is_first_row: if row.level != 1: @@ -125,6 +125,12 @@ def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMe Args: row: Row containing link to document and path information. path: Relative path to where the document should reside. + + Raises: + MigrationError: if the table row that was passed in does not cantain a link to document. + + Returns: + Information required to migrate document. """ # this is to help mypy understand that link is not None. # this case cannot be possible since this is called for group rows only. @@ -142,6 +148,9 @@ def _create_gitkeep_meta(row: types_.TableRow, path: Path) -> types_.GitkeepMeta Args: row: An empty group row. path: Relative path to where the document should reside. + + Returns: + Information required to migrate empty group. """ return types_.GitkeepMeta(path=path / GITKEEP_FILENAME, table_row=row) @@ -166,9 +175,6 @@ def _extract_docs_from_table_rows( Args: table_rows: Table rows from the index file in the order of group hierarchy. - Raises: - InputError if invalid row level or invalid sequence of row level is found. - Yields: Migration documents with navlink to content. .gitkeep file if empty group. """ @@ -317,6 +323,9 @@ def _run_one( discourse: Client to the documentation server. docs_path: The path to the docs directory to migrate all the documentation. + Raises: + MigrationError: if file_meta is of invalid metadata type. + Returns: Migration report containing migration result. """ @@ -364,10 +373,10 @@ def _assert_migration_success(migration_reports: typing.Iterable[types_.ActionRe """Assert all documents have been successfully migrated. Args: - migration_results: Report containing migration details from server to local repository. + migration_reports: Report containing migration details from server to local repository. - Returns: - None if success, raises MigrationError otherwise. + Raises: + MigrationError: if any migration report has failed. """ if any(result for result in migration_reports if result.result is types_.ActionResult.FAIL): raise exceptions.MigrationError( @@ -385,14 +394,9 @@ def run( Args: table_rows: Iterable sequence of documentation structure to be migrated. + index_content: Main content describing the charm. discourse: Client to the documentation server. docs_path: The path to the docs directory containing all the documentation. - - Raises: - MigrationError if any migration error occurred during migration. - - Returns: - Migration result reports containing action result and failure reason if any. """ migration_reports = ( _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) diff --git a/src/pull_request.py b/src/pull_request.py index 8f3eaef2..4a2e787a 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -61,6 +61,9 @@ def check_branch_exists(self, branch_name: str) -> bool: Args: branch_name: Branch name to check on remote. + Raises: + RepositoryClientError: if unexpected error occurred during git operation. + Returns: True if branch already exists, False otherwise. 
""" @@ -80,6 +83,9 @@ def create_branch(self, branch_name: str, commit_msg: str) -> None: Args: branch_name: New branch name. commit_msg: Commit message for current changes. + + Raises: + RepositoryClientError: if unexpected error occurred during git operation. """ logging.info("create new branch %s", branch_name) try: @@ -97,6 +103,9 @@ def create_github_pull_request(self, branch_name: str, base: str) -> str: branch_name: Branch name from which the pull request will be created. base: Base branch to which the pull request will be created. + Raises: + RepositoryClientError: if unexpected error occurred during git operation. + Returns: The web url to pull request page. """ @@ -132,13 +141,20 @@ def get_active_branch(self) -> str: return self._git_repo.active_branch.name def set_active_branch(self, branch_name: str) -> None: - """Set current active branch to an given branch that already exists.""" + """Set current active branch to an given branch that already exists. + + Args: + branch_name: target branch that already exists in git. + """ self._git_repo.git.checkout(branch_name) def create_pull_request(repository: RepositoryClient) -> str: """Create pull request for changes in given repository path. + Args: + repository: A git client to interact with local and remote git repository. + Raises: InputError: if pull request branch name is invalid or the a branch with same name already exists. @@ -184,7 +200,7 @@ def _get_repository_name_from_git_url(remote_url: str) -> str: e.g. https://github.com/canonical/upload-charm-docs.git Raises: - GitError if invalid remote url. + InputError: if invalid repository url was given. Returns: Git repository name. e.g. canonical/upload-charm-docs. @@ -203,7 +219,7 @@ def create_repository_client(access_token: str | None, base_path: Path) -> Repos base_path: Path where local .git resides in. Raises: - InputError: if invalid inputs are provided. + InputError: if invalid access token or invalid git remote URL is provided. Returns: A Github repository instance. diff --git a/src/types_.py b/src/types_.py index cdfac6f3..43e81888 100644 --- a/src/types_.py +++ b/src/types_.py @@ -355,7 +355,6 @@ class MigrationFileMeta: Attrs: path: The full document path to be written to. - table_row: The navigation table entry. """ path: Path @@ -363,7 +362,11 @@ class MigrationFileMeta: @dataclasses.dataclass class GitkeepMeta(MigrationFileMeta): - """Represents an empty directory from the index table.""" + """Represents an empty directory from the index table. + + Attrs: + table_row: Empty group row that is the source of .gitkeep file. + """ table_row: TableRow @@ -374,6 +377,7 @@ class DocumentMeta(MigrationFileMeta): Attrs: link: Link to content to read from. + table_row: Document row that is the source of document file. 
""" link: str diff --git a/tests/conftest.py b/tests/conftest.py index b1febc06..5612d88e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -94,10 +94,11 @@ def fixture_patch_create_repository_client( """Patch create_repository_client to return a mocked RepositoryClient.""" def mock_create_repository_client(access_token: str | None, base_path: Path): + """Mock create_repository_client patch function.""" # noqa: DCO020 # to accept keywords as arguments del access_token del base_path - return repository_client + return repository_client # noqa: DCO030 monkeypatch.setattr(src, "create_repository_client", mock_create_repository_client) diff --git a/tests/factories.py b/tests/factories.py index 7ad011a3..fecc5224 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -33,16 +33,16 @@ class Meta: class ActionReportFactory(factory.Factory): - """Generate Action reports.""" + """Generate Action reports.""" # noqa: DCO060 class Meta: - """Configuration for factory.""" + """Configuration for factory.""" # noqa: DCO060 model = types_.ActionReport abstract = False class Params: - """Variable factory params for generating different status report.""" + """Variable factory params for generating different status report.""" # noqa: DCO060 is_success = factory.Trait(result=types_.ActionResult.SUCCESS, reason=None) is_skipped = factory.Trait(result=types_.ActionResult.SKIP, reason="skipped") @@ -62,10 +62,10 @@ class Params: class ContentPageFactory(factory.Factory): - """Generate discourse content page.""" + """Generate discourse content page.""" # noqa: DCO060 class Meta: - """Configuration for factory.""" + """Configuration for factory.""" # noqa: DCO060 model = types.DiscoursePageMeta abstract = False @@ -75,10 +75,10 @@ class Meta: class UserInputFactory(factory.Factory): - """Generate user input tuple.""" + """Generate user input tuple.""" # noqa: DCO060 class Meta: - """Configuration for factory.""" + """Configuration for factory.""" # noqa: DCO060 model = types_.UserInputs abstract = False @@ -90,16 +90,16 @@ class Meta: class TableRowFactory(factory.Factory): - """Generate table row.""" + """Generate table row.""" # noqa: DCO060 class Meta: - """Configuration for factory.""" + """Configuration for factory.""" # noqa: DCO060 model = types_.TableRow abstract = False class Params: - """Variable factory params for generating different type of table row.""" + """Variable factory params for generating different type of table row.""" # noqa: DCO060 is_group = factory.Trait( navlink=factory.Sequence(lambda n: types_.Navlink(f"navlink-title-{n}", link=None)) diff --git a/tests/types.py b/tests/types.py index 15fca778..c9dd8f22 100644 --- a/tests/types.py +++ b/tests/types.py @@ -7,7 +7,12 @@ class DiscoursePageMeta(NamedTuple): - """Metadata for creating a discourse page.""" + """Metadata for creating a discourse page. + + Attrs: + title: Page title. + content: Page content. 
+ """ title: str content: str From c26da1fbe4652c628179a241cc2bd9a184e64421 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sat, 7 Jan 2023 10:19:28 +0800 Subject: [PATCH 078/107] pin git version --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 44198287..7768c4a1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10-slim -RUN apt-get update && apt-get install -y --no-install-recommends git=2.34 +RUN apt-get update && apt-get install -y --no-install-recommends git=1:2.30.2-1 RUN mkdir /usr/src/app WORKDIR /usr/src/app From 8cd0e560e963db38b999f866c7d63a2e463796d3 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 8 Jan 2023 16:41:20 +0800 Subject: [PATCH 079/107] pyupgrade & refurb feedback --- tests/integration/test___init__run_migrate.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/test___init__run_migrate.py b/tests/integration/test___init__run_migrate.py index 2a37f382..cb0548da 100644 --- a/tests/integration/test___init__run_migrate.py +++ b/tests/integration/test___init__run_migrate.py @@ -108,13 +108,13 @@ async def test_run_migrate( upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME assert tuple(urls_with_actions) == (mock_pull_request.html_url,) - assert ((group_1_path := upstream_doc_dir / "group-1")).is_dir() + assert (group_1_path := upstream_doc_dir / "group-1").is_dir() assert (group_1_path / migration.GITKEEP_FILENAME).is_file() - assert ((group_2_path := upstream_doc_dir / "group-2")).is_dir() + assert (group_2_path := upstream_doc_dir / "group-2").is_dir() assert (group_2_path / "content-1.md").read_text(encoding="utf-8") == content_page_1.content assert (group_2_path / "content-2.md").read_text(encoding="utf-8") == content_page_2.content - assert ((group_3_path := upstream_doc_dir / "group-3")).is_dir() - assert ((group_4_path := group_3_path / "group-4")).is_dir() + assert (group_3_path := upstream_doc_dir / "group-3").is_dir() + assert (group_4_path := group_3_path / "group-4").is_dir() assert (group_4_path / "content-3.md").read_text(encoding="utf-8") == content_page_3.content assert (group_3_path / "content-4.md").read_text(encoding="utf-8") == content_page_4.content assert (group_5_path := upstream_doc_dir / "group-5").is_dir() From a7b94f0aa8fb636b290466f1232f779df2829fa4 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 8 Jan 2023 18:08:30 +0800 Subject: [PATCH 080/107] add migrate mark --- pyproject.toml | 2 +- tests/integration/test___init__run_migrate.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bc27d14a..acd41f75 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ show_missing = true [tool.pytest.ini_options] minversion = "6.0" log_cli_level = "INFO" -markers = ["init", "discourse"] +markers = ["init", "migrate", "discourse"] # Formatting tools configuration [tool.black] diff --git a/tests/integration/test___init__run_migrate.py b/tests/integration/test___init__run_migrate.py index cb0548da..8eda703a 100644 --- a/tests/integration/test___init__run_migrate.py +++ b/tests/integration/test___init__run_migrate.py @@ -20,10 +20,10 @@ from .. 
import factories from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml -pytestmark = pytest.mark.init_migrate +pytestmark = pytest.mark.migrate -@pytest.mark.init_run_migrate +@pytest.mark.migrate @pytest.mark.asyncio @pytest.mark.usefixtures("patch_create_repository_client") async def test_run_migrate( From 540e94829eddd3f29c2ea77eee14446f874c0d97 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 8 Jan 2023 19:22:00 +0800 Subject: [PATCH 081/107] unpin git patch versions --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7768c4a1..edd2e1b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.10-slim -RUN apt-get update && apt-get install -y --no-install-recommends git=1:2.30.2-1 +RUN apt-get update && apt-get install -y --no-install-recommends git=1:2.30.* RUN mkdir /usr/src/app WORKDIR /usr/src/app From b3e0ad06b18af49a8afb65db5094caabb7242ffc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 11:11:49 +0800 Subject: [PATCH 082/107] convert row to markdown for error message --- src/migration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/migration.py b/src/migration.py index f309ee11..fb22698f 100644 --- a/src/migration.py +++ b/src/migration.py @@ -48,19 +48,19 @@ def _assert_valid_row(group_level: int, row: types_.TableRow, is_first_row: bool raise exceptions.InputError( "Invalid starting row level. A table row must start with level value 1. " "Please fix the upstream first and re-run." - f"Row: {row=!r}" + f"Row: {row.to_markdown()}" ) if row.level < 1: raise exceptions.InputError( f"Invalid row level: {row.level=!r}." "Zero or negative level value is invalid." - f"Row: {row=!r}" + f"Row: {row.to_markdown()}" ) if row.level > group_level + 1: raise exceptions.InputError( "Invalid row level value sequence. Level sequence jumps of more than 1 is invalid." f"Did you mean level {group_level+1}?" - f"Row: {row=!r}" + f"Row: {row.to_markdown()}" ) From c3ffa4d8bbbdc610beed253bf855d401733d080e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 11:12:11 +0800 Subject: [PATCH 083/107] add explanation for skipped docstring check --- tests/factories.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/tests/factories.py b/tests/factories.py index fecc5224..ff24bbcf 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -17,6 +17,8 @@ # The attributes of these classes are generators for the attributes of the meta class class PathInfoFactory(factory.Factory): + # Docstrings have been abbreviated for factories, checking for docstrings on model attributes + # can be skipped. """Generate PathInfos.""" # noqa: DCO060 class Meta: @@ -42,7 +44,15 @@ class Meta: abstract = False class Params: - """Variable factory params for generating different status report.""" # noqa: DCO060 + """Variable factory params for generating different status report. + + Attrs: + is_success: flag to instantiate successful action result. + is_skipped: flag to instantiate skipped action result. + is_failed: flag to instantiate failed action result. + is_migrate: flag to instantiate migration action result. Generates reconcile action + reports by default. 
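The Params docstring added above lists the trait flags as plain booleans. A hedged sketch of how a test is expected to exercise them, assuming the factory declares defaults for the remaining ActionReport fields and that the test package is importable as `tests`:

```python
from src import types_
from tests import factories

# is_success pins result=SUCCESS with reason=None; is_skipped pins result=SKIP
# with reason="skipped", matching the Trait declarations that follow.
success_report = factories.ActionReportFactory(is_success=True)
assert success_report.result == types_.ActionResult.SUCCESS
assert success_report.reason is None

skipped_report = factories.ActionReportFactory(is_skipped=True)
assert skipped_report.result == types_.ActionResult.SKIP
assert skipped_report.reason == "skipped"
```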
+ """ is_success = factory.Trait(result=types_.ActionResult.SUCCESS, reason=None) is_skipped = factory.Trait(result=types_.ActionResult.SKIP, reason="skipped") @@ -99,7 +109,12 @@ class Meta: abstract = False class Params: - """Variable factory params for generating different type of table row.""" # noqa: DCO060 + """Variable factory params for generating different type of table row. + + Attrs: + is_group: flag to instantiate a table row representing a group. + is_document: flag to instantiate a table row representing a document(Default). + """ is_group = factory.Trait( navlink=factory.Sequence(lambda n: types_.Navlink(f"navlink-title-{n}", link=None)) From e70762fb2d1097b98ff672e1ec6c40f6a05bbcb8 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 11:23:23 +0800 Subject: [PATCH 084/107] change permissions to be executable --- discourse_check_cleanup.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 discourse_check_cleanup.py diff --git a/discourse_check_cleanup.py b/discourse_check_cleanup.py old mode 100644 new mode 100755 From 285e22d38f7086fb0e3cc0b731f1de159c63bd90 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 11:58:21 +0800 Subject: [PATCH 085/107] rename testing init module --- .github/workflows/integration_test.yaml | 2 +- pyproject.toml | 2 +- .../{test___init__.py => test___init__run_reconcile.py} | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) rename tests/integration/{test___init__.py => test___init__run_reconcile.py} (99%) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 74164ada..a995d9f6 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -9,7 +9,7 @@ jobs: secrets: inherit with: pre-run-script: tests/integration/pre_run.sh - modules: '["discourse", "init"]' + modules: '["discourse", "reconcile", "migrate"]' self-tests: runs-on: ubuntu-22.04 steps: diff --git a/pyproject.toml b/pyproject.toml index acd41f75..b68b86b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ show_missing = true [tool.pytest.ini_options] minversion = "6.0" log_cli_level = "INFO" -markers = ["init", "migrate", "discourse"] +markers = ["reconcile", "migrate", "discourse"] # Formatting tools configuration [tool.black] diff --git a/tests/integration/test___init__.py b/tests/integration/test___init__run_reconcile.py similarity index 99% rename from tests/integration/test___init__.py rename to tests/integration/test___init__run_reconcile.py index 657ed9c1..ddbbcc8c 100644 --- a/tests/integration/test___init__.py +++ b/tests/integration/test___init__run_reconcile.py @@ -20,7 +20,7 @@ from .. import factories from ..unit.helpers import assert_substrings_in_string, create_metadata_yaml -pytestmark = pytest.mark.init +pytestmark = pytest.mark.reconcile @pytest.mark.asyncio From 22cf6693e0e7df22fe17e082d6fba7d12c383fe5 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 15:43:16 +0800 Subject: [PATCH 086/107] move logging to control flow --- src/pull_request.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pull_request.py b/src/pull_request.py index 4a2e787a..0bc67f32 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -87,7 +87,6 @@ def create_branch(self, branch_name: str, commit_msg: str) -> None: Raises: RepositoryClientError: if unexpected error occurred during git operation. 
""" - logging.info("create new branch %s", branch_name) try: self._git_repo.git.checkout("-b", branch_name) self._git_repo.git.add(".") @@ -109,7 +108,6 @@ def create_github_pull_request(self, branch_name: str, base: str) -> str: Returns: The web url to pull request page. """ - logging.info("create pull request %s", branch_name) try: pull_request = self._github_repo.create_pull( title=ACTIONS_PULL_REQUEST_TITLE, @@ -176,10 +174,12 @@ def create_pull_request(repository: RepositoryClient) -> str: f"Please try again after removing {DEFAULT_BRANCH_NAME}." ) + logging.info("create new branch %s", DEFAULT_BRANCH_NAME) repository.create_branch( branch_name=DEFAULT_BRANCH_NAME, commit_msg=ACTIONS_COMMIT_MESSAGE, ) + logging.info("create pull request %s", DEFAULT_BRANCH_NAME) pull_request_web_link = repository.create_github_pull_request( branch_name=DEFAULT_BRANCH_NAME, base=base, From 9d5647d3e70d35ee31909d37e64b427f0b8f2ec9 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 19:29:16 +0800 Subject: [PATCH 087/107] refactor validation out of main loop --- src/migration.py | 172 ++++++++++----------- tests/unit/test_migration.py | 281 ++++++++++++++++------------------- 2 files changed, 213 insertions(+), 240 deletions(-) diff --git a/src/migration.py b/src/migration.py index fb22698f..455cead2 100644 --- a/src/migration.py +++ b/src/migration.py @@ -32,91 +32,94 @@ def _extract_name_from_paths(current_path: Path, table_path: types_.TablePath) - return table_path.removeprefix(f"{calculate_table_path(current_path)}-") -def _assert_valid_row(group_level: int, row: types_.TableRow, is_first_row: bool) -> None: - """Chekcs validity of the row with respect to group level. +def _validate_table_rows( + table_rows: typing.Iterable[types_.TableRow], +) -> typing.Iterable[types_.TableRow]: + """Check whether a table row is valid in regards to the levels and grouping. Args: - group_level: Group level in which the previous row was evaluated in. - row: Current row to be evaluated. - is_first_row: True if current row is the first row in table. + table_rows: Parsed rows from the index table. Raises: - InputError: on invalid row level or invalid row level sequence. + InputError: if the row is the first row but the value of level is not 1 or + the level smaller than 1 or + if the level increment is greater than one. + + Yields: + Valid table row. """ - if is_first_row: - if row.level != 1: + is_first_row = True + current_group_level = 0 + for row in table_rows: + if is_first_row: + if row.level != 1: + raise exceptions.InputError( + "Invalid starting row level. A table row must start with level value 1. " + "Please fix the upstream first and re-run." + f"Row: {row.to_markdown()}" + ) + if row.level < 1: raise exceptions.InputError( - "Invalid starting row level. A table row must start with level value 1. " - "Please fix the upstream first and re-run." + f"Invalid row level: {row.level=!r}." + "Zero or negative level value is invalid." + f"Row: {row.to_markdown()}" + ) + if row.level > current_group_level + 1: + raise exceptions.InputError( + "Invalid row level value sequence. Level sequence jumps of more than 1 is invalid." + f"Did you mean level {current_group_level+1}?" f"Row: {row.to_markdown()}" ) - if row.level < 1: - raise exceptions.InputError( - f"Invalid row level: {row.level=!r}." - "Zero or negative level value is invalid." - f"Row: {row.to_markdown()}" - ) - if row.level > group_level + 1: - raise exceptions.InputError( - "Invalid row level value sequence. 
Level sequence jumps of more than 1 is invalid." - f"Did you mean level {group_level+1}?" - f"Row: {row.to_markdown()}" - ) + yield row -def _get_next_group_info( - row: types_.TableRow, group_path: Path, group_level: int -) -> tuple[Path, int]: - """Get next directory path representation of a group with it's level. + is_first_row = False + current_group_level = row.level if row.is_group else row.level - 1 - Algorithm: - 1. Set target group level as one above current row level. - 2. While current group level is not equal to target group level - 2.1. If current group level is lower than target, - should not be possible since it should have been caught during validation step. - target_group_level being bigger than group_level means traversing more than 1 level - at a given step. - 2.2. If current group level is higher than target, decrement level and adjust path by - moving to parent path. - 3. If row is a group row, increment level and adjust path by appending extracted row name. + +def _change_group_path( + group_path: Path, previous_row: types_.TableRow | None, row: types_.TableRow +) -> Path: + """Get path to row's working group. + + If row is a document, it's working group is the group one level below. + If row is a group, it should be the new working group. Args: - row: Table row in which to move the path to. - group_path: Path representation of current group. - group_level: Current group level. + group_path: the path of the group in which the last execution was run, it should be the + equivalent to previous_row's group path. + previous_row: table row evaluated before the current. None if current row is the first row + in execution. + row: A single row from table rows. Returns: - A tuple consisting of next directory path representation of group and next group level. + A path to the group where the row or contents of row should reside in. """ - target_group_level = row.level - 1 - - while group_level != target_group_level: - group_level -= 1 + # if it's the first row or the row level has increased from group row + if not previous_row: + # document belongs in current group path + if not row.is_group: + return group_path + # move one level of nesting into new group path + return group_path / _extract_name_from_paths(current_path=group_path, table_path=row.path) + + # working group path belongs in the group 1 level above + # i.e. group-1/document-1, group path is group-1 + # group-1/group-2, group-path is group-1/group-2 but both cases require + # moving to group-1 first to either generate document or group afterwards. + destination_group_level = row.level - 1 + current_group_level = previous_row.level if previous_row.is_group else previous_row.level - 1 + + while current_group_level != destination_group_level: + current_group_level -= 1 group_path = group_path.parent if row.is_group: - group_level += 1 group_path = group_path / _extract_name_from_paths( current_path=group_path, table_path=row.path ) - return (group_path, group_level) - - -def _should_yield_gitkeep(row: types_.TableRow, next_level: int, level: int) -> bool: - """Determine whether to yield a gitkeep file depending on level traversal. - - It is important to note that the previous row must have been an empty a group row. - - Args: - row: Current table row to evaluate whether a gitkeep should be yielded first. - next_level: Incoming group level of current table row. - level: Current level being evaluated. - - Returns: - True if gitkeep file should be yielded first before processing the row further. 
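A small walk-through of _change_group_path as defined above. The rows mirror the shapes produced by TableRowFactory; titles and links are made up, and the root-level case assumes calculate_table_path leaves a top-level name untouched:

```python
from pathlib import Path

from src import migration, types_

group_row = types_.TableRow(
    level=1, path="group-1", navlink=types_.Navlink(title="Group 1", link=None)
)
doc_row = types_.TableRow(
    level=2, path="group-1-doc-1", navlink=types_.Navlink(title="Doc 1", link="link-1")
)

# First row: a group opens a new directory under the current working path.
path = migration._change_group_path(group_path=Path(), previous_row=None, row=group_row)
assert path == Path("group-1")

# A level-2 document following a level-1 group stays inside that group's directory.
path = migration._change_group_path(group_path=path, previous_row=group_row, row=doc_row)
assert path == Path("group-1")
```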
- """ - return (row.is_group and next_level <= level) or (not row.is_group and next_level < level) + return group_path def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMeta: @@ -162,15 +165,11 @@ def _extract_docs_from_table_rows( Algorithm: 1. For each row: - 1.1. Check if the row is valid with respect to current group level. - 1.2. Calculate next group level and next group path from row. - 1.3. If previous row was a group and - the current row is a document and we're traversing up the path OR - the current row is a folder and we're in the in the same path or above, - yield a gitkeep meta. - 1.4. Update current group level and current group path. - 1.5. If current row is a document, yield document meta. - 2. If last row was a group, yield gitkeep meta. + 1.1. If previous row was a group and the level is equal to or lower than current + level, yield gitkeep meta + 1.2. Adjust current group path according to previous row path. + 1.3. If current row is a document, yield document meta. + 1.4. Set previous row as current row since we're done processing it. Args: table_rows: Table rows from the index file in the order of group hierarchy. @@ -178,34 +177,34 @@ def _extract_docs_from_table_rows( Yields: Migration documents with navlink to content. .gitkeep file if empty group. """ - group_level = 0 - current_path = Path() + current_group_path = Path() previous_row: types_.TableRow | None = None + previous_path: Path | None = None for row in table_rows: - _assert_valid_row(group_level=group_level, row=row, is_first_row=previous_row is None) - (next_group_path, next_group_level) = _get_next_group_info( - group_path=current_path, row=row, group_level=group_level - ) # if previously processed row was a group and it had nothing in it # it should yield a .gitkeep file to denote empty group. if ( previous_row + and previous_path and previous_row.is_group - and _should_yield_gitkeep(row=row, next_level=next_group_level, level=group_level) + and row.level <= previous_row.level ): - yield _create_gitkeep_meta(row=previous_row, path=current_path) + yield _create_gitkeep_meta(row=previous_row, path=previous_path) + + current_group_path = _change_group_path( + group_path=current_group_path, previous_row=previous_row, row=row + ) - group_level = next_group_level - current_path = next_group_path if not row.is_group: - yield _create_document_meta(row=row, path=current_path) + yield _create_document_meta(row=row, path=current_group_path) previous_row = row + previous_path = current_group_path # last group without documents yields gitkeep meta. if previous_row is not None and previous_row.is_group: - yield _create_gitkeep_meta(row=previous_row, path=current_path) + yield _create_gitkeep_meta(row=previous_row, path=current_group_path) def _index_file_from_content(content: str) -> types_.IndexDocumentMeta: @@ -398,8 +397,13 @@ def run( discourse: Client to the documentation server. docs_path: The path to the docs directory containing all the documentation. 
""" + valid_table_rows = ( + valid_table_row for valid_table_row in _validate_table_rows(table_rows=table_rows) + ) migration_reports = ( _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) - for document in _get_docs_metadata(table_rows=table_rows, index_content=index_content) + for document in _get_docs_metadata( + table_rows=valid_table_rows, index_content=index_content + ) ) _assert_migration_success(migration_reports=migration_reports) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 6d62bf1b..419b119f 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -46,12 +46,10 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe @pytest.mark.parametrize( - "level, row, is_first_row, expected_message_contents", + "table_rows, expected_message_contents", [ pytest.param( - 0, - factories.TableRowFactory(level=2), - True, + (factories.TableRowFactory(level=2),), ( "invalid starting row level", "a table row must start with level value 1", @@ -60,91 +58,38 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe id="Invalid starting row", ), pytest.param( - 1, - factories.TableRowFactory(level=0), - False, + ( + factories.TableRowFactory(level=1), + factories.TableRowFactory(level=0), + ), ("invalid row level", "zero or negative level value is invalid."), id="Invalid level(0)", ), pytest.param( - 1, - factories.TableRowFactory(level=-1), - False, + ( + factories.TableRowFactory(level=1), + factories.TableRowFactory(level=-1), + ), ("invalid row level", "zero or negative level value is invalid."), id="Invalid level(negative value)", ), pytest.param( - 1, - factories.TableRowFactory(level=3), - False, + (factories.TableRowFactory(level=1), factories.TableRowFactory(level=3)), ( "invalid row level value sequence", "level sequence jumps of more than 1 is invalid.", ), id="Invalid level sequence jump", ), - ], -) -def test__assert_valid_row_error( - level: int, row: types_.TableRow, is_first_row: bool, expected_message_contents: Iterable[str] -): - """ - arrange: given an invalid group level, table row and is_first_row combinations - act: when _assert_valid_row is called - assert: InputError is raised with expected error message contents. - """ - with pytest.raises(exceptions.InputError) as exc: - migration._assert_valid_row(group_level=level, row=row, is_first_row=is_first_row) - - assert_substrings_in_string(expected_message_contents, str(exc.value).lower()) - - -@pytest.mark.parametrize( - "level, row, is_first_row", - [ - pytest.param( - 0, - factories.TableRowFactory(level=1), - True, - id="Valid starting row", - ), - pytest.param( - 1, - factories.TableRowFactory(level=2), - False, - id="Valid row sequence(increase)", - ), - pytest.param( - 3, - factories.TableRowFactory(level=2), - False, - id="Valid row sequence(decrease)", - ), - pytest.param( - 3, - factories.TableRowFactory(level=1), - False, - id="Valid row sequence(decrease multi)", - ), - ], -) -def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool): - """ - arrange: given a valid group level, table row and is_first_row combinations - act: when _assert_valid_row is called - assert: No exceptions are raised. 
- """ - migration._assert_valid_row(group_level=level, row=row, is_first_row=is_first_row) - - -@pytest.mark.parametrize( - "table_rows", - [ pytest.param( ( factories.TableRowFactory(level=1, is_document=True), factories.TableRowFactory(level=2, is_document=True), ), + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), id="document sequence level increase(no group)", ), pytest.param( @@ -152,6 +97,10 @@ def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool) factories.TableRowFactory(level=1, is_document=True), factories.TableRowFactory(level=3, is_document=True), ), + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), id="document sequence level increase(skip level)", ), pytest.param( @@ -159,6 +108,10 @@ def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool) factories.TableRowFactory(level=1, is_group=True), factories.TableRowFactory(level=3, is_group=True), ), + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), id="group sequence level increase(skip level)", ), pytest.param( @@ -166,6 +119,10 @@ def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool) factories.TableRowFactory(level=1, is_document=True), factories.TableRowFactory(level=2, is_group=True), ), + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), id="document group sequence level increase(no group)", ), pytest.param( @@ -174,22 +131,69 @@ def test__assert_valid_row(level: int, row: types_.TableRow, is_first_row: bool) factories.TableRowFactory(level=2, is_document=True), factories.TableRowFactory(level=3, is_group=True), ), + ( + "invalid row level value sequence", + "level sequence jumps of more than 1 is invalid.", + ), id="document group sequence level increase(doc doesn't increase group level)", ), ], ) -def test__extract_docs_from_table_rows_invalid_sequence(table_rows: Iterable[types_.TableRow]): +def test__validate_table_rows_invalid_rows( + table_rows: Iterable[types_.TableRow], expected_message_contents: Iterable[str] +): """ - arrange: given an invalid table row sequence - act: when _extract_docs_from_table_rows is called - assert: InputError is raised with invalid level value sequence error message. + arrange: given invalid table_rows sequence + act: when _validate_table_rows is called + assert: InputError is raised with expected error message contents. 
""" with pytest.raises(exceptions.InputError) as exc: - all(migration._extract_docs_from_table_rows(table_rows=table_rows)) + tuple(row for row in migration._validate_table_rows(table_rows=table_rows)) + + assert_substrings_in_string(expected_message_contents, str(exc.value).lower()) - assert_substrings_in_string( - ("invalid row level value sequence", "level sequence jumps of more than 1 is invalid"), - str(exc.value).lower(), + +@pytest.mark.parametrize( + "table_rows", + [ + pytest.param( + (factories.TableRowFactory(level=1),), + id="Valid starting row", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_group=True), + factories.TableRowFactory(level=2, is_group=True), + ), + id="Valid row sequence(increase)", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_group=True), + factories.TableRowFactory(level=2, is_group=True), + factories.TableRowFactory(level=1, is_group=True), + ), + id="Valid row sequence(decrease)", + ), + pytest.param( + ( + factories.TableRowFactory(level=1, is_group=True), + factories.TableRowFactory(level=2, is_group=True), + factories.TableRowFactory(level=3, is_group=True), + factories.TableRowFactory(level=1, is_group=True), + ), + id="Valid row sequence(decrease multi)", + ), + ], +) +def test__validate_table_rows(table_rows: Iterable[types_.TableRow]): + """ + arrange: given table rows of valid sequence + act: when _validate_table_rows is called + assert: an iterable with original sequence preserved is returned. + """ + assert tuple(row for row in migration._validate_table_rows(table_rows=table_rows)) == tuple( + row for row in table_rows ) @@ -198,6 +202,7 @@ def test__extract_docs_from_table_rows_invalid_sequence(table_rows: Iterable[typ @pytest.mark.parametrize( "table_rows, expected_metas", [ + pytest.param((), (), id="no table rows"), pytest.param( (doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True),), ( @@ -356,6 +361,48 @@ def test__extract_docs_from_table_rows_invalid_sequence(table_rows: Iterable[typ ), id="multi rows 2 separately nested(group, nested-group, nested-doc)", ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + nested_group_row_1 := factories.TableRowFactory( + level=2, path="group-1-group-2", is_group=True + ), + doc_row_1 := factories.TableRowFactory(level=1, path="doc-1", is_document=True), + ), + ( + types_.GitkeepMeta( + path=Path("group-1/group-2/.gitkeep"), table_row=nested_group_row_1 + ), + types_.DocumentMeta( + path=Path("doc-1.md"), + link=doc_row_1.navlink.link, + table_row=doc_row_1, + ), + ), + id="multi rows 2 separately nested(group, nested-group, doc)", + ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + nested_group_row_1 := factories.TableRowFactory( + level=2, path="group-1-group-2", is_group=True + ), + nested_doc_row_1 := factories.TableRowFactory( + level=2, path="group-1-doc-1", is_document=True + ), + ), + ( + types_.GitkeepMeta( + path=Path("group-1/group-2/.gitkeep"), table_row=nested_group_row_1 + ), + types_.DocumentMeta( + path=Path("group-1/doc-1.md"), + link=nested_doc_row_1.navlink.link, + table_row=nested_doc_row_1, + ), + ), + id="multi rows 2 separately nested(group, nested-group, nested-doc)", + ), pytest.param( ( group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), @@ -391,84 +438,6 @@ def test__extract_docs_from_table_rows( ) -@pytest.mark.parametrize( - "row, group_path, group_level, 
expected_path_level_pair", - [ - pytest.param( - factories.TableRowFactory(level=1, path="test-1", is_document=True), - Path(), - 0, - (Path(), 0), - id="single initial document", - ), - pytest.param( - factories.TableRowFactory(level=1, path="group-1", is_group=True), - Path(), - 0, - (Path("group-1"), 1), - id="single initial group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), - Path("group-1"), - 1, - (Path("group-1"), 1), - id="document in group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-group-2", is_group=True), - Path("group-1"), - 1, - (Path("group-1/group-2"), 2), - id="group in group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), - Path("group-1/group-2"), - 2, - (Path("group-1"), 1), - id="document in same level group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-group-4", is_group=True), - Path("group-1/group-2"), - 2, - (Path("group-1/group-4"), 2), - id="group in same level group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-test-1", is_document=True), - Path("group-1/group-2/group-3"), - 3, - (Path("group-1"), 1), - id="document in lower level group", - ), - pytest.param( - factories.TableRowFactory(level=2, path="group-1-group-4", is_group=True), - Path("group-1/group-2/group-3"), - 3, - (Path("group-1/group-4"), 2), - id="group in lower level group", - ), - ], -) -def test__get_next_group_info( - row: types_.TableRow, - group_path: Path, - group_level: int, - expected_path_level_pair: tuple[Path, int], -): - """ - arrange: given table row, group path and group level - act: when _get_next_group_info is called - assert: expected path with corresponding level is returned. - """ - assert ( - migration._get_next_group_info(row=row, group_path=group_path, group_level=group_level) - == expected_path_level_pair - ) - - @pytest.mark.parametrize( "row, path, expected_meta", [ From 9a2c347f3923419a691b1e5c48a0b7b8ede9ce28 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 9 Jan 2023 19:40:36 +0800 Subject: [PATCH 088/107] add additional comment on algorithm --- src/migration.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/migration.py b/src/migration.py index 455cead2..65944f78 100644 --- a/src/migration.py +++ b/src/migration.py @@ -103,7 +103,7 @@ def _change_group_path( # move one level of nesting into new group path return group_path / _extract_name_from_paths(current_path=group_path, table_path=row.path) - # working group path belongs in the group 1 level above + # working group path belongs in the group 1 level above current row's level. # i.e. group-1/document-1, group path is group-1 # group-1/group-2, group-path is group-1/group-2 but both cases require # moving to group-1 first to either generate document or group afterwards. @@ -114,6 +114,9 @@ def _change_group_path( current_group_level -= 1 group_path = group_path.parent + # current state of group_path is 1 level above current row's level. + # move working group path to current group + # i.e. 
current: group-1, destination: group-1/group-2, row: group-1-group-2 if row.is_group: group_path = group_path / _extract_name_from_paths( current_path=group_path, table_path=row.path From cbb721c9f328b00a757d70822fda30a6d26ce1cf Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 09:06:08 +0800 Subject: [PATCH 089/107] improve docs readability --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1f535c92..d529a6d9 100644 --- a/README.md +++ b/README.md @@ -103,8 +103,11 @@ charmhub. run: echo '${{ steps.publishDocumentation.outputs.index_url }}' ``` - a branch name with `upload-charm-docs/migrate` will be created and a pull request named `[upload-charm-docs] Migrate charm docs` will be created towards the working branch the workflow was triggered with. - In order to ensure that the branches can be created successfully, please make sure that there are no existing branches clashing with the name above. + a branch name with `upload-charm-docs/migrate` will be created and a pull + request named `[upload-charm-docs] Migrate charm docs` will be created + towards the working branch the workflow was triggered with. + In order to ensure that the branches can be created successfully, please + make sure that there are no existing branches clashing with the name above. Please note that `dry_run` parameter has no effect on migrate mode. The action will now compare the discourse topics with the files and directories From 54c59649cf32e3033dfcb242f42b48935ae9d633 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 09:18:34 +0800 Subject: [PATCH 090/107] update docstrings to make terms clearer --- src/__init__.py | 4 ++-- src/migration.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index bbf30988..4decc567 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -75,7 +75,7 @@ def _run_migrate( """Migrate existing docs from charmhub to local repository. Args: - base_path: The base path to look for the metadata file in. + base_path: The base path of the repository. metadata: Information about the charm. discourse: A client to the documentation server. repository: Repository client for managing both local and remote git repositories. @@ -111,7 +111,7 @@ def run(base_path: Path, discourse: Discourse, user_inputs: UserInputs) -> dict[ user_inputs: Configurable inputs for running upload-charm-docs. Raises: - InputError: if no valid running condition is matched. + InputError: if no valid running mode is matched. Returns: All the URLs that had an action with the result of that action. diff --git a/src/migration.py b/src/migration.py index 65944f78..345102a6 100644 --- a/src/migration.py +++ b/src/migration.py @@ -163,7 +163,7 @@ def _create_gitkeep_meta(row: types_.TableRow, path: Path) -> types_.GitkeepMeta def _extract_docs_from_table_rows( table_rows: typing.Iterable[types_.TableRow], -) -> typing.Generator[types_.MigrationFileMeta, None, None]: +) -> typing.Iterable[types_.MigrationFileMeta]: """Extract necessary migration documents to build docs directory from server. 
Algorithm: @@ -222,7 +222,7 @@ def _index_file_from_content(content: str) -> types_.IndexDocumentMeta: return types_.IndexDocumentMeta(path=Path("index.md"), content=content) -def _build_path(docs_path: Path, document_meta: types_.MigrationFileMeta) -> Path: +def make_parent(docs_path: Path, document_meta: types_.MigrationFileMeta) -> Path: """Construct path leading to document to be created. Args: @@ -230,7 +230,7 @@ def _build_path(docs_path: Path, document_meta: types_.MigrationFileMeta) -> Pat document_meta: Information about document to be migrated. Returns: - Full path to document to be migrated. + Full path to the parent directory of the document to be migrated. """ path = docs_path / document_meta.path path.parent.mkdir(parents=True, exist_ok=True) @@ -238,7 +238,7 @@ def _build_path(docs_path: Path, document_meta: types_.MigrationFileMeta) -> Pat def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types_.ActionReport: - """Write gitkeep file to docs directory. + """Write gitkeep file to a path inside docs directory. Args: gitkeep_meta: Information about gitkeep file to be migrated. @@ -249,7 +249,7 @@ def _migrate_gitkeep(gitkeep_meta: types_.GitkeepMeta, docs_path: Path) -> types """ logging.info("migrate meta: %s", gitkeep_meta) - full_path = _build_path(docs_path=docs_path, document_meta=gitkeep_meta) + full_path = make_parent(docs_path=docs_path, document_meta=gitkeep_meta) full_path.touch() return types_.ActionReport( table_row=gitkeep_meta.table_row, @@ -283,7 +283,7 @@ def _migrate_document( location=None, reason=str(exc), ) - full_path = _build_path(docs_path=docs_path, document_meta=document_meta) + full_path = make_parent(docs_path=docs_path, document_meta=document_meta) full_path.write_text(content, encoding="utf-8") return types_.ActionReport( table_row=document_meta.table_row, @@ -305,7 +305,7 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path) -> typ """ logging.info("migrate meta: %s", index_meta) - full_path = _build_path(docs_path=docs_path, document_meta=index_meta) + full_path = make_parent(docs_path=docs_path, document_meta=index_meta) full_path.write_text(index_meta.content, encoding="utf-8") return types_.ActionReport( table_row=None, @@ -392,7 +392,7 @@ def run( discourse: Discourse, docs_path: Path, ) -> None: - """Write document content to docs_path. + """Write table contents to the document directory. Args: table_rows: Iterable sequence of documentation structure to be migrated. From 9201659682c7645045f431595f7a53ba2f6f0615 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 09:24:27 +0800 Subject: [PATCH 091/107] remove redundant test for missing docs --- .../integration/test___init__run_reconcile.py | 93 ++++++++----------- 1 file changed, 39 insertions(+), 54 deletions(-) diff --git a/tests/integration/test___init__run_reconcile.py b/tests/integration/test___init__run_reconcile.py index ddbbcc8c..fc5a6dd4 100644 --- a/tests/integration/test___init__run_reconcile.py +++ b/tests/integration/test___init__run_reconcile.py @@ -14,7 +14,7 @@ import pytest from git.repo import Repo -from src import GETTING_STARTED, exceptions, index, metadata, reconcile, run +from src import exceptions, index, metadata, reconcile, run from src.discourse import Discourse from .. import factories @@ -33,56 +33,41 @@ async def test_run( """ arrange: given running discourse server act: when run is called with: - 1. docs empty - 2. docs with an index file in dry run mode - 3. docs with an index file - 4. 
docs with a documentation file added in dry run mode - 5. docs with a documentation file added - 6. docs with a documentation file updated in dry run mode - 7. docs with a documentation file updated - 8. docs with a nested directory added - 9. docs with a documentation file added in the nested directory - 10. docs with the documentation file in the nested directory removed in dry run mode - 11. docs with the documentation file in the nested directory removed with page deletion + 1. docs with an index file in dry run mode + 2. docs with an index file + 3. docs with a documentation file added in dry run mode + 4. docs with a documentation file added + 5. docs with a documentation file updated in dry run mode + 6. docs with a documentation file updated + 7. docs with a nested directory added + 8. docs with a documentation file added in the nested directory + 9. docs with the documentation file in the nested directory removed in dry run mode + 10. docs with the documentation file in the nested directory removed with page deletion disabled - 12. with the nested directory removed - 13. with the documentation file removed - 14. with the index file removed + 11. with the nested directory removed + 12. with the documentation file removed + 13. with the index file removed assert: then: - 1. an index page is created with an empty navigation table - 2. an index page is not updated - 3. an index page is updated - 4. the documentation page is not created - 5. the documentation page is created - 6. the documentation page is not updated - 7. the documentation page is updated - 8. the nested directory is added to the navigation table - 9. the documentation file in the nested directory is created + 1. an index page is not updated + 2. an index page is updated + 3. the documentation page is not created + 4. the documentation page is created + 5. the documentation page is not updated + 6. the documentation page is updated + 7. the nested directory is added to the navigation table + 8. the documentation file in the nested directory is created + 9. the documentation file in the nested directory is not removed 10. the documentation file in the nested directory is not removed - 11. the documentation file in the nested directory is not removed - 12. the nested directory is removed from the navigation table - 13. the documentation page is deleted - 14. an index page is not updated + 11. the nested directory is removed from the navigation table + 12. the documentation page is deleted + 13. an index page is not updated """ (_, repo_path) = repository document_name = "name 1" caplog.set_level(logging.INFO) create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) - # 1. docs empty - with pytest.raises(exceptions.InputError) as exc_info: - urls_with_actions = run( - base_path=repo_path, - discourse=discourse_api, - user_inputs=factories.UserInputFactory( - dry_run=False, - delete_pages=True, - ), - ) - - assert str(exc_info.value) == GETTING_STARTED - - # 2. docs with an index file in dry run mode + # 1. docs with an index file in dry run mode caplog.clear() index_url = discourse_api.create_topic( title=f"{document_name.replace('-', ' ').title()} Documentation Overview", @@ -109,7 +94,7 @@ async def test_run( assert index_topic == f"{reconcile.NAVIGATION_TABLE_START}".strip() assert_substrings_in_string((index_url, "Update", "'skip'"), caplog.text) - # 3. docs with an index file + # 2. 
docs with an index file caplog.clear() urls_with_actions = run( @@ -126,7 +111,7 @@ async def test_run( assert index_topic == f"{index_content}{reconcile.NAVIGATION_TABLE_START}" assert_substrings_in_string((index_url, "Update", "'success'"), caplog.text) - # 4. docs with a documentation file added in dry run mode + # 3. docs with a documentation file added in dry run mode caplog.clear() doc_table_key = "doc" (doc_file := docs_dir / f"{doc_table_key}.md").write_text(doc_content_1 := "doc content 1") @@ -145,7 +130,7 @@ async def test_run( index_topic = discourse_api.retrieve_topic(url=index_url) assert doc_content_1 not in index_topic - # 5. docs with a documentation file added + # 4. docs with a documentation file added caplog.clear() urls_with_actions = run( @@ -169,7 +154,7 @@ async def test_run( doc_topic = discourse_api.retrieve_topic(url=doc_url) assert doc_topic == doc_content_1 - # 6. docs with a documentation file updated in dry run mode + # 5. docs with a documentation file updated in dry run mode caplog.clear() doc_file.write_text(doc_content_2 := "doc content 2") @@ -189,7 +174,7 @@ async def test_run( doc_topic = discourse_api.retrieve_topic(url=doc_url) assert doc_topic == doc_content_1 - # 7. docs with a documentation file updated + # 6. docs with a documentation file updated caplog.clear() urls_with_actions = run( @@ -211,7 +196,7 @@ async def test_run( doc_topic = discourse_api.retrieve_topic(url=doc_url) assert doc_topic == doc_content_2 - # 8. docs with a nested directory added + # 7. docs with a nested directory added caplog.clear() nested_dir_table_key = "nested-dir" (nested_dir := docs_dir / nested_dir_table_key).mkdir() @@ -233,7 +218,7 @@ async def test_run( index_topic = discourse_api.retrieve_topic(url=index_url) assert nested_dir_table_line in index_topic - # 9. docs with a documentation file added in the nested directory + # 8. docs with a documentation file added in the nested directory caplog.clear() nested_dir_doc_table_key = "nested-dir-doc" (nested_dir_doc_file := nested_dir / "doc.md").write_text( @@ -264,7 +249,7 @@ async def test_run( nested_dir_doc_topic = discourse_api.retrieve_topic(url=nested_dir_doc_url) assert nested_dir_doc_topic == nested_dir_doc_content - # 10. docs with the documentation file in the nested directory removed in dry run mode + # 9. docs with the documentation file in the nested directory removed in dry run mode caplog.clear() nested_dir_doc_file.unlink() @@ -286,7 +271,7 @@ async def test_run( nested_dir_doc_topic = discourse_api.retrieve_topic(url=nested_dir_doc_url) assert nested_dir_doc_topic == nested_dir_doc_content - # 11. docs with the documentation file in the nested directory removed with page deletion + # 10. docs with the documentation file in the nested directory removed with page deletion # disabled caplog.clear() @@ -308,7 +293,7 @@ async def test_run( nested_dir_doc_topic = discourse_api.retrieve_topic(url=nested_dir_doc_url) assert nested_dir_doc_topic == nested_dir_doc_content - # 12. with the nested directory removed + # 11. with the nested directory removed caplog.clear() nested_dir.rmdir() @@ -328,7 +313,7 @@ async def test_run( index_topic = discourse_api.retrieve_topic(url=index_url) assert nested_dir_table_line not in index_topic - # 13. with the documentation file removed + # 12. with the documentation file removed caplog.clear() doc_file.unlink() @@ -350,7 +335,7 @@ async def test_run( with pytest.raises(exceptions.DiscourseError): discourse_api.retrieve_topic(url=doc_url) - # 14. 
with the index file removed + # 13. with the index file removed caplog.clear() index_file.unlink() From c75ec146646653b4aa36f126b830d01234024a6e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 09:24:41 +0800 Subject: [PATCH 092/107] remove redundant marker --- tests/integration/test___init__run_migrate.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/test___init__run_migrate.py b/tests/integration/test___init__run_migrate.py index 8eda703a..88f67fcf 100644 --- a/tests/integration/test___init__run_migrate.py +++ b/tests/integration/test___init__run_migrate.py @@ -23,7 +23,6 @@ pytestmark = pytest.mark.migrate -@pytest.mark.migrate @pytest.mark.asyncio @pytest.mark.usefixtures("patch_create_repository_client") async def test_run_migrate( From 48c087fb4dc14a99304f3f5e39b6b8d5aa2ef139 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 13:20:40 +0800 Subject: [PATCH 093/107] group user inputs --- main.py | 101 +++++++++++++++++++++++++---------- src/discourse.py | 25 +++------ src/types_.py | 35 ++++++------ tests/factories.py | 7 ++- tests/unit/test_discourse.py | 42 ++++----------- 5 files changed, 109 insertions(+), 101 deletions(-) diff --git a/main.py b/main.py index b6fca908..639e06be 100755 --- a/main.py +++ b/main.py @@ -11,51 +11,70 @@ import pathlib from functools import partial -from src import run, types_ +from src import GETTING_STARTED, exceptions, run, types_ from src.discourse import create_discourse -# pylint: disable=too-many-locals -def main() -> None: - """Execute the action.""" - logging.basicConfig(level=logging.INFO) +def _parse_env_vars() -> types_.UserInputs: + """Parse user inputs from environment variables. - # Read input + Returns: + Wrapped user input variables. + """ + discourse_host = os.getenv("INPUT_DISCOURSE_HOST", "") + discourse_category_id = os.getenv("INPUT_DISCOURSE_CATEGORY_ID", "") + discourse_api_username = os.getenv("INPUT_DISCOURSE_API_USERNAME", "") + discourse_api_key = os.getenv("INPUT_DISCOURSE_API_KEY", "") delete_topics = os.getenv("INPUT_DELETE_TOPICS") == "true" dry_run = os.getenv("INPUT_DRY_RUN") == "true" - discourse_host = os.getenv("INPUT_DISCOURSE_HOST") - discourse_category_id = os.getenv("INPUT_DISCOURSE_CATEGORY_ID") - discourse_api_username = os.getenv("INPUT_DISCOURSE_API_USERNAME") - discourse_api_key = os.getenv("INPUT_DISCOURSE_API_KEY") github_access_token = os.getenv("INPUT_GITHUB_TOKEN") - # Execute action - create_discourse_kwargs = { - "hostname": discourse_host, - "category_id": discourse_category_id, - "api_username": discourse_api_username, - "api_key": discourse_api_key, - } - base_path = pathlib.Path() - discourse = create_discourse(**create_discourse_kwargs) - urls_with_actions_dict = run( - base_path=base_path, - discourse=discourse, - user_inputs=types_.UserInputs( - dry_run=dry_run, delete_pages=delete_topics, github_access_token=github_access_token - ), + return types_.UserInputs( + discourse_hostname=discourse_host, + discourse_category_id=discourse_category_id, + discourse_api_username=discourse_api_username, + discourse_api_key=discourse_api_key, + delete_pages=delete_topics, + dry_run=dry_run, + github_access_token=github_access_token, ) - # Write output - github_output = pathlib.Path(os.getenv("GITHUB_OUTPUT", "")) + +def _write_github_output( + urls_with_actions_dict: dict[str, str], user_inputs: types_.UserInputs +) -> None: + """Writes results produced by the action to github_output. 
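_write_github_output above relies on the standard GitHub Actions output mechanism: the runner exposes a file path in the GITHUB_OUTPUT environment variable and every name=value line appended to that file becomes a step output. A minimal sketch of that contract with an illustrative value:

```python
import os
import pathlib

output_path = pathlib.Path(os.environ["GITHUB_OUTPUT"])
with output_path.open("a", encoding="utf-8") as github_output_file:
    # Each appended line becomes a step output named by the part before "=".
    github_output_file.write("index_url=https://discourse.example/t/42\n")
```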
+ + Args: + urls_with_actions_dict: key value pairs of link to result of action. + user_inputs: parsed input variables used to run the action. + + Raises: + InputError: if not running inside a github actions environment. + """ + github_output = os.getenv("GITHUB_OUTPUT") + if not github_output: + raise exceptions.InputError( + f"Invalid 'GITHUB_OUTPUT' input, it must be non-empty, got {github_output=!r}" + f"This action is intended to run inside github-actions. {GETTING_STARTED}" + ) + + github_output_path = pathlib.Path() compact_json = partial(json.dumps, separators=(",", ":")) urls_with_actions = compact_json(urls_with_actions_dict) if urls_with_actions_dict: *_, index_url = urls_with_actions_dict.keys() else: index_url = "" - discourse_config = compact_json(create_discourse_kwargs) - github_output.write_text( + discourse_config = compact_json( + { + "hostname": user_inputs.discourse_hostname, + "category_id": user_inputs.discourse_category_id, + "api_username": user_inputs.discourse_api_username, + "api_key": user_inputs.discourse_api_key, + } + ) + github_output_path.write_text( f"urls_with_actions={urls_with_actions}\n" f"index_url={index_url}\n" f"discourse_config={discourse_config}\n", @@ -63,5 +82,29 @@ def main() -> None: ) +def main() -> None: + """Execute the action.""" + logging.basicConfig(level=logging.INFO) + + # Read input + user_inputs = _parse_env_vars() + + # Execute action + discourse = create_discourse( + hostname=user_inputs.discourse_hostname, + category_id=user_inputs.discourse_category_id, + api_username=user_inputs.discourse_api_username, + api_key=user_inputs.discourse_api_key, + ) + urls_with_actions_dict = run( + base_path=pathlib.Path(), + discourse=discourse, + user_inputs=user_inputs, + ) + + # Write output + _write_github_output(urls_with_actions_dict=urls_with_actions_dict, user_inputs=user_inputs) + + if __name__ == "__main__": main() diff --git a/src/discourse.py b/src/discourse.py index 10464d88..d36ac44b 100644 --- a/src/discourse.py +++ b/src/discourse.py @@ -418,7 +418,7 @@ def update_topic( def create_discourse( - hostname: typing.Any, category_id: typing.Any, api_username: typing.Any, api_key: typing.Any + hostname: str, category_id: str, api_username: str, api_key: str ) -> Discourse: """Create discourse client. @@ -437,8 +437,6 @@ def create_discourse( is not an integer or a string that can be converted to an integer. 
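For completeness, a sketch of the environment a local dry run would prime before invoking main(); the variable names come from _parse_env_vars and _write_github_output above, every value is a placeholder, and the category id is kept as a digit string to satisfy create_discourse:

```python
import os

os.environ.update(
    {
        "INPUT_DISCOURSE_HOST": "discourse.example",
        "INPUT_DISCOURSE_CATEGORY_ID": "41",  # must be a digit string
        "INPUT_DISCOURSE_API_USERNAME": "bot",
        "INPUT_DISCOURSE_API_KEY": "placeholder-key",
        "INPUT_DELETE_TOPICS": "false",
        "INPUT_DRY_RUN": "true",
        "INPUT_GITHUB_TOKEN": "placeholder-token",
        "GITHUB_OUTPUT": "/tmp/github-output",  # required by _write_github_output
    }
)
```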
""" - if not isinstance(hostname, str): - raise InputError(f"Invalid 'discourse_host' input, it must be a string, got {hostname=!r}") if not hostname: raise InputError( f"Invalid 'discourse_host' input, it must be non-empty, got {hostname=!r}" @@ -450,31 +448,22 @@ def create_discourse( f"got {hostname=!r}" ) - if not isinstance(category_id, int) and not ( - isinstance(category_id, str) and category_id.isdigit() - ): + if not category_id: + raise InputError( + f"Invalid 'discourse_category_id' input, it must be non-empty, got {category_id=!r}" + ) + if not category_id.isdigit(): raise InputError( "Invalid 'discourse_category_id' input, it must be an integer or a string that can be " f"converted to an integer, got {category_id=!r}" ) - if isinstance(category_id, str): - category_id_int = int(category_id) - else: - category_id_int = category_id + category_id_int = int(category_id) - if not isinstance(api_username, str): - raise InputError( - f"Invalid 'discourse_api_username' input, it must be a string, got {api_username=!r}" - ) if not api_username: raise InputError( f"Invalid 'discourse_api_username' input, it must be non-empty, got {api_username=!r}" ) - if not isinstance(api_key, str): - raise InputError( - f"Invalid 'discourse_api_key' input, it must be a string, got {api_key=!r}" - ) if not api_key: raise InputError( f"Invalid 'discourse_api_key' input, it must be non-empty, got {api_key=!r}" diff --git a/src/types_.py b/src/types_.py index 43e81888..2181b234 100644 --- a/src/types_.py +++ b/src/types_.py @@ -11,32 +11,29 @@ @dataclasses.dataclass -class MigrationInputs: - """Configurable parameters for migration mode. +class UserInputs: + """Configurable user input values used to run upload-charm-docs. Attrs: + discourse_hostname: The discourse hostname. + discourse_category_id: The category identifier to use on discourse for all topics. + discourse_api_username: The discourse API username to use for interactions with the server. + discourse_api_key: The discourse API key to use for interactions with the server. + dry_run: If enabled, only log the action that would be taken. Has no effect in migration + mode. + delete_pages: Whether to delete pages that are no longer needed. Has no effect in + migration mode. github_access_token: A Personal Access Token(PAT) or access token with repository access. + Required in migration mode. """ - github_access_token: str | None - - -@dataclasses.dataclass -class ReconcileInputs: - """Configurable parameters for reconcile mode. - - Attrs: - dry_run: If enabled, only log the action that would be taken. - delete_pages: Whether to delete pages that are no longer needed. - """ - + discourse_hostname: str + discourse_category_id: str + discourse_api_username: str + discourse_api_key: str dry_run: bool delete_pages: bool - - -@dataclasses.dataclass -class UserInputs(ReconcileInputs, MigrationInputs): - """Configurable user input values used to run upload-charm-docs.""" + github_access_token: str | None class Metadata(typing.NamedTuple): diff --git a/tests/factories.py b/tests/factories.py index ff24bbcf..4c1641ce 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -93,8 +93,11 @@ class Meta: model = types_.UserInputs abstract = False - # the following token is a test variable for testing. 
- github_access_token = "test-token" # nosec + discourse_hostname = "http://discourse" + discourse_category_id = factory.Sequence(lambda n: n) + discourse_api_username = "discourse-test-user" + discourse_api_key = "discourse-test-key" + github_access_token = "test-token" # nosec the following token is a test variable. dry_run = False delete_pages = False diff --git a/tests/unit/test_discourse.py b/tests/unit/test_discourse.py index ef1626dd..3c60e8e2 100644 --- a/tests/unit/test_discourse.py +++ b/tests/unit/test_discourse.py @@ -692,19 +692,14 @@ def test_absolute_url(base_path: str, discourse: Discourse): "kwargs, expected_error_msg_contents", [ pytest.param( - {"hostname": None, "category_id": 1, "api_username": "user 1", "api_key": "key 1"}, - ("invalid", "'discourse_host'", "string", f"{None!r}"), - id="hostname is not string", - ), - pytest.param( - {"hostname": "", "category_id": 1, "api_username": "user 1", "api_key": "key 1"}, + {"hostname": "", "category_id": "1", "api_username": "user 1", "api_key": "key 1"}, ("invalid", "'discourse_host'", "empty", f"{''!r}"), id="hostname empty", ), pytest.param( { "hostname": "http://discourse", - "category_id": 1, + "category_id": "1", "api_username": "user 1", "api_key": "key 1", }, @@ -714,7 +709,7 @@ def test_absolute_url(base_path: str, discourse: Discourse): pytest.param( { "hostname": "HTTP://discourse", - "category_id": 1, + "category_id": "1", "api_username": "user 1", "api_key": "key 1", }, @@ -724,7 +719,7 @@ def test_absolute_url(base_path: str, discourse: Discourse): pytest.param( { "hostname": "https://discourse", - "category_id": 1, + "category_id": "1", "api_username": "user 1", "api_key": "key 1", }, @@ -734,12 +729,12 @@ def test_absolute_url(base_path: str, discourse: Discourse): pytest.param( { "hostname": "discourse", - "category_id": None, + "category_id": "", "api_username": "user 1", "api_key": "key 1", }, - ("invalid", "'discourse_category_id'", "integer", f"{None!r}"), - id="category_id None", + ("invalid", "'discourse_category_id'", "it must be non-empty"), + id="empty category_id", ), pytest.param( { @@ -752,22 +747,12 @@ def test_absolute_url(base_path: str, discourse: Discourse): id="category_id str that is not convertible to int", ), pytest.param( - {"hostname": "discourse", "category_id": 1, "api_username": None, "api_key": "key 1"}, - ("invalid", "'discourse_api_username'", "string", f"{None!r}"), - id="api_username None", - ), - pytest.param( - {"hostname": "discourse", "category_id": 1, "api_username": "", "api_key": "key 1"}, + {"hostname": "discourse", "category_id": "1", "api_username": "", "api_key": "key 1"}, ("empty", "'discourse_api_username'", f"{''!r}"), id="api_username empty", ), pytest.param( - {"hostname": "discourse", "category_id": 1, "api_username": "user 1", "api_key": None}, - ("invalid", "'discourse_api_key'", "string", f"{None!r}"), - id="api_key None", - ), - pytest.param( - {"hostname": "discourse", "category_id": 1, "api_username": "user 1", "api_key": ""}, + {"hostname": "discourse", "category_id": "1", "api_username": "user 1", "api_key": ""}, ("empty", "'discourse_api_key'", f"{''!r}"), id="api_key empty", ), @@ -790,15 +775,6 @@ def test_create_discourse_error(kwargs: dict, expected_error_msg_contents: tuple @pytest.mark.parametrize( "kwargs", [ - pytest.param( - { - "hostname": "discourse", - "category_id": 1, - "api_username": "user 1", - "api_key": "key 1", - }, - id="category_id int", - ), pytest.param( { "hostname": "discourse", From a3c17f0711b2683731695ea724404f23c4958c20 
Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 13:21:09 +0800 Subject: [PATCH 094/107] minor wording changes --- src/migration.py | 14 ++++++++------ src/pull_request.py | 2 +- tests/unit/test_migration.py | 5 ++++- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/src/migration.py b/src/migration.py index 345102a6..d7863091 100644 --- a/src/migration.py +++ b/src/migration.py @@ -16,20 +16,20 @@ GITKEEP_FILENAME = ".gitkeep" -def _extract_name_from_paths(current_path: Path, table_path: types_.TablePath) -> str: +def _extract_name_from_paths(current_group_path: Path, table_path: types_.TablePath) -> str: """Extract name given a current working directory and table path. If there is a matching prefix in table path's prefix generated from the current directory, the prefix is removed and the remaining segment is returned as the extracted name. Args: - current_path: current path of the file relative to the directory. + current_group_path: current path of the file relative to the group's path directory. table_path: table path of the file from the index file, of format path-to-file-filename. Returns: The filename derived by removing the directory path from given table path of the file. """ - return table_path.removeprefix(f"{calculate_table_path(current_path)}-") + return table_path.removeprefix(f"{calculate_table_path(current_group_path)}-") def _validate_table_rows( @@ -101,7 +101,9 @@ def _change_group_path( if not row.is_group: return group_path # move one level of nesting into new group path - return group_path / _extract_name_from_paths(current_path=group_path, table_path=row.path) + return group_path / _extract_name_from_paths( + current_group_path=group_path, table_path=row.path + ) # working group path belongs in the group 1 level above current row's level. # i.e. group-1/document-1, group path is group-1 @@ -119,7 +121,7 @@ def _change_group_path( # i.e. current: group-1, destination: group-1/group-2, row: group-1-group-2 if row.is_group: group_path = group_path / _extract_name_from_paths( - current_path=group_path, table_path=row.path + current_group_path=group_path, table_path=row.path ) return group_path @@ -144,7 +146,7 @@ def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMe raise exceptions.MigrationError( "Internal error, no implementation for creating document meta with missing link in row." ) - name = _extract_name_from_paths(current_path=path, table_path=row.path) + name = _extract_name_from_paths(current_group_path=path, table_path=row.path) return types_.DocumentMeta(path=path / f"{name}.md", link=row.navlink.link, table_row=row) diff --git a/src/pull_request.py b/src/pull_request.py index 0bc67f32..ee5b8237 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -1,4 +1,4 @@ -# Copyright 2022 Canonical Ltd. +# Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. """Module for handling git repository.""" diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 419b119f..5e7f4b8c 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -42,7 +42,10 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe act: when _extract_name_from_paths is called assert: the name part is extracted from table path. 
""" - assert migration._extract_name_from_paths(current_path=path, table_path=table_path) == expected + assert ( + migration._extract_name_from_paths(current_group_path=path, table_path=table_path) + == expected + ) @pytest.mark.parametrize( From ae3f414fc20e2451f776623b5553e4ca75c7901c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 13:31:40 +0800 Subject: [PATCH 095/107] remove redundant test case --- tests/unit/test_migration.py | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 5e7f4b8c..98eaef19 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -95,28 +95,6 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe ), id="document sequence level increase(no group)", ), - pytest.param( - ( - factories.TableRowFactory(level=1, is_document=True), - factories.TableRowFactory(level=3, is_document=True), - ), - ( - "invalid row level value sequence", - "level sequence jumps of more than 1 is invalid.", - ), - id="document sequence level increase(skip level)", - ), - pytest.param( - ( - factories.TableRowFactory(level=1, is_group=True), - factories.TableRowFactory(level=3, is_group=True), - ), - ( - "invalid row level value sequence", - "level sequence jumps of more than 1 is invalid.", - ), - id="group sequence level increase(skip level)", - ), pytest.param( ( factories.TableRowFactory(level=1, is_document=True), From b430b5c64ad6f626f445990cbd87fd74185e0b2f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 13:48:57 +0800 Subject: [PATCH 096/107] add test for content after table --- tests/unit/test_index.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index c02a3031..1a9ebbf2 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -138,24 +138,14 @@ def test_get_metadata_yaml_retrieve_empty(tmp_path: Path): id="multiline content only", ), pytest.param( - f"{content}{index.NAVIGATION_TABLE_START}", + f"{(content := 'Page content')}{index.NAVIGATION_TABLE_START}", content, id="page with content and navigation table", ), pytest.param( - f"{multiline_content}{index.NAVIGATION_TABLE_START}", - multiline_content, - id="page with multiline content and navigation table", - ), - pytest.param( - (separated_multiline_content := "Page content\n\nManyMultiline"), - separated_multiline_content, - id="page with separated multiline content", - ), - pytest.param( - f"{separated_multiline_content}{index.NAVIGATION_TABLE_START}", - separated_multiline_content, - id="page with separated multiline content and navigation table", + f"{(content := 'page content')}{index.NAVIGATION_TABLE_START}\ncontent-afterwards", + content, + id="page with content after the navigation table", ), ], ) From 2fa5de7214c616abb9f8052491dd4c0962894fde Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 13:49:27 +0800 Subject: [PATCH 097/107] minor feedback adjustments --- src/pull_request.py | 5 +++-- tests/unit/test___init__.py | 6 ++---- tests/unit/test_migration.py | 12 ++++-------- tests/unit/test_pull_request.py | 16 ++++++---------- 4 files changed, 15 insertions(+), 24 deletions(-) diff --git a/src/pull_request.py b/src/pull_request.py index ee5b8237..e053b70f 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -95,7 +95,7 @@ def create_branch(self, branch_name: str, commit_msg: str) -> None: except GitCommandError as exc: 
raise RepositoryClientError(f"Unexpected error creating new branch. {exc=!r}") from exc - def create_github_pull_request(self, branch_name: str, base: str) -> str: + def create_pull_request(self, branch_name: str, base: str) -> str: """Create a pull request from given branch to base. Args: @@ -164,6 +164,7 @@ def create_pull_request(repository: RepositoryClient) -> str: if base == DEFAULT_BRANCH_NAME: raise InputError( f"Pull request branch cannot be named {DEFAULT_BRANCH_NAME}." + f"Branch name {DEFAULT_BRANCH_NAME} is reserved for creating a migration branch." "Please try again after changing the branch name." ) if not repository.is_dirty(): @@ -180,7 +181,7 @@ def create_pull_request(repository: RepositoryClient) -> str: commit_msg=ACTIONS_COMMIT_MESSAGE, ) logging.info("create pull request %s", DEFAULT_BRANCH_NAME) - pull_request_web_link = repository.create_github_pull_request( + pull_request_web_link = repository.create_pull_request( branch_name=DEFAULT_BRANCH_NAME, base=base, ) diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 5498f255..75b5e4db 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -161,7 +161,7 @@ def test__run_migrate_server_error_index( """ meta = types_.Metadata(name="name 1", docs="http://discourse/t/docs") mocked_discourse = mock.MagicMock(spec=discourse.Discourse) - mocked_discourse.retrieve_topic.side_effect = [exceptions.DiscourseError] + mocked_discourse.retrieve_topic.side_effect = exceptions.DiscourseError with pytest.raises(exceptions.ServerError) as exc: _run_migrate( @@ -333,11 +333,9 @@ def test_run_no_docs_dir( user_input = factories.UserInputFactory() # run is repeated in unit tests / integration tests - # pylint: disable=duplicate-code returned_migration_reports = run( base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input - ) - # pylint: enable=duplicate-code + ) # pylint: disable=duplicate-code (upstream_repo, upstream_path) = upstream_repository upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 98eaef19..c624b9c5 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -21,19 +21,15 @@ @pytest.mark.parametrize( "path, table_path, expected", [ - pytest.param(Path(""), types_.TablePath("test"), "test", id="table path only"), - pytest.param( - Path("group-1"), types_.TablePath("group-1-test"), "test", id="test in group" - ), + pytest.param(Path(""), "test", "test", id="table path only"), + pytest.param(Path("group-1"), "group-1-test", "test", id="test in group"), pytest.param( Path("group-1/nested/path"), - types_.TablePath("group-1-nested-path-test"), + "group-1-nested-path-test", "test", id="test in group", ), - pytest.param( - Path("not/matching/group"), types_.TablePath("test"), "test", id="non-prefix path" - ), + pytest.param(Path("not/matching/group"), "test", "test", id="non-prefix path"), ], ) def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expected: str): diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index da13fddd..25d1b63d 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -137,12 +137,12 @@ def test__create_branch( ) -def test__create_github_pull_request_error( +def test__create_pull_request_error( monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ arrange: given RepositoryClient with a mocked github repository client that raises an 
exception - act: when _create_github_pull_request is called + act: when _create_pull_request is called assert: RepositoryClientError is raised. """ mock_github_repository = mock.MagicMock(spec=Repository) @@ -152,24 +152,20 @@ def test__create_github_pull_request_error( monkeypatch.setattr(repository_client, "_github_repo", mock_github_repository) with pytest.raises(RepositoryClientError) as exc: - repository_client.create_github_pull_request( - branch_name="branchname-1", base="base-branchname" - ) + repository_client.create_pull_request(branch_name="branchname-1", base="base-branchname") assert_substrings_in_string( ("unexpected error creating pull request", "githubexception"), str(exc.value).lower() ) -def test__create_github_pull_request( - repository_client: RepositoryClient, mock_pull_request: PullRequest -): +def test__create_pull_request(repository_client: RepositoryClient, mock_pull_request: PullRequest): """ arrange: given RepositoryClient with a mocked github client that returns a mocked pull request - act: when _create_github_pull_request is called + act: when _create_pull_request is called assert: a pull request's page link is returned. """ - returned_url = repository_client.create_github_pull_request("branchname-1", "base-branchname") + returned_url = repository_client.create_pull_request("branchname-1", "base-branchname") assert returned_url == mock_pull_request.html_url From 3ed634d9d426228fc275a6e5a640d9cb4349b41f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 15:41:45 +0800 Subject: [PATCH 098/107] separate repo and repo path --- tests/conftest.py | 54 +++++++----- tests/integration/test___init__run_migrate.py | 20 ++--- .../integration/test___init__run_reconcile.py | 38 ++++---- tests/unit/test___init__.py | 59 ++++++------- tests/unit/test_pull_request.py | 86 +++++++++---------- 5 files changed, 135 insertions(+), 122 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5612d88e..3fff3543 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,36 +17,51 @@ from src import pull_request -@pytest.fixture(name="upstream_repository") -def fixture_upstream_repository(tmp_path: Path) -> tuple[Repo, Path]: - """Create upstream repository.""" +@pytest.fixture(name="upstream_repository_path") +def fixture_upstream_repository_path(tmp_path: Path) -> Path: + """Create a path for upstream repository.""" upstream_path = tmp_path / "upstream" upstream_path.mkdir() - upstream = Repo.init(upstream_path) - writer = upstream.config_writer() + return upstream_path + + +@pytest.fixture(name="upstream_repository") +def fixture_upstream_repository(upstream_repository_path: Path) -> Repo: + """Initialize upstream repository.""" + upstream_repository = Repo.init(upstream_repository_path) + writer = upstream_repository.config_writer() writer.set_value("user", "name", "upstream_user") writer.set_value("user", "email", "upstream_email") writer.release() - upstream.git.checkout("-b", "main") - (upstream_path / ".gitkeep").touch() - upstream.git.add(".") - upstream.git.commit("-m", "'initial commit'") + upstream_repository.git.checkout("-b", "main") + (upstream_repository_path / ".gitkeep").touch() + upstream_repository.git.add(".") + upstream_repository.git.commit("-m", "'initial commit'") - return (upstream, upstream_path) + return upstream_repository + + +@pytest.fixture(name="repository_path") +def fixture_repository_path(tmp_path: Path) -> Path: + """Create path for testing repository.""" + repo_path = tmp_path / "mocked" + repo_path.mkdir() + return 
repo_path @pytest.fixture(name="repository") def fixture_repository( - upstream_repository: tuple[Repo, Path], tmp_path: Path -) -> tuple[Repo, Path]: + upstream_repository: Repo, upstream_repository_path: Path, repository_path: Path +) -> Repo: """Create repository with mocked upstream.""" - (_, upstream_path) = upstream_repository - repo_path = tmp_path / "mocked" - repo_path.mkdir() - repo = Repo.clone_from(url=upstream_path, to_path=repo_path) + # uptream_repository is added to create a dependency for the current fixture in order to ensure + # that the repository can be cloned after the upstream has fully initialized. + del upstream_repository + + repo = Repo.clone_from(url=upstream_repository_path, to_path=repository_path) repo.git.checkout("main") repo.git.pull() - return (repo, repo_path) + return repo @pytest.fixture(name="mock_pull_request") @@ -80,11 +95,10 @@ def fixture_mock_github(mock_github_repo: Repository) -> Github: @pytest.fixture(name="repository_client") def fixture_repository_client( - repository: tuple[Repo, Path], mock_github_repo: Repository + repository: Repo, mock_github_repo: Repository ) -> pull_request.RepositoryClient: """Get repository client.""" - (repo, _) = repository - return pull_request.RepositoryClient(repository=repo, github_repository=mock_github_repo) + return pull_request.RepositoryClient(repository=repository, github_repository=mock_github_repo) @pytest.fixture(name="patch_create_repository_client") diff --git a/tests/integration/test___init__run_migrate.py b/tests/integration/test___init__run_migrate.py index 88f67fcf..79ac6d12 100644 --- a/tests/integration/test___init__run_migrate.py +++ b/tests/integration/test___init__run_migrate.py @@ -29,8 +29,10 @@ async def test_run_migrate( discourse_hostname: str, discourse_api: Discourse, caplog: pytest.LogCaptureFixture, - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], + repository: Repo, + repository_path: Path, + upstream_repository: Repo, + upstream_repository_path: Path, mock_pull_request: PullRequest, ): """ @@ -47,8 +49,6 @@ async def test_run_migrate( caplog.set_level(logging.INFO) document_name = "migration name 1" discourse_prefix = f"http://{discourse_hostname}" - (repo, repo_path) = repository - (upstream_repo, upstream_repo_path) = upstream_repository content_page_1 = factories.ContentPageFactory() content_page_1_url = discourse_api.create_topic( title=content_page_1.title, @@ -95,17 +95,17 @@ async def test_run_migrate( caplog.clear() create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=repo_path, + path=repository_path, ) urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory(), ) - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) - upstream_doc_dir = upstream_repo_path / index.DOCUMENTATION_FOLDER_NAME + upstream_repository.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_doc_dir = upstream_repository_path / index.DOCUMENTATION_FOLDER_NAME assert tuple(urls_with_actions) == (mock_pull_request.html_url,) assert (group_1_path := upstream_doc_dir / "group-1").is_dir() assert (group_1_path / migration.GITKEEP_FILENAME).is_file() @@ -121,10 +121,10 @@ async def test_run_migrate( # 2. 
with no changes applied after migration caplog.clear() - repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + repository.git.checkout(pull_request.DEFAULT_BRANCH_NAME) urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory(), ) diff --git a/tests/integration/test___init__run_reconcile.py b/tests/integration/test___init__run_reconcile.py index fc5a6dd4..d8e4d3c7 100644 --- a/tests/integration/test___init__run_reconcile.py +++ b/tests/integration/test___init__run_reconcile.py @@ -12,7 +12,6 @@ from urllib.parse import urlparse import pytest -from git.repo import Repo from src import exceptions, index, metadata, reconcile, run from src.discourse import Discourse @@ -28,7 +27,7 @@ async def test_run( discourse_api: Discourse, caplog: pytest.LogCaptureFixture, - repository: tuple[Repo, Path], + repository_path: Path, ): """ arrange: given running discourse server @@ -62,10 +61,11 @@ async def test_run( 12. the documentation page is deleted 13. an index page is not updated """ - (_, repo_path) = repository document_name = "name 1" caplog.set_level(logging.INFO) - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repo_path) + create_metadata_yaml( + content=f"{metadata.METADATA_NAME_KEY}: {document_name}", path=repository_path + ) # 1. docs with an index file in dry run mode caplog.clear() @@ -75,13 +75,13 @@ async def test_run( ) create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n{metadata.METADATA_DOCS_KEY}: {index_url}", - path=repo_path, + path=repository_path, ) - (docs_dir := repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() + (docs_dir := repository_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() (index_file := docs_dir / "index.md").write_text(index_content := "index content 1") urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=True, @@ -98,7 +98,7 @@ async def test_run( caplog.clear() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -117,7 +117,7 @@ async def test_run( (doc_file := docs_dir / f"{doc_table_key}.md").write_text(doc_content_1 := "doc content 1") urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=True, @@ -134,7 +134,7 @@ async def test_run( caplog.clear() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -159,7 +159,7 @@ async def test_run( doc_file.write_text(doc_content_2 := "doc content 2") urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=True, @@ -178,7 +178,7 @@ async def test_run( caplog.clear() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -202,7 +202,7 @@ async def test_run( (nested_dir := docs_dir / nested_dir_table_key).mkdir() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -226,7 +226,7 @@ async def test_run( ) urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, 
discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -254,7 +254,7 @@ async def test_run( nested_dir_doc_file.unlink() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=True, @@ -276,7 +276,7 @@ async def test_run( caplog.clear() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -298,7 +298,7 @@ async def test_run( nested_dir.rmdir() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -318,7 +318,7 @@ async def test_run( doc_file.unlink() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, @@ -340,7 +340,7 @@ async def test_run( index_file.unlink() urls_with_actions = run( - base_path=repo_path, + base_path=repository_path, discourse=discourse_api, user_inputs=factories.UserInputFactory( dry_run=False, diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 75b5e4db..752161d3 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -175,7 +175,7 @@ def test__run_migrate_server_error_index( def test__run_migrate_server_error_topic( - repository: tuple[Repo, Path], + repository_path: Path, repository_client: pull_request.RepositoryClient, ): """ @@ -198,11 +198,10 @@ def test__run_migrate_server_error_topic( meta = types_.Metadata(name="name 1", docs=index_url) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.retrieve_topic.side_effect = [index_content, exceptions.DiscourseError] - (_, repo_path) = repository with pytest.raises(exceptions.MigrationError): _run_migrate( - base_path=repo_path, + base_path=repository_path, metadata=meta, discourse=mocked_discourse, repository=repository_client, @@ -211,8 +210,9 @@ def test__run_migrate_server_error_topic( # pylint: disable=too-many-locals def test__run_migrate( - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], + repository_path: Path, + upstream_repository: Repo, + upstream_repository_path: Path, repository_client: pull_request.RepositoryClient, mock_pull_request: PullRequest, ): @@ -233,20 +233,22 @@ def test__run_migrate( index_page, (link_content := "link 1 content"), ] - (_, repo_path) = repository returned_migration_reports = _run_migrate( - base_path=repo_path, + base_path=repository_path, metadata=meta, discourse=mocked_discourse, repository=repository_client, ) - (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_repository.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} - assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() - assert (path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1.md").is_file() + assert ( + index_file := upstream_repository_path / DOCUMENTATION_FOLDER_NAME / "index.md" + ).is_file() + assert ( + path_file := upstream_repository_path / DOCUMENTATION_FOLDER_NAME / "path-1.md" + ).is_file() assert index_file.read_text(encoding="utf-8") == index_content assert path_file.read_text(encoding="utf-8") == link_content @@ -254,36 +256,34 @@ def test__run_migrate( # pylint: 
enable=too-many-locals -def test_run_no_docs_no_dir(repository: tuple[Repo, Path]): +def test_run_no_docs_no_dir(repository_path: Path): """ arrange: given a path with a metadata.yaml that has no docs key and no docs directory and mocked discourse act: when run is called assert: InputError is raised with a guide to getting started. """ - (_, repo_path) = repository - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repository_path) mocked_discourse = mock.MagicMock(spec=discourse.Discourse) user_input = factories.UserInputFactory() with pytest.raises(exceptions.InputError) as exc: # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code - _ = run(base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input) + _ = run(base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input) assert str(exc.value) == GETTING_STARTED -def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): +def test_run_no_docs_empty_dir(repository_path: Path): """ arrange: given a path with a metadata.yaml that has no docs key and has empty docs directory and mocked discourse act: when run is called assert: then an index page is created with empty navigation table. """ - (_, repo_path) = repository - create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repo_path) - (repo_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() + create_metadata_yaml(content=f"{metadata.METADATA_NAME_KEY}: name 1", path=repository_path) + (repository_path / index.DOCUMENTATION_FOLDER_NAME).mkdir() mocked_discourse = mock.MagicMock(spec=discourse.Discourse) mocked_discourse.create_topic.return_value = (url := "url 1") user_input = factories.UserInputFactory() @@ -291,7 +291,7 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): # run is repeated in unit tests / integration tests # pylint: disable=duplicate-code returned_page_interactions = run( - base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input + base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input ) mocked_discourse.create_topic.assert_called_once_with( @@ -304,8 +304,9 @@ def test_run_no_docs_empty_dir(repository: tuple[Repo, Path]): # pylint: disable=too-many-locals @pytest.mark.usefixtures("patch_create_repository_client") def test_run_no_docs_dir( - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], + repository_path: Path, + upstream_repository: Repo, + upstream_repository_path: Path, mock_pull_request: PullRequest, ): """ @@ -315,10 +316,9 @@ def test_run_no_docs_dir( assert: then docs from the server is migrated into local docs path and the files created are return as the result. """ - (_, repo_path) = repository create_metadata_yaml( content=f"{metadata.METADATA_NAME_KEY}: name 1\n" f"{metadata.METADATA_DOCS_KEY}: docsUrl", - path=repo_path, + path=repository_path, ) index_content = """Content header. 
@@ -334,15 +334,16 @@ def test_run_no_docs_dir( # run is repeated in unit tests / integration tests returned_migration_reports = run( - base_path=repo_path, discourse=mocked_discourse, user_inputs=user_input + base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input ) # pylint: disable=duplicate-code - (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_repository.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_migration_reports == {mock_pull_request.html_url: types_.ActionResult.SUCCESS} - assert (index_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "index.md").is_file() assert ( - path_file := upstream_path / DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" + index_file := upstream_repository_path / DOCUMENTATION_FOLDER_NAME / "index.md" + ).is_file() + assert ( + path_file := upstream_repository_path / DOCUMENTATION_FOLDER_NAME / "path-1" / "file-1.md" ).is_file() assert index_file.read_text(encoding="utf-8") == index_content assert path_file.read_text(encoding="utf-8") == navlink_page diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index 25d1b63d..d944071a 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -24,16 +24,14 @@ from .helpers import assert_substrings_in_string -def test___init__(repository: tuple[Repo, Path], mock_github_repo: Repository): +def test___init__(repository: Repo, mock_github_repo: Repository): """ arrange: given a local git repository client and mock github repository client act: when RepositoryClient is initialized assert: RepositoryClient is created and git user is configured. """ - (repo, _) = repository - repository_client = pull_request.RepositoryClient( - repository=repo, github_repository=mock_github_repo + repository=repository, github_repository=mock_github_repo ) config_reader = repository_client._git_repo.config_reader() @@ -73,20 +71,19 @@ def test__check_branch_not_exists(repository_client: RepositoryClient): def test__check_branch_exists( - repository_client: RepositoryClient, upstream_repository: tuple[Repo, Path] + repository_client: RepositoryClient, upstream_repository: Repo, upstream_repository_path: Path ): """ arrange: given RepositoryClient with an upstream repository with check-branch-exists branch act: when _check_branch_exists is called assert: True is returned. """ - (upstream_repo, upstream_path) = upstream_repository branch_name = "check-branch-exists" - head = upstream_repo.create_head(branch_name) + head = upstream_repository.create_head(branch_name) head.checkout() - (upstream_path / "filler-file").touch() - upstream_repo.git.add(".") - upstream_repo.git.commit("-m", "test") + (upstream_repository_path / "filler-file").touch() + upstream_repository.git.add(".") + upstream_repository.git.commit("-m", "test") assert repository_client.check_branch_exists(branch_name) @@ -114,26 +111,26 @@ def test__create_branch_error( def test__create_branch( repository_client: RepositoryClient, - repository: tuple[Repo, Path], - upstream_repository: tuple[Repo, Path], + repository_path: Path, + upstream_repository: Repo, ): """ arrange: given RepositoryClient and newly created files in repo directory act: when _create_branch is called assert: a new branch is successfully created upstream. 
""" - (_, repo_path) = repository testfile = "testfile.txt" testfile_content = "test" - (repo_path / testfile).write_text(testfile_content) - (upstream_repo, _) = upstream_repository + (repository_path / testfile).write_text(testfile_content) branch_name = "test-create-branch" repository_client.create_branch(branch_name=branch_name, commit_msg="commit-1") # mypy false positive in lib due to getter/setter not being next to each other. assert any( - branch for branch in upstream_repo.branches if branch.name == branch_name # type: ignore + branch + for branch in upstream_repository.branches # type: ignore + if branch.name == branch_name ) @@ -171,7 +168,7 @@ def test__create_pull_request(repository_client: RepositoryClient, mock_pull_req def test_create_pull_request_on_default_branchname( - repository: tuple[Repo, Path], + repository: Repo, repository_client: RepositoryClient, ): """ @@ -179,8 +176,7 @@ def test_create_pull_request_on_default_branchname( act: when create_pull_request is called assert: InputError is raised. """ - (repo, _) = repository - head = repo.create_head(pull_request.DEFAULT_BRANCH_NAME) + head = repository.create_head(pull_request.DEFAULT_BRANCH_NAME) head.checkout() with pytest.raises(InputError) as exc: @@ -215,23 +211,23 @@ def test_create_pull_request_no_dirty_files( def test_create_pull_request_existing_branch( repository_client: RepositoryClient, - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], + upstream_repository: Repo, + upstream_repository_path: Path, + repository_path: Path, ): """ arrange: given RepositoryClient and an upstream repository that already has migration branch act: when create_pull_request is called assert: InputError is raised. """ - (_, repo_path) = repository - (repo_path / "filler-file").write_text("filler-content") - (upstream_repo, upstream_path) = upstream_repository + (repository_path / "filler-file").write_text("filler-content") + branch_name = pull_request.DEFAULT_BRANCH_NAME - head = upstream_repo.create_head(branch_name) + head = upstream_repository.create_head(branch_name) head.checkout() - (upstream_path / "filler-file").touch() - upstream_repo.git.add(".") - upstream_repo.git.commit("-m", "test") + (upstream_repository_path / "filler-file").touch() + upstream_repository.git.add(".") + upstream_repository.git.commit("-m", "test") with pytest.raises(InputError) as exc: pull_request.create_pull_request(repository=repository_client) @@ -249,8 +245,9 @@ def test_create_pull_request_existing_branch( def test_create_pull_request( repository_client: RepositoryClient, - upstream_repository: tuple[Repo, Path], - repository: tuple[Repo, Path], + upstream_repository: Repo, + upstream_repository_path: Path, + repository_path: Path, mock_pull_request: PullRequest, ): """ @@ -258,18 +255,16 @@ def test_create_pull_request( act: when create_pull_request is called assert: changes are pushed to default branch and pull request link is returned. 
""" - (_, repo_path) = repository filler_filename = "filler-file" - filler_file = repo_path / filler_filename + filler_file = repository_path / filler_filename filler_text = "filler-text" filler_file.write_text(filler_text) returned_pr_link = pull_request.create_pull_request(repository=repository_client) - (upstream_repo, upstream_path) = upstream_repository - upstream_repo.git.checkout(pull_request.DEFAULT_BRANCH_NAME) + upstream_repository.git.checkout(pull_request.DEFAULT_BRANCH_NAME) assert returned_pr_link == mock_pull_request.html_url - assert (upstream_path / filler_filename).read_text() == filler_text + assert (upstream_repository_path / filler_filename).read_text() == filler_text @pytest.mark.parametrize( @@ -322,18 +317,19 @@ def test_get_repository_name(remote_url: str, expected_repository_name: str): ) -def test_create_repository_client_no_token(repository: tuple[Repo, Path]): +def test_create_repository_client_no_token( + repository_path: Path, +): """ arrange: given valid repository path and empty access_token act: when create_repository_client is called assert: InputError is raised. """ - (_, repo_path) = repository # the following token is for testing purposes only. test_token = "" # nosec with pytest.raises(InputError) as exc: - pull_request.create_repository_client(access_token=test_token, base_path=repo_path) + pull_request.create_repository_client(access_token=test_token, base_path=repository_path) assert_substrings_in_string( ("invalid", "access_token", "input", "it must be", "non-empty"), @@ -342,17 +338,19 @@ def test_create_repository_client_no_token(repository: tuple[Repo, Path]): def test_create_repository_client( - monkeypatch: pytest.MonkeyPatch, repository: tuple[Repo, Path], mock_github_repo: Repository + monkeypatch: pytest.MonkeyPatch, + repository: Repo, + repository_path: Path, + mock_github_repo: Repository, ): """ arrange: given valid repository path and a valid access_token and a mocked github client act: when create_repository_client is called assert: RepositoryClient is returned. """ - (repo, repo_path) = repository - origin = repo.remote("origin") - repo.delete_remote(origin) - repo.create_remote("origin", "https://github.com/test-user/test-repo.git") + origin = repository.remote("origin") + repository.delete_remote(origin) + repository.create_remote("origin", "https://github.com/test-user/test-repo.git") # the following token is for testing purposes only. test_token = "testing-token" # nosec mock_github_client = mock.MagicMock(spec=Github) @@ -360,7 +358,7 @@ def test_create_repository_client( monkeypatch.setattr(pull_request, "Github", mock_github_client) returned_client = pull_request.create_repository_client( - access_token=test_token, base_path=repo_path + access_token=test_token, base_path=repository_path ) assert isinstance(returned_client, pull_request.RepositoryClient) From ca909a3e6f0ab4ea61b4295580d56759a0f0f59c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 16:15:09 +0800 Subject: [PATCH 099/107] add github output path --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index 639e06be..a2753c20 100755 --- a/main.py +++ b/main.py @@ -59,7 +59,7 @@ def _write_github_output( f"This action is intended to run inside github-actions. 
{GETTING_STARTED}" ) - github_output_path = pathlib.Path() + github_output_path = pathlib.Path(github_output) compact_json = partial(json.dumps, separators=(",", ":")) urls_with_actions = compact_json(urls_with_actions_dict) if urls_with_actions_dict: From c0d900ee83f7d51dffd7381872f20d91de41c0a7 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 16:31:44 +0800 Subject: [PATCH 100/107] add specific type hints --- src/migration.py | 2 +- tests/unit/test_migration.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/migration.py b/src/migration.py index d7863091..487ed2fa 100644 --- a/src/migration.py +++ b/src/migration.py @@ -358,7 +358,7 @@ def _run_one( def _get_docs_metadata( table_rows: typing.Iterable[types_.TableRow], index_content: str -) -> typing.Iterable[types_.MigrationFileMeta]: +) -> itertools.chain[types_.MigrationFileMeta]: """Get metadata for documents to be migrated. Args: diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index c624b9c5..78d2d240 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -117,7 +117,7 @@ def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expe ], ) def test__validate_table_rows_invalid_rows( - table_rows: Iterable[types_.TableRow], expected_message_contents: Iterable[str] + table_rows: tuple[types_.TableRow, ...], expected_message_contents: Iterable[str] ): """ arrange: given invalid table_rows sequence @@ -163,7 +163,7 @@ def test__validate_table_rows_invalid_rows( ), ], ) -def test__validate_table_rows(table_rows: Iterable[types_.TableRow]): +def test__validate_table_rows(table_rows: tuple[types_.TableRow, ...]): """ arrange: given table rows of valid sequence act: when _validate_table_rows is called @@ -402,7 +402,7 @@ def test__validate_table_rows(table_rows: Iterable[types_.TableRow]): ], ) def test__extract_docs_from_table_rows( - table_rows: Iterable[types_.TableRow], expected_metas: Iterable[types_.DocumentMeta] + table_rows: tuple[types_.TableRow, ...], expected_metas: Iterable[types_.DocumentMeta] ): """ arrange: given an valid table row sequences @@ -808,7 +808,7 @@ def test__assert_migration_success(migration_results: Iterable[types_.ActionRepo ], ) def test_run( - table_rows: Iterable[types_.TableRow], + table_rows: tuple[types_.TableRow, ...], index_content: str, tmp_path: Path, expected_files: Iterable[Path], From 9bb9c150601cd110c38366cd23cb9b385018b334 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 16:57:11 +0800 Subject: [PATCH 101/107] apply table row factory --- tests/unit/test_migration.py | 10 ++----- tests/unit/test_reconcile.py | 56 ++++++++++-------------------------- 2 files changed, 17 insertions(+), 49 deletions(-) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 78d2d240..37dfae59 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -618,13 +618,7 @@ def test__migrate_index(tmp_path: Path): pytest.param( gitkeep_meta := types_.GitkeepMeta( path=(gitkeep_path := Path(".gitkeep")), - table_row=( - table_row_sample := types_.TableRow( - level=1, - path="tablepath", - navlink=types_.Navlink(title="navlink", link=None), - ) - ), + table_row=(table_row_sample := factories.TableRowFactory()), ), gitkeep_report := types_.ActionReport( table_row=table_row_sample, @@ -637,7 +631,7 @@ def test__migrate_index(tmp_path: Path): pytest.param( document_meta := types_.DocumentMeta( path=(document_path := Path("document.md")), - 
table_row=table_row_sample, + table_row=(table_row_sample := factories.TableRowFactory()), link="samplelink", ), document_report := types_.ActionReport( diff --git a/tests/unit/test_reconcile.py b/tests/unit/test_reconcile.py index b6b63693..d73aafd2 100644 --- a/tests/unit/test_reconcile.py +++ b/tests/unit/test_reconcile.py @@ -432,15 +432,15 @@ def path_info_mkdir(path_info: types_.PathInfo, base_dir: Path) -> types_.PathIn factories.PathInfoFactory( table_path=(path := "path 1"), navlink_title=(title := "title 1") ), - types_.TableRow(level=1, path=path, navlink=types_.Navlink(title=title, link=None)), + factories.TableRowFactory( + level=1, path=path, navlink=types_.Navlink(title=title, link=None) + ), types_.NoopAction, id="path info defined table row defined", ), pytest.param( None, - types_.TableRow( - level=1, path="path 1", navlink=types_.Navlink(title="title 1", link=None) - ), + factories.TableRowFactory(level=1, navlink=types_.Navlink(title=title, link=None)), types_.DeleteAction, id="path info None table row defined", ), @@ -510,11 +510,7 @@ def test__calculate_action( ), pytest.param( (), - ( - table_row_1 := types_.TableRow( - level=1, path="path 1", navlink=types_.Navlink(title="title 1", link=None) - ), - ), + (table_row_1 := factories.TableRowFactory(level=1, path="path 1", is_group=True),), (types_.DeleteAction,), ((table_row_1.level, table_row_1.path),), id="empty path infos single table row", @@ -522,12 +518,8 @@ def test__calculate_action( pytest.param( (), ( - table_row_1 := types_.TableRow( - level=1, path="path 1", navlink=types_.Navlink(title="title 1", link=None) - ), - table_row_2 := types_.TableRow( - level=2, path="path 2", navlink=types_.Navlink(title="title 2", link=None) - ), + table_row_1 := factories.TableRowFactory(level=1, path="path 1", is_group=True), + table_row_2 := factories.TableRowFactory(level=2, path="path 2", is_group=True), ), (types_.DeleteAction, types_.DeleteAction), ((table_row_1.level, table_row_1.path), (table_row_2.level, table_row_2.path)), @@ -536,7 +528,7 @@ def test__calculate_action( pytest.param( (path_info_1 := factories.PathInfoFactory(),), ( - types_.TableRow( + factories.TableRowFactory( level=path_info_1.level, path=path_info_1.table_path, navlink=types_.Navlink(title=path_info_1.navlink_title, link=None), @@ -549,10 +541,8 @@ def test__calculate_action( pytest.param( (path_info_1 := factories.PathInfoFactory(level=1),), ( - table_row_1 := types_.TableRow( - level=2, - path=path_info_1.table_path, - navlink=types_.Navlink(title=title, link=None), + table_row_1 := factories.TableRowFactory( + level=2, path=path_info_1.table_path, is_group=True ), ), (types_.CreateAction, types_.DeleteAction), @@ -565,10 +555,8 @@ def test__calculate_action( pytest.param( (path_info_1 := factories.PathInfoFactory(table_path="path 1"),), ( - table_row := types_.TableRow( - level=path_info_1.level, - path="path 2", - navlink=types_.Navlink(title=path_info_1.navlink_title, link=None), + table_row := factories.TableRowFactory( + level=path_info_1.level, path="path 2", is_group=True ), ), (types_.CreateAction, types_.DeleteAction), @@ -648,13 +636,7 @@ def test_run( ), name="name 1", ), - ( - table_row := types_.TableRow( - level=1, - path="path 1", - navlink=types_.Navlink(title="navlink title 1", link=None), - ), - ), + (table_row := factories.TableRowFactory(level=1),), types_.CreateIndexAction( title=local_title, content=( @@ -673,16 +655,8 @@ def test_run( name="name 1", ), ( - table_row_1 := types_.TableRow( - level=1, - path="path 1", - 
navlink=types_.Navlink(title="navlink title 1", link=None), - ), - table_row_2 := types_.TableRow( - level=2, - path="path 2", - navlink=types_.Navlink(title="navlink title 2", link=None), - ), + table_row_1 := factories.TableRowFactory(level=1), + table_row_2 := factories.TableRowFactory(level=2), ), types_.CreateIndexAction( title=local_title, From 9b7502840f95ccc37813ef350a0b5257b750c2bb Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 17:34:30 +0800 Subject: [PATCH 102/107] group tests together --- tests/unit/test_migration.py | 194 ++++++++++++++++++++--------------- 1 file changed, 109 insertions(+), 85 deletions(-) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 37dfae59..289b3613 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -176,6 +176,90 @@ def test__validate_table_rows(table_rows: tuple[types_.TableRow, ...]): # Pylint doesn't understand how the walrus operator works # pylint: disable=undefined-variable,unused-variable +@pytest.mark.parametrize( + "row, path, expected_meta", + [ + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="doc-1"), + Path(), + types_.DocumentMeta( + path=Path("doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="single doc file", + ), + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="group-1-doc-1"), + Path("group-1"), + types_.DocumentMeta( + path=Path("group-1/doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="nested doc file", + ), + pytest.param( + doc_row := factories.TableRowFactory(is_document=True, path="group-2-doc-1"), + Path("group-1"), + types_.DocumentMeta( + path=Path("group-1/group-2-doc-1.md"), link=doc_row.navlink.link, table_row=doc_row + ), + id="typo in nested doc file path", + ), + ], +) +def test__create_document_meta( + row: types_.TableRow, path: Path, expected_meta: types_.DocumentMeta +): + """ + arrange: given a document table row + act: when _create_document_meta is called + assert: document meta with path to file is returned. + """ + assert migration._create_document_meta(row=row, path=path) == expected_meta + + +@pytest.mark.parametrize( + "row, path, expected_meta", + [ + pytest.param( + group_row := factories.TableRowFactory(is_group=True, path="group-1"), + Path("group-1"), + types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row), + id="single group row", + ), + pytest.param( + group_row := factories.TableRowFactory(is_group=True, path="group-1-group-2"), + Path("group-1/group-2"), + types_.GitkeepMeta(path=Path("group-1/group-2/.gitkeep"), table_row=group_row), + id="nested group row with correct current path", + ), + ], +) +def test__create_gitkeep_meta(row: types_.TableRow, path: Path, expected_meta: types_.GitkeepMeta): + """ + arrange: given a empty group table row + act: when _create_gitkeep_meta is called + assert: gitkeep meta denoting empty group is returned. + """ + assert migration._create_gitkeep_meta(row=row, path=path) == expected_meta + + +@pytest.mark.parametrize( + "content, expected_meta", + [ + pytest.param( + content := "content-1", + types_.IndexDocumentMeta(path=Path("index.md"), content=content), + ), + ], +) +def test__index_file_from_content(content: str, expected_meta: types_.IndexDocumentMeta): + """ + arrange: given an index file content + act: when _index_file_from_content is called + assert: expected index document metadata is returned. 
+ """ + assert migration._index_file_from_content(content) == expected_meta + + @pytest.mark.parametrize( "table_rows, expected_metas", [ @@ -338,6 +422,30 @@ def test__validate_table_rows(table_rows: tuple[types_.TableRow, ...]): ), id="multi rows 2 separately nested(group, nested-group, nested-doc)", ), + pytest.param( + ( + group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), + nested_doc_row_1 := factories.TableRowFactory( + level=2, path="group-1-doc-1", is_document=True + ), + nested_doc_row_2 := factories.TableRowFactory( + level=2, path="group-1-doc-2", is_document=True + ), + ), + ( + types_.DocumentMeta( + path=Path("group-1/doc-1.md"), + link=nested_doc_row_1.navlink.link, + table_row=nested_doc_row_1, + ), + types_.DocumentMeta( + path=Path("group-1/doc-2.md"), + link=nested_doc_row_2.navlink.link, + table_row=nested_doc_row_2, + ), + ), + id="multi rows 2 nested in group(group, nested-doc, nested-doc)", + ), pytest.param( ( group_row_1 := factories.TableRowFactory(level=1, path="group-1", is_group=True), @@ -402,7 +510,7 @@ def test__validate_table_rows(table_rows: tuple[types_.TableRow, ...]): ], ) def test__extract_docs_from_table_rows( - table_rows: tuple[types_.TableRow, ...], expected_metas: Iterable[types_.DocumentMeta] + table_rows: tuple[types_.TableRow, ...], expected_metas: tuple[types_.DocumentMeta, ...] ): """ arrange: given an valid table row sequences @@ -415,90 +523,6 @@ def test__extract_docs_from_table_rows( ) -@pytest.mark.parametrize( - "row, path, expected_meta", - [ - pytest.param( - doc_row := factories.TableRowFactory(is_document=True, path="doc-1"), - Path(), - types_.DocumentMeta( - path=Path("doc-1.md"), link=doc_row.navlink.link, table_row=doc_row - ), - id="single doc file", - ), - pytest.param( - doc_row := factories.TableRowFactory(is_document=True, path="group-1-doc-1"), - Path("group-1"), - types_.DocumentMeta( - path=Path("group-1/doc-1.md"), link=doc_row.navlink.link, table_row=doc_row - ), - id="nested doc file", - ), - pytest.param( - doc_row := factories.TableRowFactory(is_document=True, path="group-2-doc-1"), - Path("group-1"), - types_.DocumentMeta( - path=Path("group-1/group-2-doc-1.md"), link=doc_row.navlink.link, table_row=doc_row - ), - id="typo in nested doc file path", - ), - ], -) -def test__create_document_meta( - row: types_.TableRow, path: Path, expected_meta: types_.DocumentMeta -): - """ - arrange: given a document table row - act: when _create_document_meta is called - assert: document meta with path to file is returned. - """ - assert migration._create_document_meta(row=row, path=path) == expected_meta - - -@pytest.mark.parametrize( - "row, path, expected_meta", - [ - pytest.param( - group_row := factories.TableRowFactory(is_group=True, path="group-1"), - Path("group-1"), - types_.GitkeepMeta(path=Path("group-1/.gitkeep"), table_row=group_row), - id="single group row", - ), - pytest.param( - group_row := factories.TableRowFactory(is_group=True, path="group-1-group-2"), - Path("group-1/group-2"), - types_.GitkeepMeta(path=Path("group-1/group-2/.gitkeep"), table_row=group_row), - id="nested group row with correct current path", - ), - ], -) -def test__create_gitkeep_meta(row: types_.TableRow, path: Path, expected_meta: types_.GitkeepMeta): - """ - arrange: given a empty group table row - act: when _create_gitkeep_meta is called - assert: gitkeep meta denoting empty group is returned. 
- """ - assert migration._create_gitkeep_meta(row=row, path=path) == expected_meta - - -@pytest.mark.parametrize( - "content, expected_meta", - [ - pytest.param( - content := "content-1", - types_.IndexDocumentMeta(path=Path("index.md"), content=content), - ), - ], -) -def test__index_file_from_content(content: str, expected_meta: types_.IndexDocumentMeta): - """ - arrange: given an index file content - act: when _index_file_from_content is called - assert: expected index document metadata is returned. - """ - assert migration._index_file_from_content(content) == expected_meta - - @pytest.mark.parametrize( "meta", [ From 6d30fcd7ca457c53d060ca32cb35d8b92ef1d1ef Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 10 Jan 2023 18:33:57 +0800 Subject: [PATCH 103/107] minor docstring changes --- README.md | 2 - main.py | 2 +- src/migration.py | 68 ++++++++++------------- src/pull_request.py | 2 +- tests/unit/test___init__.py | 13 ++--- tests/unit/test_docs_directory.py | 4 +- tests/unit/test_migration.py | 89 +++++++------------------------ 7 files changed, 55 insertions(+), 125 deletions(-) diff --git a/README.md b/README.md index d529a6d9..28d58de3 100644 --- a/README.md +++ b/README.md @@ -99,8 +99,6 @@ charmhub. discourse_api_username: ${{ secrets.DISCOURSE_API_USERNAME }} discourse_api_key: ${{ secrets.DISCOURSE_API_KEY }} github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Show index page - run: echo '${{ steps.publishDocumentation.outputs.index_url }}' ``` a branch name with `upload-charm-docs/migrate` will be created and a pull diff --git a/main.py b/main.py index a2753c20..e9611f8d 100755 --- a/main.py +++ b/main.py @@ -16,7 +16,7 @@ def _parse_env_vars() -> types_.UserInputs: - """Parse user inputs from environment variables. + """Instantiate user inputs from environment variables. Returns: Wrapped user input variables. diff --git a/src/migration.py b/src/migration.py index 487ed2fa..cbeb2a6f 100644 --- a/src/migration.py +++ b/src/migration.py @@ -1,7 +1,7 @@ # Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. -"""Module for transforming index table rows into local files.""" +"""Module for migrating remote documentation into local git repository.""" import itertools import logging @@ -16,10 +16,10 @@ GITKEEP_FILENAME = ".gitkeep" -def _extract_name_from_paths(current_group_path: Path, table_path: types_.TablePath) -> str: +def _extract_name(current_group_path: Path, table_path: types_.TablePath) -> str: """Extract name given a current working directory and table path. - If there is a matching prefix in table path's prefix generated from the current directory, + If there is a matching prefix in table path's prefix generated from the current group path, the prefix is removed and the remaining segment is returned as the extracted name. Args: @@ -35,7 +35,10 @@ def _extract_name_from_paths(current_group_path: Path, table_path: types_.TableP def _validate_table_rows( table_rows: typing.Iterable[types_.TableRow], ) -> typing.Iterable[types_.TableRow]: - """Check whether a table row is valid in regards to the levels and grouping. + """Check whether a table row is valid in regards to the sequence. + + By tracking the current group level for each row, it validates whether a given row is valid + among the sequence of rows given. Args: table_rows: Parsed rows from the index table. 
@@ -77,23 +80,22 @@
 
 
-def _change_group_path(
+def _get_row_group_path(
     group_path: Path, previous_row: types_.TableRow | None, row: types_.TableRow
 ) -> Path:
     """Get path to row's working group.
 
-    If row is a document, it's working group is the group one level below.
-    If row is a group, it should be the new working group.
+    If the given row is a document row, its working group is the group one level below.
+    If the given row is a group row, its working group should be the path to the row's group.
 
     Args:
-        group_path: the path of the group in which the last execution was run, it should be the
+        group_path: The path of the group in which the previous row was, it should be the
             equivalent to previous_row's group path.
-        previous_row: table row evaluated before the current. None if current row is the first row
-            in execution.
-        row: A single row from table rows.
+        previous_row: Previous row in the sequence. None if row is the first in sequence.
+        row: Target row to adjust group path to.
 
     Returns:
-        A path to the group where the row or contents of row should reside in.
+        A path to the group where the row belongs.
     """
     # if it's the first row or the row level has increased from group row
     if not previous_row:
@@ -101,9 +103,7 @@ def _change_group_path(
         if not row.is_group:
             return group_path
         # move one level of nesting into new group path
-        return group_path / _extract_name_from_paths(
-            current_group_path=group_path, table_path=row.path
-        )
+        return group_path / _extract_name(current_group_path=group_path, table_path=row.path)
 
     # working group path belongs in the group 1 level above current row's level.
     # i.e. group-1/document-1, group path is group-1
@@ -120,9 +120,7 @@ def _change_group_path(
     # move working group path to current group
     # i.e. current: group-1, destination: group-1/group-2, row: group-1-group-2
     if row.is_group:
-        group_path = group_path / _extract_name_from_paths(
-            current_group_path=group_path, table_path=row.path
-        )
+        group_path = group_path / _extract_name(current_group_path=group_path, table_path=row.path)
 
     return group_path
 
@@ -146,7 +144,7 @@ def _create_document_meta(row: types_.TableRow, path: Path) -> types_.DocumentMe
         raise exceptions.MigrationError(
             "Internal error, no implementation for creating document meta with missing link in row."
         )
-    name = _extract_name_from_paths(current_group_path=path, table_path=row.path)
+    name = _extract_name(current_group_path=path, table_path=row.path)
     return types_.DocumentMeta(path=path / f"{name}.md", link=row.navlink.link, table_row=row)
 
 
@@ -166,7 +164,7 @@
 def _extract_docs_from_table_rows(
     table_rows: typing.Iterable[types_.TableRow],
 ) -> typing.Iterable[types_.MigrationFileMeta]:
-    """Extract necessary migration documents to build docs directory from server.
+    """Extract necessary migration documents to build docs directory.
 
     Algorithm:
         1.
For each row: @@ -197,7 +195,7 @@ def _extract_docs_from_table_rows( ): yield _create_gitkeep_meta(row=previous_row, path=previous_path) - current_group_path = _change_group_path( + current_group_path = _get_row_group_path( group_path=current_group_path, previous_row=previous_row, row=row ) @@ -320,7 +318,7 @@ def _migrate_index(index_meta: types_.IndexDocumentMeta, docs_path: Path) -> typ def _run_one( file_meta: types_.MigrationFileMeta, discourse: Discourse, docs_path: Path ) -> types_.ActionReport: - """Write document content relative to docs directory. + """Write document content inside the docs directory. Args: file_meta: Information about migration file corresponding to a row in index table. @@ -359,7 +357,7 @@ def _run_one( def _get_docs_metadata( table_rows: typing.Iterable[types_.TableRow], index_content: str ) -> itertools.chain[types_.MigrationFileMeta]: - """Get metadata for documents to be migrated. + """Get metadata for all documents to be migrated. Args: table_rows: Table rows from the index table. @@ -373,21 +371,6 @@ def _get_docs_metadata( return itertools.chain((index_doc,), table_docs) -def _assert_migration_success(migration_reports: typing.Iterable[types_.ActionReport]) -> None: - """Assert all documents have been successfully migrated. - - Args: - migration_reports: Report containing migration details from server to local repository. - - Raises: - MigrationError: if any migration report has failed. - """ - if any(result for result in migration_reports if result.result is types_.ActionResult.FAIL): - raise exceptions.MigrationError( - "Error migrating the docs, please check the logs for more detail." - ) - - def run( table_rows: typing.Iterable[types_.TableRow], index_content: str, @@ -401,6 +384,9 @@ def run( index_content: Main content describing the charm. discourse: Client to the documentation server. docs_path: The path to the docs directory containing all the documentation. + + Raises: + MigrationError: if any migration report has failed. """ valid_table_rows = ( valid_table_row for valid_table_row in _validate_table_rows(table_rows=table_rows) @@ -411,4 +397,8 @@ def run( table_rows=valid_table_rows, index_content=index_content ) ) - _assert_migration_success(migration_reports=migration_reports) + + if any(result for result in migration_reports if result.result is types_.ActionResult.FAIL): + raise exceptions.MigrationError( + "Error migrating the docs, please check the logs for more detail." + ) diff --git a/src/pull_request.py b/src/pull_request.py index e053b70f..fa138889 100644 --- a/src/pull_request.py +++ b/src/pull_request.py @@ -1,7 +1,7 @@ # Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. 
-"""Module for handling git repository.""" +"""Module for handling interactions with git repository.""" import logging import re diff --git a/tests/unit/test___init__.py b/tests/unit/test___init__.py index 752161d3..9b0e0a0e 100644 --- a/tests/unit/test___init__.py +++ b/tests/unit/test___init__.py @@ -208,7 +208,6 @@ def test__run_migrate_server_error_topic( ) -# pylint: disable=too-many-locals def test__run_migrate( repository_path: Path, upstream_repository: Repo, @@ -253,9 +252,6 @@ def test__run_migrate( assert path_file.read_text(encoding="utf-8") == link_content -# pylint: enable=too-many-locals - - def test_run_no_docs_no_dir(repository_path: Path): """ arrange: given a path with a metadata.yaml that has no docs key and no docs directory @@ -269,8 +265,9 @@ def test_run_no_docs_no_dir(repository_path: Path): with pytest.raises(exceptions.InputError) as exc: # run is repeated in unit tests / integration tests - # pylint: disable=duplicate-code - _ = run(base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input) + _ = run( + base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input + ) # pylint: disable=duplicate-code assert str(exc.value) == GETTING_STARTED @@ -289,10 +286,9 @@ def test_run_no_docs_empty_dir(repository_path: Path): user_input = factories.UserInputFactory() # run is repeated in unit tests / integration tests - # pylint: disable=duplicate-code returned_page_interactions = run( base_path=repository_path, discourse=mocked_discourse, user_inputs=user_input - ) + ) # pylint: disable=duplicate-code mocked_discourse.create_topic.assert_called_once_with( title="Name 1 Documentation Overview", @@ -301,7 +297,6 @@ def test_run_no_docs_empty_dir(repository_path: Path): assert returned_page_interactions == {url: types_.ActionResult.SUCCESS} -# pylint: disable=too-many-locals @pytest.mark.usefixtures("patch_create_repository_client") def test_run_no_docs_dir( repository_path: Path, diff --git a/tests/unit/test_docs_directory.py b/tests/unit/test_docs_directory.py index 81179500..19fd3c71 100644 --- a/tests/unit/test_docs_directory.py +++ b/tests/unit/test_docs_directory.py @@ -183,12 +183,12 @@ def test__calculate_level( ), ], ) -def test__calculate_table_path( +def test_calculate_table_path( directories: tuple[str, ...], file: str | None, expected_table_path: str, tmp_path: Path ): """ arrange: given docs directory with given directories and file created - act: when _calculate_table_path is called with the docs directory + act: when calculate_table_path is called with the docs directory assert: then the expected table path is returned. """ path = create_nested_directories_file(base_path=tmp_path, directories=directories, file=file) diff --git a/tests/unit/test_migration.py b/tests/unit/test_migration.py index 289b3613..3618f2ba 100644 --- a/tests/unit/test_migration.py +++ b/tests/unit/test_migration.py @@ -32,16 +32,13 @@ pytest.param(Path("not/matching/group"), "test", "test", id="non-prefix path"), ], ) -def test__extract_name_from_paths(path: Path, table_path: types_.TablePath, expected: str): +def test__extract_name(path: Path, table_path: types_.TablePath, expected: str): """ arrange: given a path and table path composed from groups - act: when _extract_name_from_paths is called + act: when _extract_name is called assert: the name part is extracted from table path. 
""" - assert ( - migration._extract_name_from_paths(current_group_path=path, table_path=table_path) - == expected - ) + assert migration._extract_name(current_group_path=path, table_path=table_path) == expected @pytest.mark.parametrize( @@ -726,74 +723,24 @@ def test__get_docs_metadata(): assert isinstance(returned_docs_metadata[1], types_.MigrationFileMeta) -@pytest.mark.parametrize( - "migration_results", - [ - pytest.param( - (factories.ActionReportFactory(is_failed=True, is_migrate=True),), - id="single failed result", - ), - pytest.param( - ( - factories.ActionReportFactory(is_success=True, is_migrate=True), - factories.ActionReportFactory(is_failed=True, is_migrate=True), - ), - id="single failed result in successful result", - ), - pytest.param( - ( - factories.ActionReportFactory(is_skipped=True, is_migrate=True), - factories.ActionReportFactory(is_failed=True, is_migrate=True), - ), - id="single failed result in skipped result", - ), - pytest.param( - ( - factories.ActionReportFactory(is_success=True, is_migrate=True), - factories.ActionReportFactory(is_failed=True, is_migrate=True), - factories.ActionReportFactory(is_skipped=True, is_migrate=True), - factories.ActionReportFactory(is_failed=True, is_migrate=True), - ), - id="multiple failed results in multiple result types", - ), - ], -) -def test__assert_migration_success_failed_result(migration_results: Iterable[types_.ActionReport]): +def test_run_error(tmp_path: Path): """ - arrange: given an migration results iterable with a failed result - act: when _assert_migration_success is called - assert: Migration error is raised. + arrange: given table rows, index content, mocked discourse that throws an exception and a + temporary docs path + act: when run is called + assert: table rows are successfully migrated """ - with pytest.raises(exceptions.MigrationError): - migration._assert_migration_success(migration_reports=migration_results) - + mocked_discourse = mock.MagicMock(spec=discourse.Discourse) + mocked_discourse.retrieve_topic.side_effect = exceptions.DiscourseError + table_rows = (factories.TableRowFactory(level=1),) -@pytest.mark.parametrize( - "migration_results", - [ - pytest.param( - (factories.ActionReportFactory(is_success=True, is_migrate=True),), - id="successful result", - ), - pytest.param( - (factories.ActionReportFactory(is_skipped=True, is_migrate=True),), id="skipped result" - ), - pytest.param( - ( - factories.ActionReportFactory(is_success=True, is_migrate=True), - factories.ActionReportFactory(is_skipped=True, is_migrate=True), - ), - id="non-failed results", - ), - ], -) -def test__assert_migration_success(migration_results: Iterable[types_.ActionReport]): - """ - arrange: given an migration results iterable with no failed result - act: when _assert_migration_success is called - assert: No exceptions are raised. 
- """ - migration._assert_migration_success(migration_reports=migration_results) + with pytest.raises(exceptions.MigrationError): + migration.run( + table_rows=table_rows, + index_content="content-1", + discourse=mocked_discourse, + docs_path=tmp_path, + ) @pytest.mark.parametrize( From 7dbc67d9fb18ead0581bba6b43d3f80081c04ab8 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 11 Jan 2023 16:17:54 +0800 Subject: [PATCH 104/107] explicitly describe discourse_hostname --- action.yaml | 2 +- src/types_.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/action.yaml b/action.yaml index 7cb246b2..2df5f959 100644 --- a/action.yaml +++ b/action.yaml @@ -16,7 +16,7 @@ inputs: required: false type: boolean discourse_host: - description: The discourse host name. + description: The base path(hostname) to the discourse server. required: true type: string discourse_api_username: diff --git a/src/types_.py b/src/types_.py index 2181b234..9f234265 100644 --- a/src/types_.py +++ b/src/types_.py @@ -15,7 +15,7 @@ class UserInputs: """Configurable user input values used to run upload-charm-docs. Attrs: - discourse_hostname: The discourse hostname. + discourse_hostname: The base path to the discourse server. discourse_category_id: The category identifier to use on discourse for all topics. discourse_api_username: The discourse API username to use for interactions with the server. discourse_api_key: The discourse API key to use for interactions with the server. From 1227faa3afdc91cb7a7644d649f5014b3b0c925c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 11 Jan 2023 16:18:54 +0800 Subject: [PATCH 105/107] separate out generator steps --- src/migration.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/migration.py b/src/migration.py index cbeb2a6f..c8dfc174 100644 --- a/src/migration.py +++ b/src/migration.py @@ -388,14 +388,13 @@ def run( Raises: MigrationError: if any migration report has failed. 
""" - valid_table_rows = ( - valid_table_row for valid_table_row in _validate_table_rows(table_rows=table_rows) + valid_table_rows = _validate_table_rows(table_rows=table_rows) + document_metadata = _get_docs_metadata( + table_rows=valid_table_rows, index_content=index_content ) migration_reports = ( _run_one(file_meta=document, discourse=discourse, docs_path=docs_path) - for document in _get_docs_metadata( - table_rows=valid_table_rows, index_content=index_content - ) + for document in document_metadata ) if any(result for result in migration_reports if result.result is types_.ActionResult.FAIL): From 8d4896e5f05121fc1e868f20b397957c2199defb Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 11 Jan 2023 16:20:09 +0800 Subject: [PATCH 106/107] rename tests for repository client --- tests/unit/test_pull_request.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_pull_request.py b/tests/unit/test_pull_request.py index d944071a..8ca50715 100644 --- a/tests/unit/test_pull_request.py +++ b/tests/unit/test_pull_request.py @@ -24,7 +24,7 @@ from .helpers import assert_substrings_in_string -def test___init__(repository: Repo, mock_github_repo: Repository): +def test_repository_client__init__(repository: Repo, mock_github_repo: Repository): """ arrange: given a local git repository client and mock github repository client act: when RepositoryClient is initialized @@ -39,7 +39,7 @@ def test___init__(repository: Repo, mock_github_repo: Repository): assert config_reader.get_value("user", "email") == pull_request.ACTIONS_USER_EMAIL -def test__check_branch_exists_error( +def test_repository_client_check_branch_exists_error( monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ @@ -61,7 +61,7 @@ def test__check_branch_exists_error( ) -def test__check_branch_not_exists(repository_client: RepositoryClient): +def test_repository_client_check_branch_not_exists(repository_client: RepositoryClient): """ arrange: given RepositoryClient with an upstream repository act: when _check_branch_exists is called @@ -70,7 +70,7 @@ def test__check_branch_not_exists(repository_client: RepositoryClient): assert not repository_client.check_branch_exists("no-such-branchname") -def test__check_branch_exists( +def test_repository_client_check_branch_exists( repository_client: RepositoryClient, upstream_repository: Repo, upstream_repository_path: Path ): """ @@ -88,7 +88,7 @@ def test__check_branch_exists( assert repository_client.check_branch_exists(branch_name) -def test__create_branch_error( +def test_repository_client_create_branch_error( monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ @@ -109,7 +109,7 @@ def test__create_branch_error( ) -def test__create_branch( +def test_repository_client_create_branch( repository_client: RepositoryClient, repository_path: Path, upstream_repository: Repo, @@ -134,7 +134,7 @@ def test__create_branch( ) -def test__create_pull_request_error( +def test_repository_client_create_pull_request_error( monkeypatch: pytest.MonkeyPatch, repository_client: RepositoryClient ): """ @@ -156,7 +156,9 @@ def test__create_pull_request_error( ) -def test__create_pull_request(repository_client: RepositoryClient, mock_pull_request: PullRequest): +def test_repository_client_create_pull_request( + repository_client: RepositoryClient, mock_pull_request: PullRequest +): """ arrange: given RepositoryClient with a mocked github client that returns a mocked pull request act: when _create_pull_request is called 
From 9547a6c5ac13d6256796caa271329f970f2e38cc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 12 Jan 2023 17:31:26 +0800 Subject: [PATCH 107/107] sort requirements alphabetically --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index cba923b9..41288c1d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ +GitPython>=3.1,<3.2 pydiscourse>=1.3,<1.4 +PyGithub>=1.57,<1.58 PyYAML>=6.0,<6.1 requests>=2.28,<2.29 -GitPython>=3.1,<3.2 -PyGithub>=1.57,<1.58
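
The reworked migration entry point introduced in PATCH 103 and simplified in PATCH 105 can be driven as in the sketch below. This is an illustrative sketch only and is not part of any patch in this series: the `migrate_docs` wrapper is hypothetical, and the `from src import ...` paths are assumptions mirrored from the test modules above, while the `run()` signature and the `MigrationError`-on-failure behaviour are taken from the `src/migration.py` hunks.

```python
# Illustrative sketch only -- not part of any patch in this series.
# Assumes the run() signature and MigrationError behaviour shown in the
# src/migration.py hunks above; the import paths are assumptions based on
# the test modules (e.g. mock.MagicMock(spec=discourse.Discourse)).
import typing
from pathlib import Path

from src import discourse, exceptions, migration, types_


def migrate_docs(
    table_rows: typing.Iterable[types_.TableRow],
    index_content: str,
    client: discourse.Discourse,
    docs_path: Path,
) -> bool:
    """Drive the migration and report whether every document was written successfully."""
    try:
        # run() validates the row sequence, derives the migration metadata and
        # writes each document (or .gitkeep placeholder) under docs_path,
        # raising MigrationError once any per-document report is a FAIL.
        migration.run(
            table_rows=table_rows,
            index_content=index_content,
            discourse=client,
            docs_path=docs_path,
        )
    except exceptions.MigrationError:
        return False
    return True
```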