diff --git a/astro-airflow-iris/{{ cookiecutter.repo_name }}/docs/source/conf.py b/astro-airflow-iris/{{ cookiecutter.repo_name }}/docs/source/conf.py
index e8ffa078..a2c221bc 100644
--- a/astro-airflow-iris/{{ cookiecutter.repo_name }}/docs/source/conf.py
+++ b/astro-airflow-iris/{{ cookiecutter.repo_name }}/docs/source/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 # {{ cookiecutter.python_package }} documentation build
 
diff --git a/astro-airflow-iris/{{ cookiecutter.repo_name }}/pyproject.toml b/astro-airflow-iris/{{ cookiecutter.repo_name }}/pyproject.toml
index 8ca93b36..501367b1 100644
--- a/astro-airflow-iris/{{ cookiecutter.repo_name }}/pyproject.toml
+++ b/astro-airflow-iris/{{ cookiecutter.repo_name }}/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 dynamic = ["dependencies", "version"]
 
 [project.scripts]
-{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"
+"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main"
 
 [project.entry-points."kedro.hooks"]
 
diff --git a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
index 2d4272e3..d3aa4d38 100644
--- a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
+++ b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
@@ -1,11 +1,10 @@
 """Project pipelines."""
-from typing import Dict
 
 from kedro.framework.project import find_pipelines
 from kedro.pipeline import Pipeline
 
 
-def register_pipelines() -> Dict[str, Pipeline]:
+def register_pipelines() -> dict[str, Pipeline]:
     """Register the project's pipelines.
 
     Returns:
diff --git a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_engineering/nodes.py b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_engineering/nodes.py
index 87e650db..8f6be87b 100644
--- a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_engineering/nodes.py
+++ b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_engineering/nodes.py
@@ -4,12 +4,12 @@
 PLEASE DELETE THIS FILE ONCE YOU START WORKING ON YOUR OWN PROJECT!
 """
 
-from typing import Any, Dict
+from typing import Any
 
 import pandas as pd
 
 
-def split_data(data: pd.DataFrame, example_test_data_ratio: float) -> Dict[str, Any]:
+def split_data(data: pd.DataFrame, example_test_data_ratio: float) -> dict[str, Any]:
     """Node for splitting the classical Iris data set into training and test
     sets, each split into features and labels. The split ratio parameter is
     taken from conf/project/parameters.yml.
diff --git a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
index 62dfa201..4a78f84e 100644
--- a/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
+++ b/astro-airflow-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
@@ -4,14 +4,14 @@
 Delete this when you start working on your own Kedro project.
""" import logging -from typing import Any, Dict +from typing import Any import numpy as np import pandas as pd def train_model( - train_x: pd.DataFrame, train_y: pd.DataFrame, parameters: Dict[str, Any] + train_x: pd.DataFrame, train_y: pd.DataFrame, parameters: dict[str, Any] ) -> np.ndarray: """Node for training a simple multi-class logistic regression model. The number of training iterations as well as the learning rate are taken from diff --git a/databricks-iris/{{ cookiecutter.repo_name }}/pyproject.toml b/databricks-iris/{{ cookiecutter.repo_name }}/pyproject.toml index 8ca93b36..501367b1 100644 --- a/databricks-iris/{{ cookiecutter.repo_name }}/pyproject.toml +++ b/databricks-iris/{{ cookiecutter.repo_name }}/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" dynamic = ["dependencies", "version"] [project.scripts] -{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main" +"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main" [project.entry-points."kedro.hooks"] diff --git a/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/nodes.py b/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/nodes.py index 365bd796..fe94e38c 100644 --- a/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/nodes.py +++ b/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/nodes.py @@ -4,14 +4,13 @@ """ import logging -from typing import Dict, Tuple import numpy as np import pandas as pd from pyspark.sql import DataFrame -def split_data(data: DataFrame, parameters: Dict) -> Tuple: +def split_data(data: DataFrame, parameters: dict) -> tuple: """Splits data into features and targets training and test sets. Args: diff --git a/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py index 2d4272e3..d3aa4d38 100644 --- a/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +++ b/databricks-iris/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py @@ -1,11 +1,10 @@ """Project pipelines.""" -from typing import Dict from kedro.framework.project import find_pipelines from kedro.pipeline import Pipeline -def register_pipelines() -> Dict[str, Pipeline]: +def register_pipelines() -> dict[str, Pipeline]: """Register the project's pipelines. 
     Returns:
diff --git a/features/environment.py b/features/environment.py
index c3aa5b10..8b870cb5 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -8,9 +8,9 @@
 import tempfile
 import venv
 from pathlib import Path
-from typing import Any, Set
+from typing import Any
 
-_PATHS_TO_REMOVE: Set[Path] = set()
+_PATHS_TO_REMOVE: set[Path] = set()
 
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
index 94fecac3..600212e4 100644
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 # {{ cookiecutter.python_package }} documentation build
 
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/pyproject.toml b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/pyproject.toml
index b7ef36fc..e17ddb9b 100644
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/pyproject.toml
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 dynamic = ["dependencies", "version"]
 
 [project.scripts]
-{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"
+"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main"
 
 [project.entry-points."kedro.hooks"]
 
@@ -43,7 +43,7 @@ namespaces = false
 package_name = "{{ cookiecutter.python_package }}"
 project_name = "{{ cookiecutter.project_name }}"
 kedro_init_version = "{{ cookiecutter.kedro_version }}"
-tools = {{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}
+tools = "{{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}"
 example_pipeline = "{{ cookiecutter.example_pipeline }}"
 source_dir = "src"
 
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
index 2d4272e3..d3aa4d38 100644
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
@@ -1,11 +1,10 @@
 """Project pipelines."""
-from typing import Dict
 
 from kedro.framework.project import find_pipelines
 from kedro.pipeline import Pipeline
 
 
-def register_pipelines() -> Dict[str, Pipeline]:
+def register_pipelines() -> dict[str, Pipeline]:
     """Register the project's pipelines.
 
     Returns:
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
index 20f3362b..9357c8ec 100755
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
@@ -1,4 +1,3 @@
-from typing import Dict, Tuple
 
 import pandas as pd
 
@@ -19,7 +18,7 @@ def _parse_money(x: pd.Series) -> pd.Series:
     return x
 
 
-def preprocess_companies(companies: pd.DataFrame) -> Tuple[pd.DataFrame, Dict]:
+def preprocess_companies(companies: pd.DataFrame) -> tuple[pd.DataFrame, dict]:
     """Preprocesses the data for companies.
 
     Args:
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
index a49f3cd6..560db6e0 100755
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
@@ -1,5 +1,4 @@
 import logging
-from typing import Dict, Tuple
 
 import pandas as pd
 from sklearn.linear_model import LinearRegression
@@ -7,7 +6,7 @@
 from sklearn.model_selection import train_test_split
 
 
-def split_data(data: pd.DataFrame, parameters: Dict) -> Tuple:
+def split_data(data: pd.DataFrame, parameters: dict) -> tuple:
     """Splits data into features and targets training and test sets.
 
     Args:
@@ -41,7 +40,7 @@ def train_model(X_train: pd.DataFrame, y_train: pd.Series) -> LinearRegression:
 
 def evaluate_model(
     regressor: LinearRegression, X_test: pd.DataFrame, y_test: pd.Series
-) -> Dict[str, float]:
+) -> dict[str, float]:
     """Calculates and logs the coefficient of determination.
 
     Args:
diff --git a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
index 1184f123..29131844 100644
--- a/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
+++ b/spaceflights-pandas-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
@@ -64,4 +64,4 @@ def test_data_science_pipeline(caplog, dummy_data, dummy_parameters):
 
     SequentialRunner().run(pipeline, catalog)
 
-    assert successful_run_msg in caplog.text
\ No newline at end of file
+    assert successful_run_msg in caplog.text
diff --git a/spaceflights-pandas/{{ cookiecutter.repo_name }}/docs/source/conf.py b/spaceflights-pandas/{{ cookiecutter.repo_name }}/docs/source/conf.py
index 94fecac3..600212e4 100644
--- a/spaceflights-pandas/{{ cookiecutter.repo_name }}/docs/source/conf.py
+++ b/spaceflights-pandas/{{ cookiecutter.repo_name }}/docs/source/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 # {{ cookiecutter.python_package }} documentation build
 
diff --git a/spaceflights-pandas/{{ cookiecutter.repo_name }}/pyproject.toml b/spaceflights-pandas/{{ cookiecutter.repo_name }}/pyproject.toml
index be5a74c9..9bbc4911 100644
--- a/spaceflights-pandas/{{ cookiecutter.repo_name }}/pyproject.toml
+++ b/spaceflights-pandas/{{ cookiecutter.repo_name }}/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 dynamic = ["dependencies", "version"]
 
 [project.scripts]
-{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"
+"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main"
 
 [project.entry-points."kedro.hooks"]
 
@@ -43,7 +43,7 @@ namespaces = false
 package_name = "{{ cookiecutter.python_package }}"
 project_name = "{{ cookiecutter.project_name }}"
 kedro_init_version = "{{ cookiecutter.kedro_version }}"
-tools = {{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}
+tools = "{{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}"
 example_pipeline = "{{ cookiecutter.example_pipeline }}"
 source_dir = "src"
 
diff --git a/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
index 2d4272e3..d3aa4d38 100644
--- a/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
+++ b/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
@@ -1,11 +1,10 @@
 """Project pipelines."""
-from typing import Dict
 
 from kedro.framework.project import find_pipelines
 from kedro.pipeline import Pipeline
 
 
-def register_pipelines() -> Dict[str, Pipeline]:
+def register_pipelines() -> dict[str, Pipeline]:
     """Register the project's pipelines.
 
     Returns:
diff --git a/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py b/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
index beeb80c3..ce7fc487 100755
--- a/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
+++ b/spaceflights-pandas/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
@@ -1,5 +1,4 @@
 import logging
-from typing import Dict, Tuple
 
 import pandas as pd
 from sklearn.linear_model import LinearRegression
@@ -7,7 +6,7 @@
 from sklearn.model_selection import train_test_split
 
 
-def split_data(data: pd.DataFrame, parameters: Dict) -> Tuple:
+def split_data(data: pd.DataFrame, parameters: dict) -> tuple:
     """Splits data into features and targets training and test sets.
 
     Args:
diff --git a/spaceflights-pandas/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py b/spaceflights-pandas/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
index 1184f123..29131844 100644
--- a/spaceflights-pandas/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
+++ b/spaceflights-pandas/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
@@ -64,4 +64,4 @@ def test_data_science_pipeline(caplog, dummy_data, dummy_parameters):
 
     SequentialRunner().run(pipeline, catalog)
 
-    assert successful_run_msg in caplog.text
\ No newline at end of file
+    assert successful_run_msg in caplog.text
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
index 94fecac3..600212e4 100644
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/docs/source/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 # {{ cookiecutter.python_package }} documentation build
 
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/pyproject.toml b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/pyproject.toml
index b7ef36fc..e17ddb9b 100644
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/pyproject.toml
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 dynamic = ["dependencies", "version"]
 
 [project.scripts]
-{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"
+"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main"
 
 [project.entry-points."kedro.hooks"]
 
@@ -43,7 +43,7 @@ namespaces = false
 package_name = "{{ cookiecutter.python_package }}"
 project_name = "{{ cookiecutter.project_name }}"
 kedro_init_version = "{{ cookiecutter.kedro_version }}"
-tools = {{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}
+tools = "{{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}"
 example_pipeline = "{{ cookiecutter.example_pipeline }}"
 source_dir = "src"
 
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
index 2d4272e3..d3aa4d38 100644
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
@@ -1,11 +1,10 @@
 """Project pipelines."""
-from typing import Dict
 
 from kedro.framework.project import find_pipelines
 from kedro.pipeline import Pipeline
 
 
-def register_pipelines() -> Dict[str, Pipeline]:
+def register_pipelines() -> dict[str, Pipeline]:
     """Register the project's pipelines.
 
     Returns:
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
index 5ec2cd7b..2a791000 100755
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py
@@ -1,4 +1,3 @@
-from typing import Dict, Tuple
 
 import pandas as pd
 from pyspark.sql import Column
@@ -24,7 +23,7 @@ def _parse_money(x: Column) -> Column:
     return x
 
 
-def preprocess_companies(companies: SparkDataFrame) -> Tuple[SparkDataFrame, Dict]:
+def preprocess_companies(companies: SparkDataFrame) -> tuple[SparkDataFrame, dict]:
     """Preprocesses the data for companies.
 
     Args:
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
index a49f3cd6..560db6e0 100755
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
@@ -1,5 +1,4 @@
 import logging
-from typing import Dict, Tuple
 
 import pandas as pd
 from sklearn.linear_model import LinearRegression
@@ -7,7 +6,7 @@
 from sklearn.model_selection import train_test_split
 
 
-def split_data(data: pd.DataFrame, parameters: Dict) -> Tuple:
+def split_data(data: pd.DataFrame, parameters: dict) -> tuple:
     """Splits data into features and targets training and test sets.
 
     Args:
@@ -41,7 +40,7 @@ def train_model(X_train: pd.DataFrame, y_train: pd.Series) -> LinearRegression:
 
 def evaluate_model(
     regressor: LinearRegression, X_test: pd.DataFrame, y_test: pd.Series
-) -> Dict[str, float]:
+) -> dict[str, float]:
     """Calculates and logs the coefficient of determination.
 
     Args:
diff --git a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
index 1184f123..29131844 100644
--- a/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
+++ b/spaceflights-pyspark-viz/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
@@ -64,4 +64,4 @@ def test_data_science_pipeline(caplog, dummy_data, dummy_parameters):
 
     SequentialRunner().run(pipeline, catalog)
 
-    assert successful_run_msg in caplog.text
\ No newline at end of file
+    assert successful_run_msg in caplog.text
diff --git a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/docs/source/conf.py b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/docs/source/conf.py
index 94fecac3..600212e4 100644
--- a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/docs/source/conf.py
+++ b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/docs/source/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 # {{ cookiecutter.python_package }} documentation build
 
diff --git a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/pyproject.toml b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/pyproject.toml
index b7ef36fc..e17ddb9b 100644
--- a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/pyproject.toml
+++ b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 dynamic = ["dependencies", "version"]
 
 [project.scripts]
-{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"
+"{{ cookiecutter.repo_name }}" = "{{ cookiecutter.python_package }}.__main__:main"
 
 [project.entry-points."kedro.hooks"]
 
@@ -43,7 +43,7 @@ namespaces = false
 package_name = "{{ cookiecutter.python_package }}"
 project_name = "{{ cookiecutter.project_name }}"
 kedro_init_version = "{{ cookiecutter.kedro_version }}"
-tools = {{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}
+tools = "{{ cookiecutter.tools | default('') | string | replace('\"', '\\\"') }}"
 example_pipeline = "{{ cookiecutter.example_pipeline }}"
 source_dir = "src"
 
diff --git a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
index 2d4272e3..d3aa4d38 100644
--- a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
+++ b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py
@@ -1,11 +1,10 @@
 """Project pipelines."""
-from typing import Dict
 
 from kedro.framework.project import find_pipelines
 from kedro.pipeline import Pipeline
 
 
-def register_pipelines() -> Dict[str, Pipeline]:
+def register_pipelines() -> dict[str, Pipeline]:
     """Register the project's pipelines.
 
     Returns:
diff --git a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
index beeb80c3..ce7fc487 100755
--- a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
+++ b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_science/nodes.py
@@ -1,5 +1,4 @@
 import logging
-from typing import Dict, Tuple
 
 import pandas as pd
 from sklearn.linear_model import LinearRegression
@@ -7,7 +6,7 @@
 from sklearn.model_selection import train_test_split
 
 
-def split_data(data: pd.DataFrame, parameters: Dict) -> Tuple:
+def split_data(data: pd.DataFrame, parameters: dict) -> tuple:
     """Splits data into features and targets training and test sets.
 
     Args:
diff --git a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
index 1184f123..29131844 100644
--- a/spaceflights-pyspark/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
+++ b/spaceflights-pyspark/{{ cookiecutter.repo_name }}/tests/pipelines/data_science/test_pipeline.py
@@ -64,4 +64,4 @@ def test_data_science_pipeline(caplog, dummy_data, dummy_parameters):
 
     SequentialRunner().run(pipeline, catalog)
 
-    assert successful_run_msg in caplog.text
\ No newline at end of file
+    assert successful_run_msg in caplog.text