[pre-commit.ci] auto fixes from pre-commit.com hooks
For more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jan 13, 2025
1 parent 90146a3 commit cde0265
Showing 6 changed files with 12 additions and 12 deletions.
4 changes: 2 additions & 2 deletions src/usage_metrics/core/github_nonpartitioned.py
@@ -45,7 +45,7 @@ def core_github_forks(
["owner", "permissions", "license", "topics"]
].astype(str)

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()

@@ -73,6 +73,6 @@ def core_github_stargazers(
df = df.set_index("id")
assert df.index.is_unique

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()
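
The same one-line quoting fix repeats in the remaining GitHub, Kaggle, S3, and Zenodo modules below. The original f-strings reuse double quotes inside the replacement field, which is only valid on Python 3.12+ (PEP 701) and raises a SyntaxError on earlier interpreters; switching the inner quotes to single quotes keeps the log line portable. A minimal standalone sketch of the two forms, outside the Dagster context object used above (which hook applied the rewrite, e.g. ruff, is not identified on this page and is an assumption):

import os

# Before: the f-string expression reuses double quotes.
# Valid only on Python 3.12+ (PEP 701); SyntaxError on 3.11 and earlier.
# message = f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment."

# After: single quotes inside, double quotes outside; parses on all supported versions.
message = f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment."
print(message)  # "Saving to local environment." when METRICS_PROD_ENV is unset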
8 changes: 4 additions & 4 deletions src/usage_metrics/core/github_partitioned.py
@@ -36,7 +36,7 @@ def core_github_popular_referrers(
df = df.set_index(["metrics_date", "referrer"])
assert df.index.is_unique

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()

@@ -67,7 +67,7 @@ def core_github_popular_paths(
df = df.set_index(["metrics_date", "path"])
assert df.index.is_unique

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()

@@ -119,7 +119,7 @@ def core_github_clones(
df = df.set_index("metrics_date")
assert df.index.is_unique

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()

@@ -173,6 +173,6 @@ def core_github_views(
df = df.set_index("metrics_date")
assert df.index.is_unique

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()
2 changes: 1 addition & 1 deletion src/usage_metrics/core/kaggle.py
@@ -72,6 +72,6 @@ def core_kaggle_logs(
["data", "keywords", "licenses", "collaborators"]
].astype(str)

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()
2 changes: 1 addition & 1 deletion src/usage_metrics/core/s3.py
@@ -119,6 +119,6 @@ def core_s3_logs(
]
)

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return geocoded_df.reset_index()
2 changes: 1 addition & 1 deletion src/usage_metrics/core/zenodo.py
@@ -122,6 +122,6 @@ def core_zenodo_logs(
df["dataset_slug"] = df["concept_record_id"].map(dataset_slugs)
assert not df["dataset_slug"].isnull().to_numpy().any()

context.log.info(f"Saving to {os.getenv("METRICS_PROD_ENV", "local")} environment.")
context.log.info(f"Saving to {os.getenv('METRICS_PROD_ENV', 'local')} environment.")

return df.reset_index()
6 changes: 3 additions & 3 deletions src/usage_metrics/helpers.py
@@ -136,9 +136,9 @@ def unpack_json_series(series: pd.Series) -> pd.DataFrame:
series_dict = {index: v if v else {} for index, v in series_dict.items()}

unpacked_df = pd.DataFrame.from_dict(series_dict, orient="index")
-assert len(unpacked_df) <= len(
-    series
-), "Unpacked more JSON records than there are records in the DataFrame."
+assert len(unpacked_df) <= len(series), (
+    "Unpacked more JSON records than there are records in the DataFrame."
+)
return unpacked_df
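
The helpers.py hunk above is purely a layout change: the assertion message now sits in its own parenthesized block instead of the len() call being split across lines, presumably to match the formatter hook's current wrapping style (an assumption; the page does not name the tool). Runtime behavior is unchanged, as in this small self-contained sketch:

import pandas as pd

unpacked_df = pd.DataFrame({"a": [1, 2]})
series = pd.Series([10, 20, 30])

# Same condition and same message operand as before; only the wrapping differs.
assert len(unpacked_df) <= len(series), (
    "Unpacked more JSON records than there are records in the DataFrame."
)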

