
Commit

linting
austinweisgrau committed Sep 19, 2024
1 parent d095abd commit 107582e
Showing 2 changed files with 11 additions and 32 deletions.
32 changes: 8 additions & 24 deletions parsons/google/google_cloud_storage.py
@@ -46,9 +46,7 @@ class GoogleCloudStorage(object):
     GoogleCloudStorage Class
     """
 
-    def __init__(
-        self, app_creds: Optional[Union[str, dict, Credentials]] = None, project=None
-    ):
+    def __init__(self, app_creds: Optional[Union[str, dict, Credentials]] = None, project=None):
         if isinstance(app_creds, Credentials):
             credentials = app_creds
         else:
@@ -304,9 +302,7 @@ def delete_blob(self, bucket_name, blob_name):
         blob.delete()
         logger.info(f"{blob_name} blob in {bucket_name} bucket deleted.")
 
-    def upload_table(
-        self, table, bucket_name, blob_name, data_type="csv", default_acl=None
-    ):
+    def upload_table(self, table, bucket_name, blob_name, data_type="csv", default_acl=None):
         """
         Load the data from a Parsons table into a blob.
 
@@ -345,9 +341,7 @@ def upload_table(
             local_file = table.to_json()
             content_type = "application/json"
         else:
-            raise ValueError(
-                f"Unknown data_type value ({data_type}): must be one of: csv or json"
-            )
+            raise ValueError(f"Unknown data_type value ({data_type}): must be one of: csv or json")
 
         try:
             blob.upload_from_filename(
@@ -417,9 +411,7 @@ def copy_bucket_to_gcs(
                 Secret key to authenticate storage transfer
         """
         if source not in ["gcs", "s3"]:
-            raise ValueError(
-                f"Blob transfer only supports gcs and s3 sources [source={source}]"
-            )
+            raise ValueError(f"Blob transfer only supports gcs and s3 sources [source={source}]")
         if source_path and source_path[-1] != "/":
             raise ValueError("Source path much end in a '/'")
 
@@ -606,13 +598,9 @@ def unzip_blob(
         }
 
         file_extension = compression_params[compression_type]["file_extension"]
-        compression_function = compression_params[compression_type][
-            "compression_function"
-        ]
+        compression_function = compression_params[compression_type]["compression_function"]
 
-        compressed_filepath = self.download_blob(
-            bucket_name=bucket_name, blob_name=blob_name
-        )
+        compressed_filepath = self.download_blob(bucket_name=bucket_name, blob_name=blob_name)
 
         decompressed_filepath = compressed_filepath.replace(file_extension, "")
         decompressed_blob_name = (
Expand Down Expand Up @@ -644,9 +632,7 @@ def __gzip_decompress_and_write_to_gcs(self, **kwargs):
bucket_name = kwargs.pop("bucket_name")

with gzip.open(compressed_filepath, "rb") as f_in:
logger.debug(
f"Uploading uncompressed file to GCS: {decompressed_blob_name}"
)
logger.debug(f"Uploading uncompressed file to GCS: {decompressed_blob_name}")
bucket = self.get_bucket(bucket_name=bucket_name)
blob = storage.Blob(name=decompressed_blob_name, bucket=bucket)
blob.upload_from_file(file_obj=f_in, rewind=True, timeout=3600)
@@ -666,9 +652,7 @@ def __zip_decompress_and_write_to_gcs(self, **kwargs):
         with zipfile.ZipFile(compressed_filepath) as path_:
             # Open the underlying file
             with path_.open(decompressed_blob_in_archive) as f_in:
-                logger.debug(
-                    f"Uploading uncompressed file to GCS: {decompressed_blob_name}"
-                )
+                logger.debug(f"Uploading uncompressed file to GCS: {decompressed_blob_name}")
                 bucket = self.get_bucket(bucket_name=bucket_name)
                 blob = storage.Blob(name=decompressed_blob_name, bucket=bucket)
                 blob.upload_from_file(file_obj=f_in, rewind=True, timeout=3600)
11 changes: 3 additions & 8 deletions parsons/sftp/sftp.py
@@ -216,8 +216,7 @@ def get_files(
             files_to_download.extend(
                 f
                 for file_list in [
-                    self.list_files(directory, connection, pattern)
-                    for directory in remote
+                    self.list_files(directory, connection, pattern) for directory in remote
                 ]
                 for f in file_list
             )
@@ -357,9 +356,7 @@ def _list_contents(remote_path, connection, dir_pattern=None, file_pattern=None)
                 entry_pathname = remote_path + "/" + entry.filename
                 for method, pattern, do_search_full_path, paths in dirs_and_files:
                     string = entry_pathname if do_search_full_path else entry.filename
-                    if method(entry.st_mode) and (
-                        not pattern or re.search(pattern, string)
-                    ):
+                    if method(entry.st_mode) and (not pattern or re.search(pattern, string)):
                         paths.append(entry_pathname)
         except FileNotFoundError:  # This error is raised when a directory is empty
             pass
@@ -473,9 +470,7 @@ def _walk_tree(
 
         depth += 1
 
-        dirs, files = self._list_contents(
-            remote_path, connection, dir_pattern, file_pattern
-        )
+        dirs, files = self._list_contents(remote_path, connection, dir_pattern, file_pattern)
 
         if download:
             self.get_files(files_to_download=files)