diff --git a/manywheel/upload.sh b/manywheel/upload.sh
index a0c7b5b85..e43d0030b 100755
--- a/manywheel/upload.sh
+++ b/manywheel/upload.sh
@@ -33,12 +33,17 @@ for cuda_ver in "${CUDA_VERSIONS[@]}"; do
 
     # Upload the wheels to s3
     if [[ -d "$wheel_dir" ]]; then
+        pushd "$wheel_dir"
+        find . -type f -exec sh -c 'unzip -j {} -d . "*.dist-info/METADATA" && mv METADATA {}.metadata' \;
         echo "Uploading all of: $(ls $wheel_dir) to $s3_wheel_dir"
-        ls "$wheel_dir" | xargs -I {} aws s3 cp "$wheel_dir"/{} "$s3_wheel_dir" --acl public-read
+        ls . | xargs -I {} aws s3 cp {} "$s3_wheel_dir" --acl public-read
+        popd
     fi
 
     if [[ -d "$libtorch_dir" ]]; then
+        pushd "$libtorch_dir"
         echo "Uploading all of: $(ls $libtorch_dir) to $s3_libtorch_dir"
-        ls "$libtorch_dir" | xargs -I {} aws s3 cp "$libtorch_dir"/{} "$s3_libtorch_dir" --acl public-read
+        ls . | xargs -I {} aws s3 cp {} "$s3_libtorch_dir" --acl public-read
+        popd
     fi
 done
diff --git a/s3_management/manage.py b/s3_management/manage.py
index a8bda72e2..2740f5287 100644
--- a/s3_management/manage.py
+++ b/s3_management/manage.py
@@ -21,7 +21,7 @@
 CLIENT = boto3.client('s3')
 BUCKET = S3.Bucket('pytorch')
 
-ACCEPTED_FILE_EXTENSIONS = ("whl", "zip", "tar.gz")
+ACCEPTED_FILE_EXTENSIONS = ("whl", "zip", "tar.gz", ".whl.metadata")
 ACCEPTED_SUBDIR_PATTERNS = [
     r"cu[0-9]+",  # for cuda
     r"rocm[0-9]+\.[0-9]+",  # for rocm
@@ -148,8 +148,15 @@ def between_bad_dates(package_build_time: datetime):
 
 
 class S3Index:
-    def __init__(self: S3IndexType, objects: List[S3Object], prefix: str) -> None:
+    def __init__(
+        self: S3IndexType,
+        objects: List[S3Object],
+        # Maps an object's key to the sha256 of the relevant .metadata (if it exists)
+        object_metadatas: Dict[str, str],
+        prefix: str,
+    ) -> None:
         self.objects = objects
+        self.object_metadatas = object_metadatas
         self.prefix = prefix.rstrip("/")
         self.html_name = PREFIXES_WITH_HTML[self.prefix]
         # should dynamically grab subdirectories like whl/test/cu101
@@ -279,8 +286,14 @@ def to_simple_package_html(
         out.append('