From 143b3f873d9ffd1f017849f134953a4f3d3a7cd4 Mon Sep 17 00:00:00 2001
From: Saleel
Date: Mon, 22 Jan 2024 17:26:34 +0530
Subject: [PATCH] chore: remove compression from s3 script

---
 packages/circuits/scripts/upload_to_s3.py | 66 +++--------------------
 1 file changed, 6 insertions(+), 60 deletions(-)

diff --git a/packages/circuits/scripts/upload_to_s3.py b/packages/circuits/scripts/upload_to_s3.py
index 0781b3a..b48936a 100644
--- a/packages/circuits/scripts/upload_to_s3.py
+++ b/packages/circuits/scripts/upload_to_s3.py
@@ -1,11 +1,9 @@
 import boto3
 import os
-import tarfile
-import time
-import gzip
 import argparse
 import subprocess
 from dotenv import load_dotenv
+
 load_dotenv('circuit.env')
 
 # Set up the client for the AWS S3 service
@@ -15,43 +13,20 @@
 parser.add_argument('--bucket_name', type=str, default='zkemail-zkey-chunks', help='Name of the S3 bucket')
 
 default_build_dir = 'build'
-default_circuit_name = 'twitter'
-default_prefix = 'vkey.json,email.wasm,verifier.sol'
-
 build_dir_env = os.getenv('BUILD_DIR')
-circuit_name_env = os.getenv('CIRCUIT_NAME')
-
 if build_dir_env is None:
     print("Warning: BUILD_DIR not found in circuit.env, defaulting to '{default_build_dir}'")
     build_dir_env = default_build_dir
 
-if circuit_name_env is None:
-    print("Warning: CIRCUIT_NAME not found in circuit.env, defaulting to '{default_circuit_name}'")
-    circuit_name_env = default_circuit_name
-
 parser.add_argument('--build-dir', type=str, default=build_dir_env, help='Name of the build directory directory with the circuitname/ folder')
-parser.add_argument('--circuit-name', type=str, default=circuit_name_env, help='Name of the circuit (i.e. the foldername in build_dir/)')
-parser.add_argument('--prefix', type=str, default=default_prefix, help='Comma-seperated prefixes to upload without compression')
 args = parser.parse_args()
 bucket_name = args.bucket_name
 build_dir = args.build_dir
-circuit_name = args.circuit_name
-prefix_to_tar = args.circuit_name + ".zkey"
-
-prefixes = args.prefix.split(',')
-prefixes.append(circuit_name + '_js')
-prefixes.append(circuit_name + '_cpp')
-
-dirs = [os.path.join(build_dir, "")]
 
 # Get the latest commit hash
 commit_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()
 
 # Set the name of the remote directory and the AWS bucket
-# source = '~/Documents/projects/zk-email-verify'
-# source = '.'
-# zkey_dir = source + '/{build_dir}/{circuit_name}/'
-# wasm_dir = source + '/{build_dir}/{circuit_name}/{circuit_name}_js/'
 def upload_to_s3(filename, dir=""):
     if os.path.isfile(os.path.join(dir, filename)):
         with open(os.path.join(dir, filename), 'rb') as file:
@@ -75,37 +50,8 @@ def upload_to_s3(filename, dir=""):
         print(f"Skipping {os.path.join(dir, filename)} as it is not a valid file or directory.")
 
 
-# Loop through the files in the remote directory
-for dir in dirs:
-    print("Searching for files in: ", dir)
-    for file in os.listdir(dir):
-        # Check if the file matches the pattern
-        if file.startswith(prefix_to_tar):
-            source_file_path = dir + file
-            upload_to_s3(file, dir) # Uncompressed file
-
-            # Make a .gz file
-            print("Compressing .gz: ", source_file_path)
-            gz_file = file + ".gz"
-            with open(source_file_path, 'rb') as f_in, gzip.open(gz_file, 'wb') as f_out:
-                f_out.write(f_in.read())
-            gz_file_name = file + '.gz'
-            # Upload the zip file to the AWS bucket, overwriting any existing file with the same name
-            upload_to_s3(gz_file)
-
-            # Create a .tar.gz file for the file
-            tar_file_name = file + '.tar.gz'
-            print("Compressing .tar.gz: ", source_file_path)
-            with tarfile.open(tar_file_name, 'w:gz') as tar_file:
-                tar_file.add(source_file_path,
-                             arcname=os.path.basename(source_file_path))
-
-            # Upload the .tar.gz file to the AWS bucket, overwriting any existing file with the same name
-            upload_to_s3(tar_file_name)
-
-            os.remove(tar_file_name)
-            os.remove(gz_file_name)
-        # If file starts with any one of the prefixes (including js/cpp directories)
-        if any(file.startswith(prefix) for prefix in prefixes):
-            # Upload the directory to the AWS bucket, overwriting any existing file with the same name
-            upload_to_s3(file, dir)
+# Loop through the files in the artifacts directory
+dir = os.path.join(build_dir, "artifacts")
+print("Uploading for files in: ", dir)
+for file in os.listdir(dir):
+    upload_to_s3(file, dir)
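
For context, a minimal sketch (not part of the patch) of the uncompressed upload flow the simplified script now performs: the bucket name, the build/artifacts layout, and the overwrite-on-upload behaviour come from the diff above, while the helper name upload_dir_uncompressed and the use of bare file names as S3 object keys are assumptions for illustration, since the body of upload_to_s3 is elided here.

# Sketch only -- not part of the commit. Assumes AWS credentials are configured
# for boto3 and that object keys are simply the file names (the real key layout
# lives in the elided body of upload_to_s3 above).
import os
import boto3

def upload_dir_uncompressed(bucket_name: str, artifacts_dir: str) -> None:
    """Upload every regular file in artifacts_dir to S3 as-is, with no .gz/.tar.gz copies."""
    s3 = boto3.client('s3')
    for name in os.listdir(artifacts_dir):
        path = os.path.join(artifacts_dir, name)
        if not os.path.isfile(path):
            print(f"Skipping {path} as it is not a valid file.")
            continue
        # Overwrites any existing object with the same key, mirroring the script.
        s3.upload_file(path, bucket_name, name)
        print(f"Uploaded {path} to s3://{bucket_name}/{name}")

if __name__ == '__main__':
    # Hypothetical invocation mirroring the patch defaults.
    upload_dir_uncompressed('zkemail-zkey-chunks', os.path.join('build', 'artifacts'))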