Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(aws-s3-storage): set ACL of tarball files #249

Merged
merged 22 commits into from
May 2, 2021
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 15 additions & 4 deletions plugins/aws-s3-storage/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ Based on [`verdaccio-s3-storage`](https://github.com/Remitly/verdaccio-s3-storag

**See it in action** in our [Docker + LocalStack + Verdaccio 4 + S3 Plugin example](https://github.com/verdaccio/docker-examples/tree/master/amazon-s3-docker-example).

### Requirements
## Requirements

* AWS Account
* Verdaccio server (4.0) (for 3.x use `verdaccio-s3-storage` instead)
Expand Down Expand Up @@ -47,6 +47,7 @@ store:
region: us-west-2 # optional, will use aws s3's default behavior if not specified
endpoint: https://{service}.{region}.amazonaws.com # optional, will use aws s3's default behavior if not specified
s3ForcePathStyle: false # optional, will use path style URLs for S3 objects
tarballACL: private # optional, use public-read to work with CDN like Amazon CloudFront
accessKeyId: your-access-key-id # optional, aws accessKeyId for private S3 bucket
secretAccessKey: your-secret-access-key # optional, aws secretAccessKey for private S3 bucket
```
Expand Down Expand Up @@ -82,16 +83,26 @@ packages:
storage: 'public'
```

# Developer Testing #
### Specify ACL of Tarball Files

You can specify ACL of tarball files in S3 by the *tarballACL* configuration, set to 'private' by default. To enable S3 integrated CDN service (Amazon CloudFront for example), set *tarballACL* to 'public-read' to grant tarball files anonymous read permission.

```yaml
store:
aws-s3-storage:
tarballACL: public-read
```

## Developer Testing

In case of local testing, this project can be used self-sufficiently. Four main ingredients are as follows:

* config.yaml, see [verdaccio documentation](https://verdaccio.org/docs/en/configuration.html)
* The provided Dockerfile allows testing the plugin without needing the main verdaccio application
* The provided docker-compose also provides minio in orchestration as a local substitute for the S3 backend
* registry.envs set as follows. This file does not exist on the repo and should be generated after cloning the project.
* registry.envs set as follows. This file does not exist on the repo and should be generated after cloning the project.

```
```
AWS_ACCESS_KEY_ID=foobar
AWS_SECRET_ACCESS_KEY=1234567e
AWS_DEFAULT_REGION=eu-central-1
Expand Down
1 change: 1 addition & 0 deletions plugins/aws-s3-storage/src/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ export interface S3Config extends Config {
endpoint?: string;
region?: string;
s3ForcePathStyle?: boolean;
tarballACL?: string;
accessKeyId?: string;
secretAccessKey?: string;
}
41 changes: 26 additions & 15 deletions plugins/aws-s3-storage/src/s3PackageManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,22 @@ export default class S3PackageManager implements ILocalPackageManager {
private readonly packageName: string;
private readonly s3: S3;
private readonly packagePath: string;
private readonly tarballACL: string;

public constructor(config: S3Config, packageName: string, logger: Logger) {
this.config = config;
this.packageName = packageName;
this.logger = logger;
const { endpoint, region, s3ForcePathStyle, accessKeyId, secretAccessKey } = config;
const { endpoint, region, s3ForcePathStyle, tarballACL, accessKeyId, secretAccessKey } = config;

this.tarballACL = tarballACL || 'private';

this.s3 = new S3({ endpoint, region, s3ForcePathStyle, accessKeyId, secretAccessKey });
this.logger.trace({ packageName }, 's3: [S3PackageManager constructor] packageName @{packageName}');
this.logger.trace({ endpoint }, 's3: [S3PackageManager constructor] endpoint @{endpoint}');
this.logger.trace({ region }, 's3: [S3PackageManager constructor] region @{region}');
this.logger.trace({ s3ForcePathStyle }, 's3: [S3PackageManager constructor] s3ForcePathStyle @{s3ForcePathStyle}');
this.logger.trace({ tarballACL }, 's3: [S3PackageManager constructor] tarballACL @{tarballACL}');
this.logger.trace({ accessKeyId }, 's3: [S3PackageManager constructor] accessKeyId @{accessKeyId}');
this.logger.trace({ secretAccessKey }, 's3: [S3PackageManager constructor] secretAccessKey @{secretAccessKey}');

Expand Down Expand Up @@ -84,9 +88,9 @@ export default class S3PackageManager implements ILocalPackageManager {
},
(err, response) => {
if (err) {
this.logger.error({ err }, 's3: [S3PackageManager _getData] aws @{err}');
this.logger.debug({ err }, 's3: [S3PackageManager _getData] aws @{err}');
const error: HttpError = convertS3Error(err);
this.logger.error({ error }, 's3: [S3PackageManager _getData] @{error}');
this.logger.debug({ error }, 's3: [S3PackageManager _getData] @{error}');

reject(error);
return;
Expand Down Expand Up @@ -159,9 +163,9 @@ export default class S3PackageManager implements ILocalPackageManager {
if (is404Error(s3Err)) {
this.logger.debug({ s3Err }, 's3: [S3PackageManager createPackage] 404 package not found]');
this.savePackage(name, value, callback);
this.logger.trace({ data }, 's3: [S3PackageManager createPackage] package saved data from s3: @data');
this.logger.trace({ data }, 's3: [S3PackageManager createPackage] package saved data from s3: @{data}');
} else {
this.logger.error({ s3Err }, 's3: [S3PackageManager createPackage error] @s3Err');
this.logger.error({ s3Err }, 's3: [S3PackageManager createPackage error] @{s3Err}');
callback(s3Err);
}
} else {
Expand All @@ -177,7 +181,7 @@ export default class S3PackageManager implements ILocalPackageManager {
{ name, packageName: this.packageName },
's3: [S3PackageManager savePackage init] name @{name}/@{packageName}'
);
this.logger.trace({ value }, 's3: [S3PackageManager savePackage ] init value @value');
this.logger.trace({ value }, 's3: [S3PackageManager savePackage ] init value @{value}');
this.s3.putObject(
{
// TODO: not sure whether save the object with spaces will increase storage size
Expand All @@ -199,12 +203,11 @@ export default class S3PackageManager implements ILocalPackageManager {
const data: Package = (await this._getData()) as Package;
this.logger.trace(
{ data, packageName: this.packageName },
's3: [S3PackageManager readPackage] packageName: @{packageName} / data @data'
's3: [S3PackageManager readPackage] packageName: @{packageName} / data @{data}'
);
callback(null, data);
} catch (err) {
this.logger.error({ err }, 's3: [S3PackageManager readPackage] @{err}');

this.logger.debug({ err }, 's3: [S3PackageManager readPackage] @{err}');
callback(err);
}
})();
Expand Down Expand Up @@ -242,15 +245,20 @@ export default class S3PackageManager implements ILocalPackageManager {
err => {
if (err) {
const convertedErr = convertS3Error(err);
this.logger.error({ convertedErr }, 's3: [S3PackageManager writeTarball headObject] @convertedErr');
this.logger.debug({ convertedErr }, 's3: [S3PackageManager writeTarball headObject] @{convertedErr}');

if (is404Error(convertedErr) === false) {
this.logger.error({ convertedErr }, 's3: [S3PackageManager writeTarball headObject] non a 404 emit error');
this.logger.error(
{ convertedErr },
's3: [S3PackageManager writeTarball headObject] @{convertedErr} non a 404 emit error'
);

uploadStream.emit('error', convertedErr);
} else {
this.logger.debug('s3: [S3PackageManager writeTarball managedUpload] init stream');
const managedUpload = this.s3.upload(Object.assign({}, baseS3Params, { Body: uploadStream }));
const managedUpload = this.s3.upload(
Object.assign({}, baseS3Params, { Body: uploadStream, ACL: this.tarballACL })
);
favoyang marked this conversation as resolved.
Show resolved Hide resolved
// NOTE: there's a managedUpload.promise, but it doesn't seem to work
const promise = new Promise((resolve): void => {
this.logger.debug('s3: [S3PackageManager writeTarball managedUpload] send');
Expand All @@ -264,7 +272,10 @@ export default class S3PackageManager implements ILocalPackageManager {

uploadStream.emit('error', error);
} else {
this.logger.trace({ data }, 's3: [S3PackageManager writeTarball managedUpload send] response @data');
this.logger.trace(
{ data },
's3: [S3PackageManager writeTarball managedUpload send] response @{data}'
);

resolve();
}
Expand Down Expand Up @@ -318,7 +329,7 @@ export default class S3PackageManager implements ILocalPackageManager {
} finally {
this.logger.debug(
{ name, baseS3Params },
's3: [S3PackageManager writeTarball uploadStream abort] s3.deleteObject @{name}/@baseS3Params'
's3: [S3PackageManager writeTarball uploadStream abort] s3.deleteObject @{name}/@{baseS3Params}'
);

this.s3.deleteObject(baseS3Params);
Expand Down Expand Up @@ -365,7 +376,7 @@ export default class S3PackageManager implements ILocalPackageManager {
this.logger.trace({ headers }, 's3: [S3PackageManager readTarball httpHeaders event] headers @headers');
this.logger.trace(
{ statusCode },
's3: [S3PackageManager readTarball httpHeaders event] statusCode @statusCode'
's3: [S3PackageManager readTarball httpHeaders event] statusCode @{statusCode}'
);
if (statusCode !== HTTP_STATUS.NOT_FOUND) {
if (headers[HEADERS.CONTENT_LENGTH]) {
Expand Down