From c25b2e4e108e25f0ef245e4a12d4c44a5d6f9e39 Mon Sep 17 00:00:00 2001 From: Robert Stupp Date: Tue, 17 Sep 2024 17:01:38 +0200 Subject: [PATCH] Richer access checks This change introduces the ability to distinguish individual checks by the (external) API being used (Nessie, Iceberg) and, for Nessie Catalog (Iceberg REST), by information about the kind(s) of changes being applied. The individual changes that can be distinguished are: * Catalog API operation * Metadata update actions, with special actions with regard to the `location` property * Snapshot operation * Snapshot summary extracts (for example whether a snapshot added or removed data/delete files) All new attributes can be retrieved from the existing `Check` type via new attributes exposed via `AccessCheckMeta` holding the source API, "for write" flag and per-content-key flags. The flags represent the mentioned "individual changes". Fixes #9559 (and more) --- CHANGELOG.md | 3 + .../formats/iceberg/nessie/CatalogOps.java | 75 +++ .../IcebergTableMetadataUpdateState.java | 10 + .../IcebergViewMetadataUpdateState.java | 10 + .../iceberg/rest/IcebergMetadataUpdate.java | 123 +++++ catalog/service/common/build.gradle.kts | 2 + .../catalog/service/api/CatalogService.java | 16 +- .../service/impl/CatalogServiceImpl.java | 95 ++-- .../service/impl/MultiTableUpdate.java | 30 +- .../service/impl/AbstractCatalogService.java | 49 +- .../service/impl/TestCatalogServiceImpl.java | 44 +- .../service/rest/AbstractCatalogResource.java | 17 +- .../rest/IcebergApiV1GenericResource.java | 9 +- .../rest/IcebergApiV1NamespaceResource.java | 107 ++-- .../rest/IcebergApiV1ResourceBase.java | 49 +- .../rest/IcebergApiV1TableResource.java | 29 +- .../rest/IcebergApiV1ViewResource.java | 16 +- .../service/rest/IcebergS3SignParams.java | 10 +- .../service/rest/NessieCatalogResource.java | 22 +- .../service/rest/TestIcebergS3SignParams.java | 46 +- .../jersey/AuthorizerExtension.java | 3 +- .../jaxrs/ext/AuthorizerExtension.java | 3 +- 
.../server/authz/CelAuthorizer.java | 5 +- .../server/authz/CelBatchAccessChecker.java | 30 +- .../server/authz/QuarkusAuthorizer.java | 7 +- .../server/authz/TestCELAuthZ.java | 11 +- servers/quarkus-server/build.gradle.kts | 2 + .../server/authz/MockedAuthorizer.java | 110 +++++ .../server/authz/TestAuthzMeta.java | 460 ++++++++++++++++++ .../server/catalog/Catalogs.java | 14 +- .../services/rest/RestApiContext.java | 25 + .../services/rest/RestConfigResource.java | 4 +- .../services/rest/RestContentResource.java | 10 +- .../services/rest/RestDiffResource.java | 3 +- .../services/rest/RestNamespaceResource.java | 11 +- .../services/rest/RestTreeResource.java | 6 +- .../services/rest/RestV2ConfigResource.java | 4 +- .../services/rest/RestV2TreeResource.java | 18 +- .../authz/AbstractBatchAccessChecker.java | 51 +- .../services/authz/ApiContext.java | 30 ++ .../services/authz/Authorizer.java | 3 +- .../services/authz/BatchAccessChecker.java | 27 + .../projectnessie/services/authz/Check.java | 63 ++- .../authz/RetriableAccessChecker.java | 18 +- .../projectnessie/services/cel/CELUtil.java | 2 + .../services/impl/BaseApiImpl.java | 15 +- .../services/impl/ConfigApiImpl.java | 10 +- .../services/impl/ContentApiImpl.java | 33 +- .../services/impl/DiffApiImpl.java | 9 +- .../services/impl/NamespaceApiImpl.java | 36 +- .../services/impl/TreeApiImpl.java | 31 +- .../services/spi/ContentService.java | 11 +- .../services/spi/NamespaceService.java | 6 +- .../services/spi/TreeService.java | 4 +- .../authz/TestBatchAccessChecker.java | 13 +- .../authz/TestRetriableAccessChecker.java | 9 +- .../services/impl/TestNamespaceApi.java | 6 +- .../impl/AbstractTestAccessChecks.java | 12 +- .../services/impl/AbstractTestCommitLog.java | 5 +- .../services/impl/AbstractTestContents.java | 11 +- .../services/impl/AbstractTestEntries.java | 4 +- .../impl/AbstractTestInvalidRefs.java | 3 +- .../impl/AbstractTestMergeTransplant.java | 16 +- .../services/impl/AbstractTestNamespace.java | 39 
+- .../services/impl/BaseTestServiceImpl.java | 31 +- site/in-dev/authorization.md | 80 +++ .../combined/CombinedClientBuilder.java | 10 +- .../projectnessie/versioned/RequestMeta.java | 84 ++++ 68 files changed, 1854 insertions(+), 306 deletions(-) create mode 100644 catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java create mode 100644 servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java create mode 100644 servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java create mode 100644 servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java create mode 100644 servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java create mode 100644 versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 8049d4c0d33..f82e062c8f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,9 @@ as necessary. Empty sections will not end in the release notes. ### New Features +- Access check SPI has been enhanced to provide richer information in the `Check` type about the receiving + API (Nessie REST or Iceberg REST) and about the individual changes, especially during a commit operation. + ### Changes ### Deprecations diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java new file mode 100644 index 00000000000..437b616fc94 --- /dev/null +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.catalog.formats.iceberg.nessie; + +/** + * Enum serving as a "constants pool" for the string values passed to Nessie access control checks. + */ +public enum CatalogOps { + // Iceberg metadata updates + META_ADD_VIEW_VERSION, + META_SET_CURRENT_VIEW_VERSION, + META_SET_STATISTICS, + META_REMOVE_STATISTICS, + META_SET_PARTITION_STATISTICS, + META_REMOVE_PARTITION_STATISTICS, + META_ASSIGN_UUID, + META_ADD_SCHEMA, + META_SET_CURRENT_SCHEMA, + META_ADD_PARTITION_SPEC, + META_SET_DEFAULT_PARTITION_SPEC, + META_ADD_SNAPSHOT, + META_ADD_SORT_ORDER, + META_SET_DEFAULT_SORT_ORDER, + META_SET_LOCATION, + META_SET_PROPERTIES, + META_REMOVE_PROPERTIES, + META_REMOVE_LOCATION_PROPERTY, + META_SET_SNAPSHOT_REF, + META_REMOVE_SNAPSHOT_REF, + META_UPGRADE_FORMAT_VERSION, + + // Catalog operations + CATALOG_CREATE_ENTITY, + CATALOG_UPDATE_ENTITY, + CATALOG_DROP_ENTITY, + CATALOG_RENAME_ENTITY_FROM, + CATALOG_RENAME_ENTITY_TO, + CATALOG_REGISTER_ENTITY, + CATALOG_UPDATE_MULTIPLE, + CATALOG_S3_SIGN, + + // From Iceberg's snapshot summary + SNAP_ADD_DATA_FILES, + SNAP_DELETE_DATA_FILES, + SNAP_ADD_DELETE_FILES, + SNAP_ADD_EQUALITY_DELETE_FILES, + SNAP_ADD_POSITION_DELETE_FILES, + SNAP_REMOVE_DELETE_FILES, + SNAP_REMOVE_EQUALITY_DELETE_FILES, + SNAP_REMOVE_POSITION_DELETE_FILES, + SNAP_ADDED_RECORDS, + SNAP_DELETED_RECORDS, + SNAP_ADDED_POSITION_DELETES, + SNAP_DELETED_POSITION_DELETES, + SNAP_ADDED_EQUALITY_DELETES, + SNAP_DELETED_EQUALITY_DELETES, + SNAP_REPLACE_PARTITIONS, + SNAP_OP_APPEND, + SNAP_OP_REPLACE, + SNAP_OP_OVERWRITE, + 
SNAP_OP_DELETE, +} diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java index cb821cfa015..ffa96880fc1 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java @@ -20,6 +20,7 @@ import java.time.Instant; import java.util.ArrayList; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -59,6 +60,7 @@ public class IcebergTableMetadataUpdateState { private final Set addedSchemaIds = new HashSet<>(); private final Set addedSpecIds = new HashSet<>(); private final Set addedOrderIds = new HashSet<>(); + private final Set catalogOps = EnumSet.noneOf(CatalogOps.class); public IcebergTableMetadataUpdateState( NessieTableSnapshot snapshot, ContentKey key, boolean tableExists) { @@ -72,6 +74,14 @@ public NessieTableSnapshot.Builder builder() { return builder; } + public void addCatalogOp(CatalogOps op) { + catalogOps.add(op); + } + + public Set catalogOps() { + return catalogOps; + } + public NessieTableSnapshot snapshot() { return snapshot; } diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java index dce42a239e6..5d13e777637 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java @@ -19,6 +19,7 @@ import 
java.time.Instant; import java.util.ArrayList; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -50,6 +51,7 @@ public class IcebergViewMetadataUpdateState { private final List addedSnapshots = new ArrayList<>(); private final Set addedSchemaIds = new HashSet<>(); private final Set addedVersionIds = new HashSet<>(); + private final Set catalogOps = EnumSet.noneOf(CatalogOps.class); public IcebergViewMetadataUpdateState( NessieViewSnapshot snapshot, ContentKey key, boolean viewExists) { @@ -63,6 +65,14 @@ public NessieViewSnapshot.Builder builder() { return builder; } + public void addCatalogOp(CatalogOps op) { + catalogOps.add(op); + } + + public Set catalogOps() { + return catalogOps; + } + public NessieViewSnapshot snapshot() { return snapshot; } diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java index 0ae81163760..5d64bc992bf 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java @@ -33,6 +33,7 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -46,6 +47,7 @@ import org.projectnessie.catalog.formats.iceberg.meta.IcebergStatisticsFile; import org.projectnessie.catalog.formats.iceberg.meta.IcebergViewRepresentation; import org.projectnessie.catalog.formats.iceberg.meta.IcebergViewVersion; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.nessie.IcebergTableMetadataUpdateState; import 
org.projectnessie.catalog.formats.iceberg.nessie.IcebergViewMetadataUpdateState; import org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg; @@ -130,11 +132,13 @@ static UpgradeFormatVersion upgradeFormatVersion(int formatVersion) { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_UPGRADE_FORMAT_VERSION); NessieModelIceberg.upgradeFormatVersion(formatVersion(), state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_UPGRADE_FORMAT_VERSION); NessieModelIceberg.upgradeFormatVersion(formatVersion(), state.snapshot(), state.builder()); } } @@ -165,11 +169,19 @@ interface RemoveProperties extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PROPERTIES); + if (removals().contains("location")) { + state.addCatalogOp(CatalogOps.META_REMOVE_LOCATION_PROPERTY); + } NessieModelIceberg.removeProperties(this, state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PROPERTIES); + if (removals().contains("location")) { + state.addCatalogOp(CatalogOps.META_REMOVE_LOCATION_PROPERTY); + } NessieModelIceberg.removeProperties(this, state.snapshot(), state.builder()); } } @@ -184,6 +196,7 @@ interface AddViewVersion extends IcebergMetadataUpdate { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_VIEW_VERSION); NessieModelIceberg.addViewVersion(this, state); } @@ -214,6 +227,7 @@ interface SetCurrentViewVersion extends IcebergMetadataUpdate { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_VIEW_VERSION); NessieModelIceberg.setCurrentViewVersion(this, state); } @@ -234,6 +248,7 
@@ interface SetStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().statisticsFiles(singleton(icebergStatisticsFileToNessie(statistics()))); @@ -251,6 +266,7 @@ interface RemoveStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().statisticsFiles(emptyList()); @@ -268,6 +284,7 @@ interface SetPartitionStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PARTITION_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == partitionStatistics().snapshotId()) { state @@ -289,6 +306,7 @@ interface RemovePartitionStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PARTITION_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().partitionStatisticsFiles(emptyList()); @@ -305,11 +323,13 @@ interface AssignUUID extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ASSIGN_UUID); NessieModelIceberg.assignUUID(this, state.snapshot()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ASSIGN_UUID); NessieModelIceberg.assignUUID(this, state.snapshot()); } @@ -329,11 +349,13 @@ 
interface AddSchema extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SCHEMA); NessieModelIceberg.addSchema(this, state); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SCHEMA); NessieModelIceberg.addSchema(this, state); } @@ -352,12 +374,14 @@ interface SetCurrentSchema extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_SCHEMA); NessieModelIceberg.setCurrentSchema( this, state.lastAddedSchemaId(), state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_SCHEMA); NessieModelIceberg.setCurrentSchema( this, state.lastAddedSchemaId(), state.snapshot(), state.builder()); } @@ -376,6 +400,7 @@ interface AddPartitionSpec extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_PARTITION_SPEC); NessieModelIceberg.addPartitionSpec(this, state); } @@ -403,6 +428,7 @@ interface SetDefaultPartitionSpec extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_DEFAULT_PARTITION_SPEC); NessieModelIceberg.setDefaultPartitionSpec(this, state); } @@ -420,6 +446,89 @@ interface AddSnapshot extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SNAPSHOT); + Map summary = snapshot().summary(); + + String v = summary.get("added-data-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_DATA_FILES); + } + v = summary.get("deleted-data-files"); + if (v != null && Long.parseLong(v) > 0) { + 
state.addCatalogOp(CatalogOps.SNAP_DELETE_DATA_FILES); + } + v = summary.get("added-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_DELETE_FILES); + } + v = summary.get("added-equality-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_EQUALITY_DELETE_FILES); + } + v = summary.get("added-position-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_POSITION_DELETE_FILES); + } + v = summary.get("removed-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_DELETE_FILES); + } + v = summary.get("removed-equality-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_EQUALITY_DELETE_FILES); + } + v = summary.get("removed-position-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_POSITION_DELETE_FILES); + } + v = summary.get("added-records"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_RECORDS); + } + v = summary.get("deleted-records"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_RECORDS); + } + v = summary.get("added-position-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_POSITION_DELETES); + } + v = summary.get("deleted-position-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_POSITION_DELETES); + } + v = summary.get("added-equality-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_EQUALITY_DELETES); + } + v = summary.get("deleted-equality-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_EQUALITY_DELETES); + } + v = summary.get("replace-partitions"); + if (Boolean.parseBoolean(v)) { + 
state.addCatalogOp(CatalogOps.SNAP_REPLACE_PARTITIONS); + } + v = summary.get("operation"); + if (v != null) { + switch (v.toLowerCase(Locale.ROOT)) { + case "append": + state.addCatalogOp(CatalogOps.SNAP_OP_APPEND); + break; + case "replace": + state.addCatalogOp(CatalogOps.SNAP_OP_REPLACE); + break; + case "overwrite": + state.addCatalogOp(CatalogOps.SNAP_OP_OVERWRITE); + break; + case "delete": + state.addCatalogOp(CatalogOps.SNAP_OP_DELETE); + break; + default: + break; + } + } + NessieModelIceberg.addSnapshot(this, state); } } @@ -433,6 +542,7 @@ interface AddSortOrder extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SORT_ORDER); NessieModelIceberg.addSortOrder(this, state); } @@ -466,6 +576,7 @@ interface SetDefaultSortOrder extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_DEFAULT_SORT_ORDER); NessieModelIceberg.setDefaultSortOrder(this, state); } @@ -489,6 +600,7 @@ default boolean trusted() { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); if (trusted()) { NessieModelIceberg.setLocation(this, state.builder()); } @@ -496,6 +608,7 @@ default void applyToTable(IcebergTableMetadataUpdateState state) { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); if (trusted()) { NessieModelIceberg.setLocation(this, state.builder()); } @@ -516,11 +629,19 @@ interface SetProperties extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PROPERTIES); + if (updates().containsKey("location")) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); + } NessieModelIceberg.setProperties(this, state.snapshot(), state.builder()); } 
@Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PROPERTIES); + if (updates().containsKey("location")) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); + } NessieModelIceberg.setProperties(this, state.snapshot(), state.builder()); } @@ -554,6 +675,7 @@ interface SetSnapshotRef extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_SNAPSHOT_REF); // NOP - This class is used for JSON deserialization only. // Nessie has catalog-level branches and tags. } @@ -569,6 +691,7 @@ interface RemoveSnapshotRef extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_SNAPSHOT_REF); // NOP - This class is used for JSON deserialization only. // Nessie has catalog-level branches and tags. } diff --git a/catalog/service/common/build.gradle.kts b/catalog/service/common/build.gradle.kts index 159084dfa98..fccd247bfe3 100644 --- a/catalog/service/common/build.gradle.kts +++ b/catalog/service/common/build.gradle.kts @@ -22,6 +22,8 @@ dependencies { implementation(project(":nessie-model")) implementation(project(":nessie-catalog-files-api")) implementation(project(":nessie-catalog-model")) + implementation(project(":nessie-services")) + implementation(project(":nessie-versioned-spi")) implementation(project(":nessie-versioned-storage-common")) implementation(project(":nessie-tasks-api")) implementation(project(":nessie-catalog-service-transfer")) diff --git a/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java b/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java index 7a65383ccca..208e455d8f2 100644 --- a/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java +++ 
b/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java @@ -31,6 +31,8 @@ import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Reference; +import org.projectnessie.services.authz.ApiContext; +import org.projectnessie.versioned.RequestMeta; public interface CatalogService { @@ -41,7 +43,8 @@ public interface CatalogService { * more. * @param key content key of the table or view * @param expectedType The expected content-type. - * @param forWrite indicates whether access checks shall be performed for a write/update request + * @param requestMeta additional information for access checks + * @param apiContext * @return The response is either a response object or callback to produce the result. The latter * is useful to return results that are quite big, for example Iceberg manifest lists or * manifest files. @@ -50,20 +53,25 @@ CompletionStage retrieveSnapshot( SnapshotReqParams reqParams, ContentKey key, @Nullable Content.Type expectedType, - boolean forWrite) + RequestMeta requestMeta, + ApiContext apiContext) throws NessieNotFoundException; Stream>> retrieveSnapshots( SnapshotReqParams reqParams, List keys, - Consumer effectiveReferenceConsumer) + Consumer effectiveReferenceConsumer, + RequestMeta requestMeta, + ApiContext apiContext) throws NessieNotFoundException; CompletionStage> commit( ParsedReference reference, CatalogCommit commit, SnapshotReqParams reqParams, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest, + ApiContext apiContext) throws BaseNessieClientServerException; interface CatalogUriResolver { diff --git a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java index d9d6e2d5890..cc44a774c00 100644 --- 
a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java +++ b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java @@ -103,6 +103,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.nessie.tasks.api.TasksService; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.impl.ContentApiImpl; @@ -110,6 +111,8 @@ import org.projectnessie.services.spi.ContentService; import org.projectnessie.services.spi.TreeService; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; +import org.projectnessie.versioned.RequestMeta.RequestMetaBuilder; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.storage.common.persist.ObjId; import org.projectnessie.versioned.storage.common.persist.Persist; @@ -122,9 +125,11 @@ public class CatalogServiceImpl implements CatalogService { private static final Logger LOGGER = LoggerFactory.getLogger(CatalogServiceImpl.class); @Inject ObjectIO objectIO; - TreeService treeService; - ContentService contentService; - CatalogConfig catalogConfig; + @Inject ServerConfig serverConfig; + @Inject CatalogConfig catalogConfig; + @Inject VersionStore versionStore; + @Inject Authorizer authorizer; + @Inject AccessContext accessContext; @Inject Persist persist; @Inject TasksService tasksService; @Inject BackendExceptionMapper backendExceptionMapper; @@ -134,20 +139,12 @@ public class CatalogServiceImpl implements CatalogService { @Named("import-jobs") Executor executor; - public CatalogServiceImpl() { - this(null, null, null, null, null); + TreeService treeService(ApiContext apiContext) { + return new TreeApiImpl(serverConfig, versionStore, authorizer, accessContext, apiContext); } - @Inject 
- public CatalogServiceImpl( - ServerConfig serverConfig, - CatalogConfig catalogConfig, - VersionStore store, - Authorizer authorizer, - AccessContext accessContext) { - this.catalogConfig = catalogConfig; - this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(serverConfig, store, authorizer, accessContext); + ContentService contentService(ApiContext apiContext) { + return new ContentApiImpl(serverConfig, versionStore, authorizer, accessContext, apiContext); } private IcebergStuff icebergStuff() { @@ -164,7 +161,9 @@ private IcebergStuff icebergStuff() { public Stream>> retrieveSnapshots( SnapshotReqParams reqParams, List keys, - Consumer effectiveReferenceConsumer) + Consumer effectiveReferenceConsumer, + RequestMeta requestMeta, + ApiContext apiContext) throws NessieNotFoundException { ParsedReference reference = reqParams.ref(); @@ -175,8 +174,9 @@ public Stream>> retrieveSnapshots( keys); GetMultipleContentsResponse contentResponse = - contentService.getMultipleContents( - reference.name(), reference.hashWithRelativeSpec(), keys, false, false); + contentService(apiContext) + .getMultipleContents( + reference.name(), reference.hashWithRelativeSpec(), keys, false, requestMeta); IcebergStuff icebergStuff = icebergStuff(); @@ -219,7 +219,8 @@ public CompletionStage retrieveSnapshot( SnapshotReqParams reqParams, ContentKey key, @Nullable Content.Type expectedType, - boolean forWrite) + RequestMeta requestMeta, + ApiContext apiContext) throws NessieNotFoundException { ParsedReference reference = reqParams.ref(); @@ -231,8 +232,9 @@ public CompletionStage retrieveSnapshot( key); ContentResponse contentResponse = - contentService.getContent( - key, reference.name(), reference.hashWithRelativeSpec(), false, forWrite); + contentService(apiContext) + .getContent( + key, reference.name(), reference.hashWithRelativeSpec(), false, requestMeta); Content content = contentResponse.getContent(); if 
(expectedType != null && !content.getType().equals(expectedType)) { throw new NessieContentNotFoundException(key, reference.name()); @@ -360,16 +362,26 @@ private SnapshotResponse snapshotViewResponse( CompletionStage commit( ParsedReference reference, CatalogCommit commit, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest, + ApiContext apiContext) throws BaseNessieClientServerException { + RequestMetaBuilder requestMeta = RequestMeta.apiWrite(); + List allKeys = + commit.getOperations().stream().map(CatalogOperation::getKey).collect(toList()); + for (ContentKey key : allKeys) { + requestMeta.addKeyAction(key, apiRequest); + } + GetMultipleContentsResponse contentsResponse = - contentService.getMultipleContents( - reference.name(), - reference.hashWithRelativeSpec(), - commit.getOperations().stream().map(CatalogOperation::getKey).collect(toList()), - false, - true); + contentService(apiContext) + .getMultipleContents( + reference.name(), + reference.hashWithRelativeSpec(), + allKeys, + false, + requestMeta.build()); checkArgument( requireNonNull(contentsResponse.getEffectiveReference()) instanceof Branch, @@ -388,7 +400,8 @@ CompletionStage commit( IcebergStuff icebergStuff = icebergStuff(); - MultiTableUpdate multiTableUpdate = new MultiTableUpdate(treeService, target); + MultiTableUpdate multiTableUpdate = + new MultiTableUpdate(treeService(apiContext), target, requestMeta); LOGGER.trace( "Executing commit containing {} operations against '{}@{}'", @@ -496,9 +509,11 @@ public CompletionStage> commit( ParsedReference reference, CatalogCommit commit, SnapshotReqParams reqParams, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest, + ApiContext apiContext) throws BaseNessieClientServerException { - return commit(reference, commit, commitMetaBuilder) + return commit(reference, commit, commitMetaBuilder, apiRequest, apiContext) // Finally, transform each MultiTableUpdate.SingleTableUpdate to a SnapshotResponse 
.thenApply( updates -> @@ -579,13 +594,13 @@ private CompletionStage applyIcebergTableCommitOperation( return new IcebergTableMetadataUpdateState( nessieSnapshot, op.getKey(), content != null) .checkRequirements(icebergOp.requirements()) - .applyUpdates(pruneUpdates(icebergOp, content != null)) - .snapshot(); + .applyUpdates(pruneUpdates(icebergOp, content != null)); // TODO handle the case when nothing changed -> do not update // e.g. when adding a schema/spec/order that already exists }) .thenApply( - nessieSnapshot -> { + updateState -> { + NessieTableSnapshot nessieSnapshot = updateState.snapshot(); String metadataJsonLocation = icebergMetadataJsonLocation(nessieSnapshot.icebergLocation()); IcebergTableMetadata icebergMetadata = @@ -597,7 +612,8 @@ private CompletionStage applyIcebergTableCommitOperation( nessieSnapshot = nessieSnapshot.withId(objIdToNessieId(snapshotId)); SingleTableUpdate singleTableUpdate = - new SingleTableUpdate(nessieSnapshot, updated, icebergOp.getKey()); + new SingleTableUpdate( + nessieSnapshot, updated, icebergOp.getKey(), updateState.catalogOps()); multiTableUpdate.addUpdate(op.getKey(), singleTableUpdate); return singleTableUpdate; }); @@ -647,13 +663,13 @@ private CompletionStage applyIcebergViewCommitOperation( return new IcebergViewMetadataUpdateState( nessieSnapshot, op.getKey(), content != null) .checkRequirements(icebergOp.requirements()) - .applyUpdates(pruneUpdates(icebergOp, content != null)) - .snapshot(); + .applyUpdates(pruneUpdates(icebergOp, content != null)); // TODO handle the case when nothing changed -> do not update // e.g. 
when adding a schema/spec/order that already exists }) .thenApply( - nessieSnapshot -> { + updateState -> { + NessieViewSnapshot nessieSnapshot = updateState.snapshot(); String metadataJsonLocation = icebergMetadataJsonLocation(nessieSnapshot.icebergLocation()); IcebergViewMetadata icebergMetadata = @@ -664,7 +680,8 @@ private CompletionStage applyIcebergViewCommitOperation( nessieSnapshot = nessieSnapshot.withId(objIdToNessieId(snapshotId)); SingleTableUpdate singleTableUpdate = - new SingleTableUpdate(nessieSnapshot, updated, icebergOp.getKey()); + new SingleTableUpdate( + nessieSnapshot, updated, icebergOp.getKey(), updateState.catalogOps()); multiTableUpdate.addUpdate(op.getKey(), singleTableUpdate); return singleTableUpdate; }); diff --git a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java index 33e7d7de3e8..a07492dc209 100644 --- a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java +++ b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java @@ -20,6 +20,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.model.snapshot.NessieEntitySnapshot; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; @@ -30,6 +33,7 @@ import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.Operation; import org.projectnessie.services.spi.TreeService; +import org.projectnessie.versioned.RequestMeta.RequestMetaBuilder; /** Maintains state across all individual updates of a commit. 
*/ final class MultiTableUpdate { @@ -40,11 +44,13 @@ final class MultiTableUpdate { private Map addedContentsMap; private Branch targetBranch; private boolean committed; + private final RequestMetaBuilder requestMeta; - MultiTableUpdate(TreeService treeService, Branch target) { + MultiTableUpdate(TreeService treeService, Branch target, RequestMetaBuilder requestMeta) { this.treeService = treeService; this.operations = ImmutableOperations.builder(); this.targetBranch = target; + this.requestMeta = requestMeta; } ImmutableOperations.Builder operations() { @@ -55,9 +61,21 @@ MultiTableUpdate commit() throws NessieConflictException, NessieNotFoundExceptio synchronized (this) { committed = true; if (!tableUpdates.isEmpty()) { + RequestMetaBuilder checkMeta = requestMeta; + for (SingleTableUpdate update : tableUpdates) { + checkMeta.addKeyActions( + update.key, + update.catalogOps.stream() + .map(CatalogOps::name) + .collect(Collectors.toUnmodifiableSet())); + } + CommitResponse commitResponse = treeService.commitMultipleOperations( - targetBranch().getName(), targetBranch.getHash(), operations.build()); + targetBranch().getName(), + targetBranch.getHash(), + operations.build(), + checkMeta.build()); addedContentsMap = commitResponse.getAddedContents() != null @@ -112,11 +130,17 @@ static final class SingleTableUpdate { final NessieEntitySnapshot snapshot; final Content content; final ContentKey key; + final Set catalogOps; - SingleTableUpdate(NessieEntitySnapshot snapshot, Content content, ContentKey key) { + SingleTableUpdate( + NessieEntitySnapshot snapshot, + Content content, + ContentKey key, + Set catalogOps) { this.snapshot = snapshot; this.content = content; this.key = key; + this.catalogOps = catalogOps; } } } diff --git a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java index f72fe94000f..5c2e7a57346 
100644 --- a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java +++ b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.projectnessie.api.v2.params.ParsedReference.parsedReference; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddPartitionSpec.addPartitionSpec; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSchema.addSchema; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSortOrder.addSortOrder; @@ -32,6 +33,7 @@ import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.nessie.combined.EmptyHttpHeaders.emptyHttpHeaders; import static org.projectnessie.services.authz.AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.net.URI; import java.time.Clock; @@ -86,10 +88,9 @@ import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; -import org.projectnessie.services.impl.ContentApiImpl; -import org.projectnessie.services.impl.TreeApiImpl; import org.projectnessie.services.rest.RestV2ConfigResource; import org.projectnessie.services.rest.RestV2TreeResource; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.storage.common.persist.Persist; import org.projectnessie.versioned.storage.testextension.NessiePersist; @@ -117,16 +118,21 @@ public abstract class AbstractCatalogService { protected ObjectIO objectIO; protected 
CatalogServiceImpl catalogService; protected NessieApiV2 api; - protected TreeApiImpl treeService; - protected ContentApiImpl contentService; + + protected ServerConfig serverConfig; + protected VersionStore versionStore; + protected Authorizer authorizer; + protected AccessContext accessContext; + protected volatile Function batchAccessCheckerFactory; - protected ParsedReference commitSingle(Reference branch, ContentKey key) + protected ParsedReference commitSingle(Reference branch, ContentKey key, RequestMeta requestMeta) throws InterruptedException, ExecutionException, BaseNessieClientServerException { - return commitMultiple(branch, key); + return commitMultiple(branch, requestMeta, key); } - protected ParsedReference commitMultiple(Reference branch, ContentKey... keys) + protected ParsedReference commitMultiple( + Reference branch, RequestMeta requestMeta, ContentKey... keys) throws InterruptedException, ExecutionException, BaseNessieClientServerException { ParsedReference ref = parsedReference(branch.getName(), branch.getHash(), Reference.ReferenceType.BRANCH); @@ -155,7 +161,12 @@ protected ParsedReference commitMultiple(Reference branch, ContentKey... 
keys) MultiTableUpdate update = catalogService - .commit(ref, commit.build(), CommitMeta::fromMessage) + .commit( + ref, + commit.build(), + CommitMeta::fromMessage, + CATALOG_UPDATE_MULTIPLE.name(), + apiContext("Iceberg", 1)) .toCompletableFuture() .get(); branch = update.targetBranch(); @@ -203,8 +214,10 @@ private void setupCatalogService() { catalogService.objectIO = objectIO; catalogService.persist = persist; catalogService.executor = executor; - catalogService.contentService = contentService; - catalogService.treeService = treeService; + catalogService.serverConfig = serverConfig; + catalogService.versionStore = versionStore; + catalogService.authorizer = authorizer; + catalogService.accessContext = accessContext; catalogService.backendExceptionMapper = BackendExceptionMapper.builder().build(); } @@ -251,7 +264,7 @@ private void setupObjectStorage() { private void setupNessieApi() { batchAccessCheckerFactory = accessContext -> NOOP_ACCESS_CHECKER; - ServerConfig config = + serverConfig = new ServerConfig() { @Override public String getDefaultBranch() { @@ -263,17 +276,15 @@ public boolean sendStacktraceToClient() { return true; } }; - VersionStore versionStore = new VersionStoreImpl(persist); - Authorizer authorizer = context -> batchAccessCheckerFactory.apply(context); - AccessContext accessContext = () -> () -> null; - - treeService = new TreeApiImpl(config, versionStore, authorizer, accessContext); - contentService = new ContentApiImpl(config, versionStore, authorizer, accessContext); + versionStore = new VersionStoreImpl(persist); + authorizer = (context, apiContext) -> batchAccessCheckerFactory.apply(context); + accessContext = () -> () -> null; RestV2TreeResource treeResource = - new RestV2TreeResource(config, versionStore, authorizer, accessContext, emptyHttpHeaders()); + new RestV2TreeResource( + serverConfig, versionStore, authorizer, accessContext, emptyHttpHeaders()); RestV2ConfigResource configResource = - new RestV2ConfigResource(config, 
versionStore, authorizer, accessContext); + new RestV2ConfigResource(serverConfig, versionStore, authorizer, accessContext); api = new CombinedClientBuilder() .withTreeResource(treeResource) diff --git a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java index 2bc32b60e94..f823b91a520 100644 --- a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java +++ b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java @@ -19,13 +19,17 @@ import static java.util.concurrent.TimeUnit.MINUTES; import static org.assertj.core.api.InstanceOfAssertFactories.STRING; import static org.projectnessie.api.v2.params.ParsedReference.parsedReference; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.catalog.service.api.SnapshotReqParams.forSnapshotHttpReq; import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.CheckType.COMMIT_CHANGE_AGAINST_REFERENCE; import static org.projectnessie.services.authz.Check.CheckType.READ_ENTITY_VALUE; import static org.projectnessie.services.authz.Check.CheckType.UPDATE_ENTITY; import static org.projectnessie.services.authz.Check.CheckType.VIEW_REFERENCE; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import java.io.InputStream; import java.util.ArrayList; @@ -62,9 +66,11 @@ import org.projectnessie.objectstoragemock.MockObject; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessCheckException; 
+import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Check; import org.projectnessie.services.authz.Check.CheckType; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; import software.amazon.awssdk.services.s3.model.S3Exception; public class TestCatalogServiceImpl extends AbstractCatalogService { @@ -92,7 +98,7 @@ public void cleanupAfterNessieCommitFailure() throws Exception { .operation(Operation.Put.of(key1, IcebergView.of("meta", 1, 2))) .commitWithResponse(); - soft.assertThatThrownBy(() -> commitMultiple(main, key1, key2)) + soft.assertThatThrownBy(() -> commitMultiple(main, API_WRITE, key1, key2)) .isInstanceOf(ExecutionException.class) .cause() .isInstanceOf(RuntimeException.class) @@ -148,7 +154,7 @@ public MockObject commit() { return Optional.empty(); }); - soft.assertThatThrownBy(() -> commitMultiple(main, key1, key2, key3, key4)) + soft.assertThatThrownBy(() -> commitMultiple(main, API_WRITE, key1, key2, key3, key4)) .isInstanceOf(ExecutionException.class) .cause() .cause() @@ -166,7 +172,15 @@ public void noCommitOps() throws Exception { parsedReference(main.getName(), main.getHash(), Reference.ReferenceType.BRANCH); CatalogCommit commit = CatalogCommit.builder().build(); - catalogService.commit(ref, commit, CommitMeta::fromMessage).toCompletableFuture().get(); + catalogService + .commit( + ref, + commit, + CommitMeta::fromMessage, + CATALOG_UPDATE_MULTIPLE.name(), + apiContext("Catalog", 0)) + .toCompletableFuture() + .get(); Reference afterCommit = api.getReference().refName("main").get(); soft.assertThat(afterCommit).isEqualTo(main); @@ -179,7 +193,7 @@ public void twoTableCreates() throws Exception { ContentKey key1 = ContentKey.of("mytable1"); ContentKey key2 = ContentKey.of("mytable2"); - ParsedReference committed = commitMultiple(main, key1, key2); + ParsedReference committed = commitMultiple(main, API_WRITE, key1, key2); Reference afterCommit = 
api.getReference().refName("main").get(); soft.assertThat(afterCommit) @@ -193,7 +207,7 @@ public void singleTableCreate() throws Exception { Reference main = api.getReference().refName("main").get(); ContentKey key = ContentKey.of("mytable"); - ParsedReference committed = commitSingle(main, key); + ParsedReference committed = commitSingle(main, key, API_WRITE); Reference afterCommit = api.getReference().refName("main").get(); soft.assertThat(afterCommit) @@ -204,7 +218,11 @@ public void singleTableCreate() throws Exception { SnapshotResponse snap = catalogService .retrieveSnapshot( - forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, false) + forSnapshotHttpReq(committed, "ICEBERG", "2"), + key, + ICEBERG_TABLE, + API_READ, + apiContext("Catalog", 0)) .toCompletableFuture() .get(5, MINUTES); @@ -247,20 +265,20 @@ public void singleTableCreate() throws Exception { /** * Verify behavior of {@link CatalogService#retrieveSnapshot(SnapshotReqParams, ContentKey, - * Content.Type, boolean)} against related Nessie {@link CheckType check types} for read and write - * intents. + * Content.Type, RequestMeta, ApiContext)} against related Nessie {@link CheckType check types} + * for read and write intents. 
*/ @Test public void retrieveSnapshotAccessChecks() throws Exception { Reference main = api.getReference().refName("main").get(); ContentKey key = ContentKey.of("mytable"); - ParsedReference committed = commitSingle(main, key); + ParsedReference committed = commitSingle(main, key, API_WRITE); AtomicReference failingCheckType = new AtomicReference<>(); batchAccessCheckerFactory = x -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return getChecks().stream() @@ -290,7 +308,8 @@ public Map check() { forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, - false) + API_READ, + apiContext("Catalog", 0)) .toCompletableFuture() .get(5, MINUTES)) .describedAs("forRead with %s", checkType); @@ -308,7 +327,8 @@ public Map check() { forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, - true) + API_WRITE, + apiContext("Catalog", 0)) .toCompletableFuture() .get(5, MINUTES)) .describedAs("forWrite with %s", checkType); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java index 22e84f1604a..00e8597fc7e 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java @@ -16,6 +16,7 @@ package org.projectnessie.catalog.service.rest; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.projectnessie.versioned.RequestMeta.API_READ; import io.smallrye.mutiny.Uni; import jakarta.inject.Inject; @@ -35,6 +36,7 @@ import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Reference; +import org.projectnessie.services.authz.ApiContext; import 
org.projectnessie.services.rest.common.RestCommon; abstract class AbstractCatalogResource { @@ -49,18 +51,25 @@ abstract class AbstractCatalogResource { @Context ExternalBaseUri uriInfo; Uni snapshotBased( - ContentKey key, SnapshotReqParams snapshotReqParams, Content.Type expectedType) + ContentKey key, + SnapshotReqParams snapshotReqParams, + Content.Type expectedType, + ApiContext apiContext) throws NessieNotFoundException { - return snapshotResponse(key, snapshotReqParams, expectedType) + return snapshotResponse(key, snapshotReqParams, expectedType, apiContext) .map(AbstractCatalogResource::snapshotToResponse); } Uni snapshotResponse( - ContentKey key, SnapshotReqParams snapshotReqParams, Content.Type expectedType) + ContentKey key, + SnapshotReqParams snapshotReqParams, + Content.Type expectedType, + ApiContext apiContext) throws NessieNotFoundException { return Uni.createFrom() .completionStage( - catalogService.retrieveSnapshot(snapshotReqParams, key, expectedType, false)); + catalogService.retrieveSnapshot( + snapshotReqParams, key, expectedType, API_READ, apiContext)); } private static Response snapshotToResponse(SnapshotResponse snapshot) { diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java index 60a9c32785e..2ab07b27125 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java @@ -16,6 +16,7 @@ package org.projectnessie.catalog.service.rest; import static java.util.Objects.requireNonNull; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import io.smallrye.common.annotation.Blocking; @@ -160,7 
+161,13 @@ public Uni commitTransaction( // results are consumed. return Uni.createFrom() .completionStage( - catalogService.commit(ref, commit.build(), reqParams, this::updateCommitMeta)) + catalogService.commit( + ref, + commit.build(), + reqParams, + this::updateCommitMeta, + CATALOG_UPDATE_MULTIPLE.name(), + ICEBERG_V1)) .map(stream -> stream.reduce(null, (ident, snap) -> ident, (i1, i2) -> i1)); } } diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java index 7d47314f126..436c0027d0a 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java @@ -15,11 +15,17 @@ */ package org.projectnessie.catalog.service.rest; -import static java.lang.String.format; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.META_SET_LOCATION; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.META_SET_PROPERTIES; import static org.projectnessie.error.ContentKeyErrorDetails.contentKeyErrorDetails; import static org.projectnessie.model.Content.Type.NAMESPACE; import static org.projectnessie.services.impl.RefUtil.toReference; -import static org.projectnessie.services.rest.common.RestCommon.updateCommitMeta; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; +import static org.projectnessie.versioned.RequestMeta.apiWrite; 
import io.smallrye.common.annotation.Blocking; import jakarta.enterprise.context.RequestScoped; @@ -41,11 +47,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; import org.eclipse.microprofile.openapi.annotations.Operation; import org.jboss.resteasy.reactive.server.ServerExceptionMapper; import org.projectnessie.api.v2.params.ParsedReference; import org.projectnessie.catalog.formats.iceberg.meta.IcebergNamespace; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateNamespaceRequest; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateNamespaceResponse; import org.projectnessie.catalog.formats.iceberg.rest.IcebergGetNamespaceResponse; @@ -63,12 +71,12 @@ import org.projectnessie.error.NessieReferenceNotFoundException; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; -import org.projectnessie.model.ContentResponse; import org.projectnessie.model.EntriesResponse; import org.projectnessie.model.GetMultipleContentsResponse; import org.projectnessie.model.ImmutableNamespace; import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.Namespace; +import org.projectnessie.model.Operation.Delete; import org.projectnessie.model.Operations; import org.projectnessie.model.Reference; import org.projectnessie.services.authz.AccessContext; @@ -76,6 +84,7 @@ import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.spi.PagedResponseHandler; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with namespaces. 
*/ @@ -131,7 +140,7 @@ public IcebergCreateNamespaceResponse createNamespace( try { contentsResponse = contentService.getMultipleContents( - ref.name(), ref.hashWithRelativeSpec(), List.of(key), false, false); + ref.name(), ref.hashWithRelativeSpec(), List.of(key), false, API_READ); } catch (NessieNotFoundException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); } @@ -157,10 +166,20 @@ public IcebergCreateNamespaceResponse createNamespace( .commitMeta(updateCommitMeta("update namespace " + key)) .build(); + RequestMeta.RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(key, CATALOG_CREATE_ENTITY.name()); + if (!namespace.getProperties().isEmpty()) { + requestMeta.addKeyAction(key, META_SET_PROPERTIES.name()); + if (namespace.getProperties().containsKey("location")) { + requestMeta.addKeyAction(key, META_SET_LOCATION.name()); + } + } + treeService.commitMultipleOperations( contentsResponse.getEffectiveReference().getName(), contentsResponse.getEffectiveReference().getHash(), - ops); + ops, + requestMeta.build()); return IcebergCreateNamespaceResponse.builder() .namespace(createNamespaceRequest.namespace()) @@ -176,54 +195,54 @@ public void dropNamespace( @PathParam("prefix") String prefix, @PathParam("namespace") String namespace) throws IOException { NamespaceRef namespaceRef = decodeNamespaceRef(prefix, namespace); - ContentKey key = namespaceRef.namespace().toContentKey(); - ContentResponse contentResponse = - contentService.getContent( - key, namespaceRef.referenceName(), namespaceRef.hashWithRelativeSpec(), false, false); - if (!(contentResponse.getContent() instanceof Namespace)) { - throw new NessieNamespaceNotFoundException( - contentKeyErrorDetails(key), - String.format("Namespace '%s' does not exist", key.toCanonicalString())); - } - - Reference ref = contentResponse.getEffectiveReference(); - boolean notEmpty = + var ref = new AtomicReference(); + var entries = treeService.getEntries( - ref.getName(), - ref.getHash(), + 
namespaceRef.referenceName(), + namespaceRef.hashWithRelativeSpec(), + null, null, - format("entry.encodedKey.startsWith('%s.')", key.toPathString()), null, false, - new PagedResponseHandler<>() { - boolean found; + new PagedResponseHandler, EntriesResponse.Entry>() { + final List entries = new ArrayList<>(); @Override public boolean addEntry(EntriesResponse.Entry entry) { - if (found) { + if (entries.size() == 2) { return false; } - found = true; + entries.add(entry); return true; } @Override - public Boolean build() { - return found; + public List build() { + return entries; } @Override public void hasMore(String pagingToken) {} }, - h -> toReference(h), + h -> ref.set(toReference(h)), null, null, key, - null); + List.of()); - if (notEmpty) { + if (entries.isEmpty()) { + throw new NessieNamespaceNotFoundException( + contentKeyErrorDetails(key), + String.format("Namespace '%s' does not exist", key.toCanonicalString())); + } + if (!NAMESPACE.equals(entries.get(0).getType())) { + throw new NessieNamespaceNotFoundException( + contentKeyErrorDetails(key), + String.format("Namespace '%s' does not exist", key.toCanonicalString())); + } + if (entries.size() > 1) { throw new NessieNamespaceNotEmptyException( contentKeyErrorDetails(key), String.format("Namespace '%s' is not empty", key.toCanonicalString())); @@ -231,11 +250,14 @@ public void hasMore(String pagingToken) {} Operations ops = ImmutableOperations.builder() - .addOperations(org.projectnessie.model.Operation.Delete.of(key)) + .addOperations(Delete.of(key)) .commitMeta(updateCommitMeta("delete namespace " + key)) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta.RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(key, CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations( + ref.get().getName(), ref.get().getHash(), ops, requestMeta.build()); } @Operation(operationId = "iceberg.v1.listNamespaces") @@ -279,7 +301,7 @@ public void namespaceExists( 
namespaceRef.referenceName(), namespaceRef.hashWithRelativeSpec(), false, - false) + API_READ) .getContent(); if (!(c instanceof Namespace)) { throw new NessieNamespaceNotFoundException( @@ -311,7 +333,7 @@ public IcebergGetNamespaceResponse loadNamespaceMetadata( namespaceRef.hashWithRelativeSpec(), keysInOrder, false, - false); + API_READ); Map namespacesMap = namespaces.toContentsMap(); Content content = namespacesMap.get(nessieNamespace.toContentKey()); @@ -371,8 +393,6 @@ public IcebergUpdateNamespacePropertiesResponse updateProperties( throws IOException { NamespaceRef namespaceRef = decodeNamespaceRef(prefix, namespace); - // TODO might want to prevent setting 'location' - ContentKey key = namespaceRef.namespace().toContentKey(); GetMultipleContentsResponse namespaces = contentService.getMultipleContents( @@ -380,7 +400,7 @@ public IcebergUpdateNamespacePropertiesResponse updateProperties( namespaceRef.hashWithRelativeSpec(), List.of(key), false, - true); + API_WRITE); Reference ref = namespaces.getEffectiveReference(); Map namespacesMap = namespaces.toContentsMap(); @@ -403,7 +423,20 @@ public IcebergUpdateNamespacePropertiesResponse updateProperties( .commitMeta(updateCommitMeta("update namespace " + key)) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta.RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(key, CATALOG_UPDATE_ENTITY.name()); + if (!updateNamespacePropertiesRequest.removals().isEmpty()) { + if (updateNamespacePropertiesRequest.removals().contains("location")) { + requestMeta.addKeyAction(key, CatalogOps.META_REMOVE_LOCATION_PROPERTY.name()); + } + } + if (!updateNamespacePropertiesRequest.updates().isEmpty()) { + requestMeta.addKeyAction(key, CatalogOps.META_SET_PROPERTIES.name()); + if (updateNamespacePropertiesRequest.updates().containsKey("location")) { + requestMeta.addKeyAction(key, CatalogOps.META_SET_LOCATION.name()); + } + } + treeService.commitMultipleOperations(ref.getName(), 
ref.getHash(), ops, requestMeta.build()); IcebergUpdateNamespacePropertiesResponse.Builder response = IcebergUpdateNamespacePropertiesResponse.builder(); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java index ceb1297ec34..7d3b6032a1e 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java @@ -28,7 +28,10 @@ import static org.projectnessie.catalog.service.rest.TimestampParser.timestampToNessie; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.base.Splitter; import io.smallrye.mutiny.Uni; @@ -44,6 +47,7 @@ import java.util.stream.Stream; import org.projectnessie.api.v2.params.ParsedReference; import org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCatalogOperation; import org.projectnessie.catalog.formats.iceberg.rest.IcebergRenameTableRequest; import org.projectnessie.catalog.formats.iceberg.rest.IcebergUpdateEntityRequest; @@ -70,6 +74,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.model.TableReference; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import 
org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.impl.ContentApiImpl; @@ -77,6 +82,8 @@ import org.projectnessie.services.spi.ContentService; import org.projectnessie.services.spi.PagedCountingResponseHandler; import org.projectnessie.services.spi.TreeService; +import org.projectnessie.versioned.RequestMeta; +import org.projectnessie.versioned.RequestMeta.RequestMetaBuilder; import org.projectnessie.versioned.VersionStore; abstract class IcebergApiV1ResourceBase extends AbstractCatalogResource { @@ -86,6 +93,8 @@ abstract class IcebergApiV1ResourceBase extends AbstractCatalogResource { final ServerConfig serverConfig; final CatalogConfig catalogConfig; + static final ApiContext ICEBERG_V1 = apiContext("Iceberg", 1); + protected IcebergApiV1ResourceBase( ServerConfig serverConfig, CatalogConfig catalogConfig, @@ -94,8 +103,9 @@ protected IcebergApiV1ResourceBase( AccessContext accessContext) { this.serverConfig = serverConfig; this.catalogConfig = catalogConfig; - this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(serverConfig, store, authorizer, accessContext); + this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); + this.contentService = + new ContentApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); } protected Stream listContent( @@ -182,7 +192,7 @@ protected void renameContent( ref.hashWithRelativeSpec(), List.of(toTableRef.contentKey(), fromTableRef.contentKey()), false, - false); + API_READ); Map contentsMap = contents.toContentsMap(); Content existingFrom = contentsMap.get(fromTableRef.contentKey()); if (existingFrom == null || !expectedContentType.equals(existingFrom.getType())) { @@ -220,7 +230,13 @@ protected void renameContent( entityType, fromTableRef.contentKey(), toTableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(effectiveRef.getName(), effectiveRef.getHash(), 
ops); + RequestMetaBuilder requestMeta = + RequestMeta.apiWrite() + .addKeyAction(fromTableRef.contentKey(), CatalogOps.CATALOG_RENAME_ENTITY_FROM.name()) + .addKeyAction(toTableRef.contentKey(), CatalogOps.CATALOG_RENAME_ENTITY_TO.name()); + + treeService.commitMultipleOperations( + effectiveRef.getName(), effectiveRef.getHash(), ops, requestMeta.build()); } protected NamespaceRef decodeNamespaceRef(String prefix, String encodedNs) { @@ -339,7 +355,11 @@ void createEntityVerifyNotExists(TableRef tableRef, Content.Type type) GetMultipleContentsResponse contentResponse = contentService.getMultipleContents( - ref.name(), ref.hashWithRelativeSpec(), List.of(tableRef.contentKey()), false, true); + ref.name(), + ref.hashWithRelativeSpec(), + List.of(tableRef.contentKey()), + false, + API_WRITE); if (!contentResponse.getContents().isEmpty()) { Content existing = contentResponse.getContents().get(0).getContent(); throw new CatalogEntityAlreadyExistsException( @@ -354,7 +374,11 @@ ContentResponse fetchIcebergEntity( ParsedReference ref = requireNonNull(tableRef.reference()); ContentResponse content = contentService.getContent( - tableRef.contentKey(), ref.name(), ref.hashWithRelativeSpec(), false, forWrite); + tableRef.contentKey(), + ref.name(), + ref.hashWithRelativeSpec(), + false, + forWrite ? 
API_WRITE : API_READ); checkArgument( content.getContent().getType().equals(expectedType), "Expecting an Iceberg %s, but got type %s", @@ -364,7 +388,10 @@ ContentResponse fetchIcebergEntity( } Uni createOrUpdateEntity( - TableRef tableRef, IcebergUpdateEntityRequest updateEntityRequest, Content.Type contentType) + TableRef tableRef, + IcebergUpdateEntityRequest updateEntityRequest, + Content.Type contentType, + CatalogOps apiOperation) throws IOException { IcebergCatalogOperation op = @@ -383,7 +410,13 @@ Uni createOrUpdateEntity( return Uni.createFrom() .completionStage( - catalogService.commit(tableRef.reference(), commit, reqParams, this::updateCommitMeta)) + catalogService.commit( + tableRef.reference(), + commit, + reqParams, + this::updateCommitMeta, + apiOperation.name(), + ICEBERG_V1)) .map(Stream::findFirst) .map( o -> diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java index 31e21519d36..e1b21cfb584 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java @@ -23,6 +23,9 @@ import static org.projectnessie.catalog.formats.iceberg.meta.IcebergSortOrder.unsorted; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier.fromNessieContentKey; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableMetadata.GC_ENABLED; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static 
org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.icebergBaseLocation; import static org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.nessieTableSnapshotToIceberg; import static org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.newIcebergTableSnapshot; @@ -39,6 +42,8 @@ import static org.projectnessie.catalog.service.rest.TableRef.tableRef; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; +import static org.projectnessie.versioned.RequestMeta.apiWrite; import com.google.common.collect.Lists; import io.smallrye.common.annotation.Blocking; @@ -74,6 +79,7 @@ import org.projectnessie.catalog.formats.iceberg.meta.IcebergSortOrder; import org.projectnessie.catalog.formats.iceberg.meta.IcebergTableMetadata; import org.projectnessie.catalog.formats.iceberg.metrics.IcebergMetricsReport; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.nessie.IcebergTableMetadataUpdateState; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCommitTableResponse; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateTableRequest; @@ -111,6 +117,7 @@ import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta.RequestMetaBuilder; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with tables. 
*/ @@ -169,7 +176,8 @@ private Uni loadTable( return snapshotResponse( key, SnapshotReqParams.forSnapshotHttpReq(tableRef.reference(), "iceberg", null), - ICEBERG_TABLE) + ICEBERG_TABLE, + ICEBERG_V1) .map( snap -> loadTableResultFromSnapshotResponse( @@ -213,7 +221,7 @@ R loadTableResultFromSnapshotResponse( snap.effectiveReference().getName(), snap.effectiveReference().getHash(), false, - true); + API_WRITE); writeAccessValidated = true; } catch (Exception ignore) { } @@ -354,7 +362,7 @@ public Uni createTable( .addRequirement(IcebergUpdateRequirement.AssertCreate.assertTableDoesNotExist()) .build(); - return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_TABLE) + return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_TABLE, CATALOG_CREATE_ENTITY) .map( snap -> this.loadTableResultFromSnapshotResponse( @@ -391,6 +399,9 @@ public Uni registerTable( ParsedReference reference = requireNonNull(tableRef.reference()); Branch ref = checkBranch(treeService.getReferenceByName(reference.name(), FetchOption.MINIMAL)); + RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(tableRef.contentKey(), CatalogOps.CATALOG_REGISTER_ENTITY.name()); + Optional catalogTableRef = uriInfo.resolveTableFromUri(registerTableRequest.metadataLocation()); boolean nessieCatalogUri = uriInfo.isNessieCatalogUri(registerTableRequest.metadataLocation()); @@ -416,7 +427,8 @@ public Uni registerTable( ctr.contentKey(), registerTableRequest.metadataLocation()))) .build(); CommitResponse committed = - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + treeService.commitMultipleOperations( + ref.getName(), ref.getHash(), ops, requestMeta.build()); return this.loadTable( TableRef.tableRef( @@ -463,7 +475,8 @@ public Uni registerTable( tableRef.contentKey(), registerTableRequest.metadataLocation()))) .build(); CommitResponse committed = - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + treeService.commitMultipleOperations( + 
ref.getName(), ref.getHash(), ops, requestMeta.build()); return this.loadTable( tableRef( @@ -499,7 +512,9 @@ public void dropTable( .commitMeta(updateCommitMeta(format("Drop ICEBERG_TABLE %s", tableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(tableRef.contentKey(), CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta.build()); } @Operation(operationId = "iceberg.v1.listTables") @@ -583,7 +598,7 @@ public Uni updateTable( throws IOException { TableRef tableRef = decodeTableRef(prefix, namespace, table); - return createOrUpdateEntity(tableRef, commitTableRequest, ICEBERG_TABLE) + return createOrUpdateEntity(tableRef, commitTableRequest, ICEBERG_TABLE, CATALOG_UPDATE_ENTITY) .map( snap -> { IcebergTableMetadata tableMetadata = diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java index 0bcf4db65e9..d5c66aa2359 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java @@ -18,6 +18,9 @@ import static java.lang.String.format; import static java.util.UUID.randomUUID; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier.fromNessieContentKey; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSchema.addSchema; 
import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddViewVersion.addViewVersion; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AssignUUID.assignUUID; @@ -26,6 +29,7 @@ import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.SetProperties.setProperties; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.UpgradeFormatVersion.upgradeFormatVersion; import static org.projectnessie.model.Content.Type.ICEBERG_VIEW; +import static org.projectnessie.versioned.RequestMeta.apiWrite; import io.smallrye.common.annotation.Blocking; import io.smallrye.mutiny.Uni; @@ -74,6 +78,7 @@ import org.projectnessie.services.authz.AccessContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with views. 
*/ @@ -138,7 +143,7 @@ public Uni createView( .addRequirement(IcebergUpdateRequirement.AssertCreate.assertTableDoesNotExist()) .build(); - return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_VIEW) + return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_VIEW, CATALOG_CREATE_ENTITY) .map(snap -> loadViewResultFromSnapshotResponse(snap, IcebergLoadViewResponse.builder())); } @@ -180,7 +185,9 @@ public void dropView( .commitMeta(updateCommitMeta(format("Drop ICEBERG_VIEW %s", tableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta.RequestMetaBuilder requestMeta = + apiWrite().addKeyAction(tableRef.contentKey(), CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta.build()); } private ContentResponse fetchIcebergView(TableRef tableRef, boolean forWrite) @@ -230,7 +237,8 @@ private Uni loadView(TableRef tableRef) throws NessieNo return snapshotResponse( key, SnapshotReqParams.forSnapshotHttpReq(tableRef.reference(), "iceberg", null), - ICEBERG_VIEW) + ICEBERG_VIEW, + ICEBERG_V1) .map(snap -> loadViewResultFromSnapshotResponse(snap, IcebergLoadViewResponse.builder())); } @@ -272,7 +280,7 @@ public Uni updateView( throws IOException { TableRef tableRef = decodeTableRef(prefix, namespace, view); - return createOrUpdateEntity(tableRef, commitViewRequest, ICEBERG_VIEW) + return createOrUpdateEntity(tableRef, commitViewRequest, ICEBERG_VIEW, CATALOG_UPDATE_ENTITY) .map( snap -> { IcebergViewMetadata viewMetadata = diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java index b9091e8f66c..65f4d11ef31 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java +++ 
b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java @@ -19,9 +19,13 @@ import static java.util.Objects.requireNonNull; import static org.projectnessie.catalog.files.s3.S3Utils.extractBucketName; import static org.projectnessie.catalog.files.s3.S3Utils.normalizeS3Scheme; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_S3_SIGN; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergError.icebergError; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergS3SignResponse.icebergS3SignResponse; +import static org.projectnessie.catalog.service.rest.IcebergApiV1ResourceBase.ICEBERG_V1; import static org.projectnessie.catalog.service.rest.IcebergConfigurer.icebergWriteLocation; +import static org.projectnessie.versioned.RequestMeta.apiRead; +import static org.projectnessie.versioned.RequestMeta.apiWrite; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; @@ -52,6 +56,7 @@ import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergContent; +import org.projectnessie.versioned.RequestMeta.RequestMetaBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -144,13 +149,16 @@ private boolean checkLocation(String location) { private Uni fetchSnapshot() { try { + RequestMetaBuilder requestMeta = write() ? apiWrite() : apiRead(); + requestMeta.addKeyAction(key(), CATALOG_S3_SIGN.name()); CompletionStage stage = catalogService() .retrieveSnapshot( SnapshotReqParams.forSnapshotHttpReq(ref(), "iceberg", null), key(), null, - write()); + requestMeta.build(), + ICEBERG_V1); // consider an import failure as a non-existing content: // signing will be authorized for the future location only. 
return Uni.createFrom().completionStage(stage).onFailure().recoverWithNull(); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java index b6e95f9ea17..d56beb3bc0b 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java @@ -15,10 +15,13 @@ */ package org.projectnessie.catalog.service.rest; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static org.projectnessie.catalog.service.api.SnapshotReqParams.forSnapshotHttpReq; import static org.projectnessie.catalog.service.rest.ExternalBaseUri.parseRefPathString; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.model.Validation.REF_NAME_PATH_ELEMENT_REGEX; +import static org.projectnessie.services.authz.ApiContext.apiContext; +import static org.projectnessie.versioned.RequestMeta.API_READ; import io.smallrye.common.annotation.Blocking; import io.smallrye.mutiny.Multi; @@ -48,12 +51,15 @@ import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Reference; +import org.projectnessie.services.authz.ApiContext; @RequestScoped @Consumes(MediaType.APPLICATION_JSON) @Path("catalog/v1") public class NessieCatalogResource extends AbstractCatalogResource { + static final ApiContext CATALOG_V0 = apiContext("Catalog", 0); + @GET @Path("trees/{ref:" + REF_NAME_PATH_ELEMENT_REGEX + "}/snapshots") @Produces(MediaType.APPLICATION_JSON) @@ -73,7 +79,8 @@ public Multi tableSnapshots( // This operation can block --> @Blocking Stream>> snapshots = - catalogService.retrieveSnapshots(reqParams, keys, effectiveReference::set); + 
catalogService.retrieveSnapshots( + reqParams, keys, effectiveReference::set, API_READ, CATALOG_V0); Multi multi = Multi.createFrom() @@ -105,7 +112,10 @@ public Uni tableSnapshot( @QueryParam("specVersion") String specVersion) throws NessieNotFoundException { return snapshotBased( - key, forSnapshotHttpReq(parseRefPathString(ref), format, specVersion), ICEBERG_TABLE); + key, + forSnapshotHttpReq(parseRefPathString(ref), format, specVersion), + ICEBERG_TABLE, + CATALOG_V0); } @POST @@ -125,7 +135,13 @@ public Uni commit( return Uni.createFrom() .completionStage( - catalogService.commit(reference, commit, reqParams, this::updateCommitMeta)) + catalogService.commit( + reference, + commit, + reqParams, + this::updateCommitMeta, + CATALOG_UPDATE_ENTITY.name(), + CATALOG_V0)) .map(v -> Response.ok().build()); } } diff --git a/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java b/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java index babc04408e4..6d834f32c4e 100644 --- a/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java +++ b/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java @@ -19,6 +19,9 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.when; +import static org.projectnessie.catalog.service.rest.IcebergApiV1ResourceBase.ICEBERG_V1; +import static org.projectnessie.versioned.RequestMeta.apiRead; +import static org.projectnessie.versioned.RequestMeta.apiWrite; import io.smallrye.mutiny.Uni; import io.smallrye.mutiny.helpers.test.UniAssertSubscriber; @@ -38,6 +41,7 @@ import org.projectnessie.catalog.files.api.ImmutableSigningResponse; import org.projectnessie.catalog.files.api.RequestSigner; import org.projectnessie.catalog.files.api.SigningResponse; +import 
org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.rest.IcebergException; import org.projectnessie.catalog.formats.iceberg.rest.IcebergS3SignRequest; import org.projectnessie.catalog.formats.iceberg.rest.IcebergS3SignResponse; @@ -56,6 +60,7 @@ import org.projectnessie.model.IcebergTable; import org.projectnessie.model.IcebergView; import org.projectnessie.model.Reference.ReferenceType; +import org.projectnessie.versioned.RequestMeta; @ExtendWith(MockitoExtension.class) class TestIcebergS3SignParams { @@ -121,7 +126,8 @@ class TestIcebergS3SignParams { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessRead(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(false))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiRead(key)), eq(ICEBERG_V1))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -135,7 +141,8 @@ void verifyAndSignSuccessRead(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignSuccessWrite(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -168,7 +175,8 @@ void verifyAndSignSuccessView() throws Exception { key, view, nessieViewSnapshot); - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(CompletableFuture.completedStage(snapshotResponse)); 
when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -178,7 +186,8 @@ void verifyAndSignSuccessView() throws Exception { @Test void verifyAndSignSuccessContentNotFound() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenThrow(new NessieContentNotFoundException(key, "main")); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -188,7 +197,8 @@ void verifyAndSignSuccessContentNotFound() throws Exception { @Test void verifyAndSignFailureReferenceNotFound() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenThrow(new NessieReferenceNotFoundException("ref not found")); IcebergS3SignParams icebergSigner = newBuilder().build(); Uni response = icebergSigner.verifyAndSign(); @@ -199,7 +209,8 @@ void verifyAndSignFailureReferenceNotFound() throws Exception { void verifyAndSignSuccessImportFailed() throws Exception { CompletionStage importFailedStage = CompletableFuture.failedStage(new RuntimeException("import failed")); - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(importFailedStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -210,7 +221,8 @@ void verifyAndSignSuccessImportFailed() throws Exception { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessReadMetadataLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), 
isNull(), eq(false))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiRead(key)), eq(ICEBERG_V1))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -224,7 +236,8 @@ void verifyAndSignSuccessReadMetadataLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignFailureWriteMetadataLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() @@ -242,7 +255,8 @@ void verifyAndSignFailureWriteMetadataLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessReadAncientLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(false))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiRead(key)), eq(ICEBERG_V1))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -261,7 +275,8 @@ void verifyAndSignSuccessReadAncientLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignFailureWriteAncientLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() @@ -279,7 +294,8 @@ void verifyAndSignFailureWriteAncientLocation(String method) throws Exception { @Test void 
verifyAndSignFailureWrongBaseLocation() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot( + any(), eq(key), isNull(), eq(expectedApiWrite(key)), eq(ICEBERG_V1))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() @@ -320,4 +336,12 @@ private void expectFailure( response.subscribe().withSubscriber(UniAssertSubscriber.create()); subscriber.assertFailedWith(exceptionClass, message); } + + private static RequestMeta expectedApiRead(ContentKey key) { + return apiRead().addKeyAction(key, CatalogOps.CATALOG_S3_SIGN.name()).build(); + } + + private static RequestMeta expectedApiWrite(ContentKey key) { + return apiWrite().addKeyAction(key, CatalogOps.CATALOG_S3_SIGN.name()).build(); + } } diff --git a/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java b/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java index 3109f696e83..9f5a05b890f 100644 --- a/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java +++ b/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java @@ -24,6 +24,7 @@ import java.util.function.Function; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public class AuthorizerExtension implements Extension { private final Authorizer authorizer = new Authorizer() { @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { if (accessCheckerSupplier == null) { return 
AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } diff --git a/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java b/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java index ced6fdac377..5bc5a03a442 100644 --- a/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java +++ b/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java @@ -24,6 +24,7 @@ import java.util.function.Function; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public class AuthorizerExtension implements Extension { private final Authorizer authorizer = new Authorizer() { @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { if (accessCheckerSupplier == null) { return AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java index 59c85995358..9eb211f76d8 100644 --- a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java @@ -18,6 +18,7 @@ import jakarta.enterprise.context.Dependent; import jakarta.inject.Inject; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthorizerType; import 
org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public CelAuthorizer(CompiledAuthorizationRules compiledRules) { } @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { - return new CelBatchAccessChecker(compiledRules, context); + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return new CelBatchAccessChecker(compiledRules, context, apiContext); } } diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java index 9d5c06c0fd7..1290f54a7d5 100644 --- a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java @@ -15,6 +15,15 @@ */ package org.projectnessie.server.authz; +import static org.projectnessie.services.cel.CELUtil.VAR_ACTIONS; +import static org.projectnessie.services.cel.CELUtil.VAR_API; +import static org.projectnessie.services.cel.CELUtil.VAR_CONTENT_TYPE; +import static org.projectnessie.services.cel.CELUtil.VAR_OP; +import static org.projectnessie.services.cel.CELUtil.VAR_PATH; +import static org.projectnessie.services.cel.CELUtil.VAR_REF; +import static org.projectnessie.services.cel.CELUtil.VAR_ROLE; +import static org.projectnessie.services.cel.CELUtil.VAR_ROLES; + import java.security.Principal; import java.util.LinkedHashMap; import java.util.List; @@ -27,6 +36,7 @@ import org.projectnessie.model.RepositoryConfig; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.authz.Check; import org.projectnessie.versioned.NamedRef; @@ -39,7 +49,9 @@ final class 
CelBatchAccessChecker extends AbstractBatchAccessChecker { private final CompiledAuthorizationRules compiledRules; private final AccessContext context; - CelBatchAccessChecker(CompiledAuthorizationRules compiledRules, AccessContext context) { + CelBatchAccessChecker( + CompiledAuthorizationRules compiledRules, AccessContext context, ApiContext apiContext) { + super(apiContext); this.compiledRules = compiledRules; this.context = context; } @@ -81,17 +93,21 @@ private void canPerformOp(Check check, Map failed) { String roleName = roleName(); Map arguments = Map.of( - "role", + VAR_ROLE, roleName, - "roles", + VAR_ROLES, roles(), - "op", + VAR_OP, check.type().name(), - "path", + VAR_ACTIONS, + check.actions(), + VAR_API, + getApiContext(), + VAR_PATH, "", - "ref", + VAR_REF, "", - "contentType", + VAR_CONTENT_TYPE, ""); Supplier errorMsgSupplier = diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java index 380c48599cb..b9321b87c47 100644 --- a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java @@ -23,6 +23,7 @@ import org.projectnessie.server.config.QuarkusNessieAuthorizationConfig; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthorizerType; import org.projectnessie.services.authz.BatchAccessChecker; @@ -53,12 +54,12 @@ public QuarkusAuthorizer( this.authorizer = authorizerInstance.get(); } else { - this.authorizer = context -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + this.authorizer = (context, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } } @Override 
- public BatchAccessChecker startAccessCheck(AccessContext context) { - return this.authorizer.startAccessCheck(context); + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return this.authorizer.startAccessCheck(context, apiContext); } } diff --git a/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java b/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java index b427ecb1db5..3c192e17565 100644 --- a/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java +++ b/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java @@ -17,6 +17,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.CheckType.CREATE_REFERENCE; import static org.projectnessie.services.authz.Check.CheckType.VIEW_REFERENCE; @@ -93,7 +94,8 @@ public Principal user() { public Set roleIds() { return roles.get(); } - }); + }, + apiContext("Nessie", 2)); BranchName main = BranchName.of("main"); soft.assertThatCode(() -> batchAccessChecker.canViewReference(main).checkAndThrow()) @@ -127,7 +129,8 @@ public Set roleIds() { void celBatchAccessCheckerEmptyChecks(CheckType type) { QuarkusNessieAuthorizationConfig config = buildConfig(true); CompiledAuthorizationRules rules = new CompiledAuthorizationRules(config); - CelBatchAccessChecker batchAccessChecker = new CelBatchAccessChecker(rules, () -> () -> null); + CelBatchAccessChecker batchAccessChecker = + new CelBatchAccessChecker(rules, () -> () -> null, apiContext("Nessie", 2)); Check check = Check.builder(type).build(); if (type == CheckType.VIEW_REFERENCE) { soft.assertThatCode(() -> batchAccessChecker.can(check).checkAndThrow()) @@ -155,14 +158,14 @@ void celAuthorizer() { when(authorizers.select(new 
AuthorizerType.Literal("CEL"))).thenReturn(celAuthorizerInstance); soft.assertThat( new QuarkusAuthorizer(configEnabled, authorizers) - .startAccessCheck(() -> () -> "some-user")) + .startAccessCheck(() -> () -> "some-user", apiContext("Nessie", 2))) .isInstanceOf(CelBatchAccessChecker.class); when(celAuthorizerInstance.get()).thenReturn(celAuthorizer); when(authorizers.select(new AuthorizerType.Literal("CEL"))).thenReturn(celAuthorizerInstance); soft.assertThat( new QuarkusAuthorizer(configDisabled, authorizers) - .startAccessCheck(() -> () -> "some-user")) + .startAccessCheck(() -> () -> "some-user", apiContext("Nessie", 2))) .isSameAs(AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER); } diff --git a/servers/quarkus-server/build.gradle.kts b/servers/quarkus-server/build.gradle.kts index 045d431b90a..3b95618eefa 100644 --- a/servers/quarkus-server/build.gradle.kts +++ b/servers/quarkus-server/build.gradle.kts @@ -178,6 +178,8 @@ dependencies { testFixturesCompileOnly(libs.microprofile.openapi) testFixturesCompileOnly(project(":nessie-immutables")) + testCompileOnly(project(":nessie-immutables")) + testAnnotationProcessor(project(":nessie-immutables", configuration = "processor")) intTestCompileOnly(project(":nessie-immutables")) intTestAnnotationProcessor(project(":nessie-immutables", configuration = "processor")) diff --git a/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java new file mode 100644 index 00000000000..ebe7c89688f --- /dev/null +++ b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.server.authz; + +import static org.projectnessie.server.authz.MockedAuthorizer.AuthzCheck.authzCheck; +import static org.projectnessie.services.authz.Check.check; + +import jakarta.inject.Singleton; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import org.projectnessie.nessie.immutables.NessieImmutable; +import org.projectnessie.services.authz.AbstractBatchAccessChecker; +import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; +import org.projectnessie.services.authz.Authorizer; +import org.projectnessie.services.authz.AuthorizerType; +import org.projectnessie.services.authz.BatchAccessChecker; +import org.projectnessie.services.authz.Check; + +@AuthorizerType("MOCKED") +@Singleton +public class MockedAuthorizer implements Authorizer { + private BiFunction, Map> responder = + (b, c) -> Map.of(); + private final List checks = new ArrayList<>(); + + @Override + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return new MockedBatchAccessChecker(context, apiContext); + } + + public synchronized void setResponder( + BiFunction, Map> responder) { + this.responder = responder; + } + + public synchronized void reset() { + checks.clear(); + responder = (b, c) -> Map.of(); + } + + public synchronized List checks() { + return List.copyOf(checks); + } + + public List 
checksWithoutIdentifiedKey() { + return checks().stream() + .map( + ac -> + authzCheck( + ac.apiContext(), + ac.checks().stream() + .map(c -> check(c.type(), c.ref(), c.key(), c.actions())) + .toList(), + ac.response())) + .toList(); + } + + synchronized void addCheck(AuthzCheck authzCheck) { + checks.add(authzCheck); + } + + public class MockedBatchAccessChecker extends AbstractBatchAccessChecker { + public final AccessContext context; + + public MockedBatchAccessChecker(AccessContext context, ApiContext apiContext) { + super(apiContext); + this.context = context; + } + + @Override + public Map check() { + var response = responder.apply(this, this.getChecks()); + addCheck(authzCheck(getApiContext(), getChecks(), response)); + return response; + } + } + + @NessieImmutable + public interface AuthzCheck { + ApiContext apiContext(); + + Set checks(); + + Map response(); + + static AuthzCheck authzCheck( + ApiContext apiContext, Collection checks, Map response) { + return ImmutableAuthzCheck.of(apiContext, new LinkedHashSet<>(checks), response); + } + } +} diff --git a/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java new file mode 100644 index 00000000000..55de528f637 --- /dev/null +++ b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java @@ -0,0 +1,460 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.server.authz; + +import static org.apache.iceberg.types.Types.NestedField.required; +import static org.projectnessie.server.authn.AuthenticationEnabledProfile.AUTH_CONFIG_OVERRIDES; +import static org.projectnessie.server.authn.AuthenticationEnabledProfile.SECURITY_CONFIG; +import static org.projectnessie.server.authz.MockedAuthorizer.AuthzCheck.authzCheck; +import static org.projectnessie.server.catalog.IcebergCatalogTestCommon.WAREHOUSE_NAME; +import static org.projectnessie.services.authz.ApiContext.apiContext; +import static org.projectnessie.services.authz.Check.CheckType.CREATE_ENTITY; +import static org.projectnessie.services.authz.Check.CheckType.DELETE_ENTITY; +import static org.projectnessie.services.authz.Check.CheckType.READ_ENTITY_VALUE; +import static org.projectnessie.services.authz.Check.CheckType.UPDATE_ENTITY; +import static org.projectnessie.services.authz.Check.canCommitChangeAgainstReference; +import static org.projectnessie.services.authz.Check.canReadContentKey; +import static org.projectnessie.services.authz.Check.canReadEntries; +import static org.projectnessie.services.authz.Check.canViewReference; +import static org.projectnessie.services.authz.Check.check; + +import com.google.common.collect.ImmutableMap; +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.TestProfile; +import jakarta.inject.Inject; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.apache.iceberg.CatalogProperties; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.catalog.Namespace; +import 
org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.exceptions.AlreadyExistsException; +import org.apache.iceberg.rest.RESTCatalog; +import org.apache.iceberg.types.Types; +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.projectnessie.client.auth.BasicAuthenticationProvider; +import org.projectnessie.error.NessieForbiddenException; +import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.IcebergTable; +import org.projectnessie.model.Operation; +import org.projectnessie.objectstoragemock.HeapStorageBucket; +import org.projectnessie.quarkus.tests.profiles.BaseConfigProfile; +import org.projectnessie.server.BaseClientAuthTest; +import org.projectnessie.server.catalog.Catalogs; +import org.projectnessie.server.catalog.S3UnitTestProfiles.S3UnitTestProfile; +import org.projectnessie.services.authz.ApiContext; +import org.projectnessie.services.authz.AuthorizerType; +import org.projectnessie.services.authz.BatchAccessChecker; +import org.projectnessie.services.authz.Check; +import org.projectnessie.versioned.BranchName; + +@SuppressWarnings("resource") // api() returns an AutoCloseable +@QuarkusTest +@TestProfile(TestAuthzMeta.Profile.class) +public class TestAuthzMeta extends BaseClientAuthTest { + @Inject + @AuthorizerType("MOCKED") + MockedAuthorizer mockedAuthorizer; + + HeapStorageBucket heapStorageBucket; + + private static final Catalogs CATALOGS = new Catalogs(); + + // Cannot use @ExtendWith(SoftAssertionsExtension.class) + @InjectSoftAssertions here, because + // of Quarkus class loading issues. 
See https://github.com/quarkusio/quarkus/issues/19814 + protected final SoftAssertions soft = new SoftAssertions(); + + protected RESTCatalog catalog(Map catalogOptions) { + return CATALOGS.getCatalog(catalogOptions); + } + + @AfterAll + static void closeRestCatalog() throws Exception { + CATALOGS.close(); + } + + @AfterEach + void cleanup() { + // Cannot use @ExtendWith(SoftAssertionsExtension.class) + @InjectSoftAssertions here, because + // of Quarkus class loading issues. See https://github.com/quarkusio/quarkus/issues/19814 + soft.assertAll(); + } + + @BeforeEach + void beforeEach() { + mockedAuthorizer.reset(); + heapStorageBucket.clear(); + } + + /** + * Verifies that the expected authz checks are issued with the right {@link ApiContext} and + * "actions". + */ + @Test + public void icebergApiTable() { + var catalog = + catalog( + Map.of("http.header.Authorization", basicAuthorizationHeader("admin_user", "test123"))); + + var apiContext = apiContext("Iceberg", 1); + var branch = BranchName.of("main"); + + var myNamespaceIceberg = Namespace.of("iceberg_table"); + var tableKey = ContentKey.of("iceberg_table", "table_foo"); + var tableIdentifier = TableIdentifier.of(myNamespaceIceberg, "table_foo"); + + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIceberg); + // no assertion, done in 'icebergApiNamespaces()' + + var schema = + new Schema( + required(3, "id", Types.IntegerType.get()), + required(4, "data", Types.StringType.get())); + var spec = PartitionSpec.builderFor(schema).bucket("id", 16).build(); + + mockedAuthorizer.reset(); + var props = new HashMap(); + catalog.createTable(tableIdentifier, schema, spec, "my://location", props); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + // 'IcebergApiV1ResourceBase.createEntityVerifyNotExists' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(READ_ENTITY_VALUE, branch, tableKey), + 
check(CREATE_ENTITY, branch, tableKey)), + Map.of()), + // 'CatalogServiceImpl.commit' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(READ_ENTITY_VALUE, branch, tableKey, Set.of("CATALOG_CREATE_ENTITY")), + check(CREATE_ENTITY, branch, tableKey, Set.of("CATALOG_CREATE_ENTITY"))), + Map.of()), + // actual 'commit' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check( + CREATE_ENTITY, + branch, + tableKey, + Set.of( + "CATALOG_CREATE_ENTITY", + "META_ADD_SORT_ORDER", + "META_SET_DEFAULT_PARTITION_SPEC", + "META_SET_CURRENT_SCHEMA", + "META_UPGRADE_FORMAT_VERSION", + "META_SET_PROPERTIES", + "META_ASSIGN_UUID", + "META_SET_LOCATION", + "META_ADD_SCHEMA", + "META_SET_DEFAULT_SORT_ORDER", + "META_ADD_PARTITION_SPEC"))), + Map.of())); + } + + /** + * Verifies that the expected authz checks are issued with the right {@link ApiContext} and + * "actions". + */ + @Test + public void icebergApiNamespaces() { + var catalog = + catalog( + Map.of("http.header.Authorization", basicAuthorizationHeader("admin_user", "test123"))); + + var myNamespace = ContentKey.of("iceberg_namespaces"); + var myNamespaceIceberg = Namespace.of("iceberg_namespaces"); + var myNamespaceInner = ContentKey.of("iceberg_namespaces", "inner"); + var myNamespaceIcebergInner = Namespace.of("iceberg_namespaces", "inner"); + + var apiContext = apiContext("Iceberg", 1); + var branch = BranchName.of("main"); + + mockedAuthorizer.reset(); + soft.assertThat(catalog.dropNamespace(myNamespaceIceberg)).isFalse(); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, List.of(canReadEntries(branch)), Map.of())); + + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIceberg); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + 
.containsExactly( + authzCheck(apiContext, List.of(canViewReference(branch)), Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + check(CREATE_ENTITY, branch, myNamespace, Set.of("CATALOG_CREATE_ENTITY")), + canCommitChangeAgainstReference(branch)), + Map.of())); + + var props = new HashMap(); + props.put("location", "my_location"); + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIcebergInner, props); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck(apiContext, List.of(canViewReference(branch)), Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + check( + CREATE_ENTITY, + branch, + myNamespaceInner, + Set.of( + "META_SET_LOCATION", "CATALOG_CREATE_ENTITY", "META_SET_PROPERTIES")), + canCommitChangeAgainstReference(branch)), + Map.of())); + + var props2 = new HashMap(); + props2.put("a", "b"); + mockedAuthorizer.reset(); + catalog.setProperties(myNamespaceIceberg, props2); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getMultipleContents' in 'IcebergApiV1NamespaceResource.updateProperties' + apiContext, + List.of( + canViewReference(branch), + check(UPDATE_ENTITY, branch, myNamespace), + check(READ_ENTITY_VALUE, branch, myNamespace), + canCommitChangeAgainstReference(branch)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check( + UPDATE_ENTITY, + branch, + myNamespace, + Set.of("META_SET_PROPERTIES", "CATALOG_UPDATE_ENTITY"))), + Map.of())); + + // not empty + mockedAuthorizer.reset(); + soft.assertThatThrownBy(() -> catalog.dropNamespace(myNamespaceIceberg)) + .isInstanceOf(AlreadyExistsException.class); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + 
apiContext, + List.of( + canReadEntries(branch), + canReadContentKey(branch, myNamespace), + canReadContentKey(branch, myNamespaceInner)), + Map.of())); + + mockedAuthorizer.reset(); + catalog.dropNamespace(myNamespaceIcebergInner); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, + List.of(canReadEntries(branch), canReadContentKey(branch, myNamespaceInner)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(DELETE_ENTITY, branch, myNamespaceInner, Set.of("CATALOG_DROP_ENTITY"))), + Map.of())); + + mockedAuthorizer.reset(); + catalog.dropNamespace(myNamespaceIceberg); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, + List.of(canReadEntries(branch), canReadContentKey(branch, myNamespace)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(DELETE_ENTITY, branch, myNamespace, Set.of("CATALOG_DROP_ENTITY"))), + Map.of())); + } + + /** Verifies that the expected authz checks are issued with the right {@link ApiContext}. 
*/ + @Test + public void nessieApi() throws Exception { + withClientCustomizer( + c -> c.withAuthentication(BasicAuthenticationProvider.create("admin_user", "test123"))); + soft.assertThat(api().getAllReferences().stream()).isNotEmpty(); + + // Verify that the ApiContext is correctly set + soft.assertThat(mockedAuthorizer.checks()) + .containsExactly( + authzCheck( + apiContext("Nessie", 2), + List.of(canViewReference(BranchName.of("main"))), + Map.of())); + } + + /** + * Simulates how an authorizer implementation can disallow creating/updating Iceberg entities via + * the Nessie API, but allow those via Iceberg, leveraging the {@code ApiContext}. + */ + @Test + public void commitFailWithNessieSucceedWithIceberg() { + withClientCustomizer( + c -> c.withAuthentication(BasicAuthenticationProvider.create("admin_user", "test123"))); + + var catalog = + catalog( + Map.of("http.header.Authorization", basicAuthorizationHeader("admin_user", "test123"))); + + var branch = BranchName.of("main"); + + var myNamespaceKey = ContentKey.of("commit_nessie_iceberg"); + var myNamespaceIceberg = Namespace.of("commit_nessie_iceberg"); + var tableKey = ContentKey.of("commit_nessie_iceberg", "table_foo"); + var tableIdentifier = TableIdentifier.of(myNamespaceIceberg, "table_foo"); + + var schema = + new Schema( + required(3, "id", Types.IntegerType.get()), + required(4, "data", Types.StringType.get())); + var spec = PartitionSpec.builderFor(schema).bucket("id", 16).build(); + + BiFunction, Map> responder = + (checker, checks) -> + checks.stream() + .filter( + c -> + (c.type() == CREATE_ENTITY || c.type() == UPDATE_ENTITY) + && checker.getApiContext().getApiName().equals("Nessie")) + .collect(Collectors.toMap(Function.identity(), c -> "No no no")); + + // Creating a namespace is forbidden for Nessie API + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + soft.assertThatThrownBy( + () -> + api() + .createNamespace() + .refName(branch.getName()) + 
.namespace(org.projectnessie.model.Namespace.of(myNamespaceKey)) + .createWithResponse()) + .isInstanceOf(NessieForbiddenException.class) + .hasMessageContaining("No no no"); + + // Creating a namespace is allowed for Iceberg API + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + catalog.createNamespace(myNamespaceIceberg); + + // Updating a namespace is forbidden for Nessie API + var props = new HashMap(); + props.put("foo", "bar"); + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + soft.assertThatThrownBy( + () -> + api() + .updateProperties() + .refName(branch.getName()) + .namespace(org.projectnessie.model.Namespace.of(myNamespaceKey)) + .updateProperties(props) + .updateWithResponse()) + .isInstanceOf(NessieForbiddenException.class) + .hasMessageContaining("No no no"); + + // Updating a namespace is allowed for Iceberg API + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + catalog.setProperties(myNamespaceIceberg, props); + + // Creating a table is forbidden for Nessie API + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + soft.assertThatThrownBy( + () -> + api() + .commitMultipleOperations() + .branch(api().getDefaultBranch()) + .commitMeta(CommitMeta.fromMessage("attempt")) + .operation( + Operation.Put.of(tableKey, IcebergTable.of("ms://location", 1, 2, 3, 4))) + .commitWithResponse()) + .isInstanceOf(NessieForbiddenException.class) + .hasMessageContaining("No no no"); + + // Creating a table is allowed for Iceberg API + mockedAuthorizer.reset(); + mockedAuthorizer.setResponder(responder); + catalog.createTable(tableIdentifier, schema, spec, "my://location", props); + } + + public static class Profile extends S3UnitTestProfile { + @Override + public Map getConfigOverrides() { + return ImmutableMap.builder() + .putAll(super.getConfigOverrides()) + .putAll(BaseConfigProfile.CONFIG_OVERRIDES) + .putAll(AUTH_CONFIG_OVERRIDES) + .putAll(SECURITY_CONFIG) + 
.put("quarkus.http.auth.basic", "true") + // Need a dummy URL to satisfy the Quarkus OIDC extension. + .put("quarkus.oidc.auth-server-url", "http://127.255.0.0:0/auth/realms/unset/") + // + .put("nessie.catalog.default-warehouse", WAREHOUSE_NAME) + .put(CatalogProperties.WAREHOUSE_LOCATION, WAREHOUSE_NAME) + // + .put("nessie.server.authorization.enabled", "true") + .put("nessie.server.authorization.type", "MOCKED") + .build(); + } + } + + public static String basicAuthorizationHeader(String username, String password) { + String userPass = username + ':' + password; + byte[] encoded = Base64.getEncoder().encode(userPass.getBytes(StandardCharsets.UTF_8)); + String encodedString = new String(encoded, StandardCharsets.UTF_8); + return "Basic " + encodedString; + } +} diff --git a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java index f9da6c29fa6..8926dcd6285 100644 --- a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java +++ b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java @@ -20,6 +20,7 @@ import java.util.TreeMap; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.CatalogProperties; +import org.apache.iceberg.rest.HTTPClient; import org.apache.iceberg.rest.RESTCatalog; public class Catalogs implements AutoCloseable { @@ -33,7 +34,18 @@ public RESTCatalog getCatalog(Map options) { options, opts -> { int catalogServerPort = Integer.getInteger("quarkus.http.port"); - RESTCatalog c = new RESTCatalog(); + RESTCatalog c = + new RESTCatalog( + config -> { + var builder = HTTPClient.builder(config).uri(config.get(CatalogProperties.URI)); + config.entrySet().stream() + .filter(e -> e.getKey().startsWith("http.header.")) + .forEach( + e -> + builder.withHeader( + e.getKey().substring("http.header.".length()), e.getValue())); + return 
builder.build(); + }); c.setConf(new Configuration()); Map catalogOptions = new HashMap<>(); catalogOptions.put( diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java new file mode 100644 index 00000000000..208211af39f --- /dev/null +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.services.rest; + +import org.projectnessie.services.authz.ApiContext; + +public final class RestApiContext { + private RestApiContext() {} + + public static final ApiContext NESSIE_V1 = ApiContext.apiContext("Nessie", 1); + public static final ApiContext NESSIE_V2 = ApiContext.apiContext("Nessie", 2); +} diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java index e479d2a9620..4a916bfa321 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java @@ -15,6 +15,8 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -44,7 +46,7 @@ public RestConfigResource() { @Inject public RestConfigResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, 1); + this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } @Override diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java index e06ff2d2f45..c81b6b7bc59 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java @@ -15,6 +15,9 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; +import static 
org.projectnessie.versioned.RequestMeta.API_READ; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -52,7 +55,7 @@ public RestContentResource() { @Inject public RestContentResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.contentService = new ContentApiImpl(config, store, authorizer, accessContext); + this.contentService = new ContentApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private ContentService resource() { @@ -63,7 +66,7 @@ private ContentService resource() { @JsonView(Views.V1.class) public Content getContent(ContentKey key, String ref, String hashOnRef) throws NessieNotFoundException { - return resource().getContent(key, ref, hashOnRef, false, false).getContent(); + return resource().getContent(key, ref, hashOnRef, false, API_READ).getContent(); } @Override @@ -71,6 +74,7 @@ public Content getContent(ContentKey key, String ref, String hashOnRef) public GetMultipleContentsResponse getMultipleContents( String ref, String hashOnRef, GetMultipleContentsRequest request) throws NessieNotFoundException { - return resource().getMultipleContents(ref, hashOnRef, request.getRequestedKeys(), false, false); + return resource() + .getMultipleContents(ref, hashOnRef, request.getRequestedKeys(), false, API_READ); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java index 9e5b1e0a223..a10bc80b8fc 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java @@ -16,6 +16,7 @@ package org.projectnessie.services.rest; import static org.projectnessie.services.impl.RefUtil.toReference; +import static 
org.projectnessie.services.rest.RestApiContext.NESSIE_V1; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -55,7 +56,7 @@ public RestDiffResource() { @Inject public RestDiffResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.diffService = new DiffApiImpl(config, store, authorizer, accessContext); + this.diffService = new DiffApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private DiffService resource() { diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java index cdf32042caa..9c3e329d281 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java @@ -15,6 +15,9 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -57,7 +60,8 @@ public RestNamespaceResource() { @Inject public RestNamespaceResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.namespaceService = new NamespaceApiImpl(config, store, authorizer, accessContext); + this.namespaceService = + new NamespaceApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private NamespaceService resource() { @@ -68,7 +72,7 @@ private NamespaceService resource() { @JsonView(Views.V1.class) public Namespace createNamespace(NamespaceParams params, Namespace namespace) throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException { - return 
resource().createNamespace(params.getRefName(), namespace); + return resource().createNamespace(params.getRefName(), namespace, API_WRITE); } @Override @@ -105,6 +109,7 @@ public void updateProperties(NamespaceParams params, NamespaceUpdate namespaceUp params.getRefName(), params.getNamespace(), namespaceUpdate.getPropertyUpdates(), - namespaceUpdate.getPropertyRemovals()); + namespaceUpdate.getPropertyRemovals(), + API_WRITE); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java index 7c52ed84ed0..2f1e71941cb 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java @@ -17,7 +17,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; import static org.projectnessie.services.spi.TreeService.MAX_COMMIT_LOG_ENTRIES; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -73,7 +75,7 @@ public RestTreeResource() { @Inject public RestTreeResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.treeService = new TreeApiImpl(config, store, authorizer, accessContext); + this.treeService = new TreeApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private TreeService resource() { @@ -278,7 +280,7 @@ public Branch commitMultipleOperations( String branchName, String expectedHash, Operations operations) throws NessieNotFoundException, NessieConflictException { return resource() - .commitMultipleOperations(branchName, expectedHash, operations) + 
.commitMultipleOperations(branchName, expectedHash, operations, API_WRITE) .getTargetBranch(); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java index 043cf8075b0..b16aea3ea4f 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java @@ -15,6 +15,8 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V2; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -54,7 +56,7 @@ public RestV2ConfigResource() { @Inject public RestV2ConfigResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.config = new ConfigApiImpl(config, store, authorizer, accessContext, 2); + this.config = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V2); } @Override diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java index a3ae5b03078..f9421e1b53d 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java @@ -18,8 +18,11 @@ import static com.google.common.base.Preconditions.checkArgument; import static org.projectnessie.api.v2.params.ReferenceResolver.resolveReferencePathElement; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V2; import static 
org.projectnessie.services.rest.common.RestCommon.updateCommitMeta; import static org.projectnessie.services.spi.TreeService.MAX_COMMIT_LOG_ENTRIES; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -102,10 +105,10 @@ public RestV2TreeResource( Authorizer authorizer, AccessContext accessContext, HttpHeaders httpHeaders) { - this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, 2); - this.treeService = new TreeApiImpl(config, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(config, store, authorizer, accessContext); - this.diffService = new DiffApiImpl(config, store, authorizer, accessContext); + this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.treeService = new TreeApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.contentService = new ContentApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.diffService = new DiffApiImpl(config, store, authorizer, accessContext, NESSIE_V2); this.httpHeaders = httpHeaders; } @@ -356,7 +359,7 @@ public ContentResponse getContent( ParsedReference reference = parseRefPathString(ref); return content() .getContent( - key, reference.name(), reference.hashWithRelativeSpec(), withDocumentation, forWrite); + key, reference.name(), reference.hashWithRelativeSpec(), withDocumentation, API_READ); } @JsonView(Views.V2.class) @@ -381,7 +384,7 @@ public GetMultipleContentsResponse getMultipleContents( reference.hashWithRelativeSpec(), request.getRequestedKeys(), withDocumentation, - forWrite); + API_READ); } @JsonView(Views.V2.class) @@ -452,7 +455,8 @@ public CommitResponse commitMultipleOperations(String branch, Operations operati .commitMeta(commitMeta(CommitMeta.builder().from(operations.getCommitMeta())).build()); 
ParsedReference ref = parseRefPathString(branch); - return tree().commitMultipleOperations(ref.name(), ref.hashWithRelativeSpec(), ops.build()); + return tree() + .commitMultipleOperations(ref.name(), ref.hashWithRelativeSpec(), ops.build(), API_WRITE); } CommitMeta.Builder commitMeta(CommitMeta.Builder commitMeta) { diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java index 3d9ee9c6889..b4f5b3a7511 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java @@ -16,17 +16,19 @@ package org.projectnessie.services.authz; import static java.util.Collections.emptyMap; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.util.Collection; import java.util.LinkedHashSet; import java.util.Map; +import java.util.Set; import org.projectnessie.model.IdentifiedContentKey; import org.projectnessie.model.RepositoryConfig; import org.projectnessie.versioned.NamedRef; public abstract class AbstractBatchAccessChecker implements BatchAccessChecker { public static final BatchAccessChecker NOOP_ACCESS_CHECKER = - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("", 0)) { @Override public Map check() { return emptyMap(); @@ -38,8 +40,18 @@ public BatchAccessChecker can(Check check) { } }; + private final ApiContext apiContext; private final Collection checks = new LinkedHashSet<>(); + protected AbstractBatchAccessChecker(ApiContext apiContext) { + this.apiContext = apiContext; + } + + @Override + public ApiContext getApiContext() { + return apiContext; + } + protected Collection getChecks() { return checks; } @@ -84,6 +96,13 @@ public BatchAccessChecker canReadContentKey(NamedRef ref, IdentifiedContentKey i return 
can(Check.canReadContentKey(ref, identifiedKey)); } + @Override + public BatchAccessChecker canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canReadContentKey(ref, identifiedKey, actions)); + } + @Override public BatchAccessChecker canListCommitLog(NamedRef ref) { canViewReference(ref); @@ -102,6 +121,13 @@ public BatchAccessChecker canReadEntityValue(NamedRef ref, IdentifiedContentKey return can(Check.canReadEntityValue(ref, identifiedKey)); } + @Override + public BatchAccessChecker canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canReadEntityValue(ref, identifiedKey, actions)); + } + @Override @Deprecated public BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { @@ -109,6 +135,14 @@ public BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey ide return can(Check.canCreateEntity(ref, identifiedKey)); } + @Override + @Deprecated + public BatchAccessChecker canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canCreateEntity(ref, identifiedKey, actions)); + } + @Override @Deprecated public BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { @@ -116,12 +150,27 @@ public BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey ide return can(Check.canUpdateEntity(ref, identifiedKey)); } + @Override + @Deprecated + public BatchAccessChecker canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canUpdateEntity(ref, identifiedKey, actions)); + } + @Override public BatchAccessChecker canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { canViewReference(ref); return can(Check.canDeleteEntity(ref, identifiedKey)); } + @Override + public 
BatchAccessChecker canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canDeleteEntity(ref, identifiedKey, actions)); + } + @Override public BatchAccessChecker canReadRepositoryConfig(RepositoryConfig.Type repositoryConfigType) { return can(Check.canReadRepositoryConfig(repositoryConfigType)); diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java b/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java new file mode 100644 index 00000000000..19d2c747cc7 --- /dev/null +++ b/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.services.authz; + +import org.immutables.value.Value; + +@Value.Immutable +@Value.Style(allParameters = true) +public interface ApiContext { + String getApiName(); + + int getApiVersion(); + + static ApiContext apiContext(String apiName, int apiVersion) { + return ImmutableApiContext.of(apiName, apiVersion); + } +} diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java b/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java index 8a7020486cf..07d2113543b 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java @@ -27,7 +27,8 @@ public interface Authorizer { * Start an access-check batch/bulk operation. * * @param context The context carrying the principal information. + * @param apiContext API contextual information * @return access checker */ - BatchAccessChecker startAccessCheck(AccessContext context); + BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext); } diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java index 4bddfac540c..5aab3c86dc5 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java @@ -17,6 +17,7 @@ import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.Map; +import java.util.Set; import org.projectnessie.model.Branch; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Detached; @@ -54,6 +55,8 @@ public interface BatchAccessChecker { */ Map check(); + ApiContext getApiContext(); + /** * Convenience methods that throws an {@link AccessCheckException}, if {@link #check()} returns a * 
non-empty map. @@ -130,8 +133,13 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * * @param ref current reference * @param identifiedKey content key / ID / type to check + * @param actions contextual information, API actions/operations performed */ @CanIgnoreReturnValue + BatchAccessChecker canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canReadContentKey(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -164,8 +172,13 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * * @param ref The {@link NamedRef} to check * @param identifiedKey content key / ID / type to check + * @param actions contextual information, API actions/operations performed */ @CanIgnoreReturnValue + BatchAccessChecker canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canReadEntityValue(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -177,8 +190,13 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * * @param ref The {@link NamedRef} to check * @param identifiedKey content key / ID / type to check + * @param actions contextual information, API actions/operations performed */ @CanIgnoreReturnValue + BatchAccessChecker canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -192,6 +210,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -203,8 +225,13 @@ static void throwForFailedChecks(Map failedChecks) 
throws AccessC * * @param ref The {@link NamedRef} to check * @param identifiedKey content key / ID / type to check + * @param actions contextual information, API actions/operations performed */ @CanIgnoreReturnValue + BatchAccessChecker canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey); @CanIgnoreReturnValue diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/Check.java b/servers/services/src/main/java/org/projectnessie/services/authz/Check.java index d2e0e4dc8b3..108ecb5db32 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/Check.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/Check.java @@ -16,6 +16,8 @@ package org.projectnessie.services.authz; import jakarta.annotation.Nullable; +import jakarta.validation.constraints.NotNull; +import java.util.Set; import org.immutables.value.Value; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; @@ -53,24 +55,43 @@ public interface Check { @Value.Parameter(order = 7) RepositoryConfig.Type repositoryConfigType(); + @Value.Parameter(order = 8) + Set actions(); + static Check check(CheckType type) { - return ImmutableCheck.of(type, null, null, null, null, null, null); + return ImmutableCheck.of(type, null, null, null, null, null, null, Set.of()); } static Check check(CheckType type, RepositoryConfig.Type repositoryConfigType) { - return ImmutableCheck.of(type, null, null, null, null, null, repositoryConfigType); + return ImmutableCheck.of(type, null, null, null, null, null, repositoryConfigType, Set.of()); } static Check check(CheckType type, @Nullable NamedRef ref) { - return ImmutableCheck.of(type, ref, null, null, null, null, null); + return ImmutableCheck.of(type, ref, null, null, null, null, null, Set.of()); } static Check check(CheckType type, @Nullable NamedRef ref, 
@Nullable ContentKey key) { - return ImmutableCheck.of(type, ref, key, null, null, null, null); + return check(type, ref, key, Set.of()); + } + + static Check check( + CheckType type, + @Nullable NamedRef ref, + @Nullable ContentKey key, + @NotNull Set actions) { + return ImmutableCheck.of(type, ref, key, null, null, null, null, actions); } static Check check( CheckType type, @Nullable NamedRef ref, @Nullable IdentifiedContentKey identifiedKey) { + return check(type, ref, identifiedKey, Set.of()); + } + + static Check check( + CheckType type, + @Nullable NamedRef ref, + @Nullable IdentifiedContentKey identifiedKey, + @NotNull Set actions) { if (identifiedKey != null) { IdentifiedContentKey.IdentifiedElement element = identifiedKey.lastElement(); return ImmutableCheck.of( @@ -80,10 +101,11 @@ static Check check( element.contentId(), identifiedKey.type(), identifiedKey, - null); + null, + actions); } - return ImmutableCheck.of(type, ref, null, null, null, null, null); + return ImmutableCheck.of(type, ref, null, null, null, null, null, actions); } static ImmutableCheck.Builder builder(CheckType type) { @@ -167,10 +189,19 @@ static Check canReadContentKey(NamedRef ref, ContentKey key) { return check(CheckType.READ_CONTENT_KEY, ref, key); } + static Check canReadContentKey(NamedRef ref, ContentKey key, Set actions) { + return check(CheckType.READ_CONTENT_KEY, ref, key, actions); + } + static Check canReadContentKey(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.READ_CONTENT_KEY, ref, identifiedKey); } + static Check canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.READ_CONTENT_KEY, ref, identifiedKey, actions); + } + static Check canListCommitLog(NamedRef ref) { return check(CheckType.LIST_COMMIT_LOG, ref); } @@ -183,18 +214,38 @@ static Check canReadEntityValue(NamedRef ref, IdentifiedContentKey identifiedKey return check(CheckType.READ_ENTITY_VALUE, ref, identifiedKey); } + 
static Check canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.READ_ENTITY_VALUE, ref, identifiedKey, actions); + } + static Check canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.CREATE_ENTITY, ref, identifiedKey); } + static Check canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.CREATE_ENTITY, ref, identifiedKey, actions); + } + static Check canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.UPDATE_ENTITY, ref, identifiedKey); } + static Check canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.UPDATE_ENTITY, ref, identifiedKey, actions); + } + static Check canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.DELETE_ENTITY, ref, identifiedKey); } + static Check canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.DELETE_ENTITY, ref, identifiedKey, actions); + } + static Check canReadRepositoryConfig(RepositoryConfig.Type repositoryConfigType) { return check(CheckType.READ_REPOSITORY_CONFIG, repositoryConfigType); } diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java index ed3dc8f9cc8..721d1b94b99 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java @@ -29,19 +29,33 @@ */ public final class RetriableAccessChecker { private final Supplier validator; + private final ApiContext apiContext; private Collection validatedChecks; private Map result; - public RetriableAccessChecker(Supplier validator) { + public 
RetriableAccessChecker(Supplier validator, ApiContext apiContext) { Preconditions.checkNotNull(validator); this.validator = validator; + this.apiContext = apiContext; } public BatchAccessChecker newAttempt() { - return new Attempt(); + return new Attempt(apiContext); } private class Attempt extends AbstractBatchAccessChecker { + private final ApiContext apiContext; + + Attempt(ApiContext apiContext) { + super(apiContext); + this.apiContext = apiContext; + } + + @Override + public ApiContext getApiContext() { + return apiContext; + } + @Override public Map check() { // Shallow collection copy to ensure that we use what was current at the time of check diff --git a/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java b/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java index 6e157463c06..1b43f9944a4 100644 --- a/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java +++ b/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java @@ -50,6 +50,8 @@ public final class CELUtil { public static final String VAR_ROLE = "role"; public static final String VAR_ROLES = "roles"; public static final String VAR_OP = "op"; + public static final String VAR_ACTIONS = "actions"; + public static final String VAR_API = "api"; public static final String VAR_OPERATIONS = "operations"; public static final String VAR_CONTENT_TYPE = "contentType"; diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java index 9eb0be4c507..d018de721df 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java @@ -34,6 +34,7 @@ import org.projectnessie.model.CommitMeta; import org.projectnessie.model.ContentKey; import org.projectnessie.services.authz.AccessContext; +import 
org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -47,14 +48,20 @@ public abstract class BaseApiImpl { private final VersionStore store; private final Authorizer authorizer; private final AccessContext accessContext; + private final ApiContext apiContext; private HashResolver hashResolver; protected BaseApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { this.config = config; this.store = store; this.authorizer = authorizer; this.accessContext = accessContext; + this.apiContext = apiContext; } /** @@ -104,6 +111,10 @@ protected Authorizer getAuthorizer() { return authorizer; } + protected ApiContext getApiContext() { + return apiContext; + } + protected HashResolver getHashResolver() { if (hashResolver == null) { this.hashResolver = new HashResolver(config, store); @@ -112,7 +123,7 @@ protected HashResolver getHashResolver() { } protected BatchAccessChecker startAccessCheck() { - return getAuthorizer().startAccessCheck(accessContext); + return getAuthorizer().startAccessCheck(accessContext, apiContext); } protected MetadataRewriter commitMetaUpdate( diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java index 9b16458e96b..ea88fbdff24 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java @@ -26,6 +26,7 @@ import org.projectnessie.model.RepositoryConfig; import org.projectnessie.model.types.GenericRepositoryConfig; import org.projectnessie.services.authz.AccessContext; +import 
org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -36,16 +37,13 @@ public class ConfigApiImpl extends BaseApiImpl implements ConfigService { - private final int actualApiVersion; - public ConfigApiImpl( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext, - int actualApiVersion) { - super(config, store, authorizer, accessContext); - this.actualApiVersion = actualApiVersion; + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -58,7 +56,7 @@ public NessieConfiguration getConfig() { return ImmutableNessieConfiguration.builder() .from(NessieConfiguration.getBuiltInConfig()) .defaultBranch(defaultBranch) - .actualApiVersion(actualApiVersion) + .actualApiVersion(getApiContext().getApiVersion()) .noAncestorHash(info.getNoAncestorHash()) .repositoryCreationTimestamp(info.getRepositoryCreationTimestamp()) .oldestPossibleCommitTimestamp(info.getOldestPossibleCommitTimestamp()) diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java index 61ecd1dbf3c..d4f0764b5cf 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import org.projectnessie.error.NessieContentNotFoundException; import org.projectnessie.error.NessieNotFoundException; @@ -33,6 +34,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.model.Tag; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import 
org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -44,6 +46,7 @@ import org.projectnessie.versioned.DetachedRef; import org.projectnessie.versioned.NamedRef; import org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.TagName; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.WithHash; @@ -51,8 +54,12 @@ public class ContentApiImpl extends BaseApiImpl implements ContentService { public ContentApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -61,12 +68,13 @@ public ContentResponse getContent( String namedRef, String hashOnRef, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException { try { ResolvedHash ref = getHashResolver() .resolveHashOnRef(namedRef, hashOnRef, new HashValidator("Expected hash")); + boolean forWrite = requestMeta.forWrite(); ContentResult obj = getStore().getValue(ref.getHash(), key, forWrite); BatchAccessChecker accessCheck = startAccessCheck(); @@ -76,10 +84,11 @@ public ContentResponse getContent( accessCheck.canCommitChangeAgainstReference(r); } + Set actions = requestMeta.keyActions(key); if (obj != null && obj.content() != null) { - accessCheck.canReadEntityValue(r, obj.identifiedKey()); + accessCheck.canReadEntityValue(r, obj.identifiedKey(), actions); if (forWrite) { - accessCheck.canUpdateEntity(r, obj.identifiedKey()); + accessCheck.canUpdateEntity(r, obj.identifiedKey(), actions); } accessCheck.checkAndThrow(); @@ -89,8 +98,8 @@ public 
ContentResponse getContent( if (forWrite) { accessCheck - .canReadEntityValue(r, requireNonNull(obj, "obj is null").identifiedKey()) - .canCreateEntity(r, obj.identifiedKey()); + .canReadEntityValue(r, requireNonNull(obj, "obj is null").identifiedKey(), actions) + .canCreateEntity(r, obj.identifiedKey(), actions); } accessCheck.checkAndThrow(); @@ -106,7 +115,7 @@ public GetMultipleContentsResponse getMultipleContents( String hashOnRef, List keys, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException { try { ResolvedHash ref = @@ -115,6 +124,7 @@ public GetMultipleContentsResponse getMultipleContents( NamedRef r = ref.getValue(); BatchAccessChecker check = startAccessCheck().canViewReference(r); + boolean forWrite = requestMeta.forWrite(); if (forWrite) { check.canCommitChangeAgainstReference(r); } @@ -126,15 +136,16 @@ public GetMultipleContentsResponse getMultipleContents( e -> { ContentResult contentResult = e.getValue(); IdentifiedContentKey identifiedKey = contentResult.identifiedKey(); - check.canReadEntityValue(r, identifiedKey); + Set actions = requestMeta.keyActions(identifiedKey.contentKey()); + check.canReadEntityValue(r, identifiedKey, actions); if (contentResult.content() != null) { if (forWrite) { - check.canUpdateEntity(r, identifiedKey); + check.canUpdateEntity(r, identifiedKey, actions); } return true; } else { if (forWrite) { - check.canCreateEntity(r, identifiedKey); + check.canCreateEntity(r, identifiedKey, actions); } return false; } diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java index 3f21aa6a5f8..a4e5104d643 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java @@ -34,6 +34,7 @@ import org.projectnessie.model.ContentKey; import 
org.projectnessie.model.DiffResponse.DiffEntry; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthzPaginationIterator; import org.projectnessie.services.authz.Check; @@ -53,8 +54,12 @@ public class DiffApiImpl extends BaseApiImpl implements DiffService { public DiffApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java index 5ca740ce448..1fc562e5a66 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java @@ -20,6 +20,7 @@ import static org.projectnessie.error.ContentKeyErrorDetails.contentKeyErrorDetails; import static org.projectnessie.model.Validation.validateHash; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import static org.projectnessie.versioned.VersionStore.KeyRestrictions.NO_KEY_RESTRICTIONS; import com.google.common.base.Preconditions; @@ -51,6 +52,7 @@ import org.projectnessie.model.Operation.Delete; import org.projectnessie.model.Operation.Put; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import 
org.projectnessie.services.config.ServerConfig; @@ -62,18 +64,23 @@ import org.projectnessie.versioned.KeyEntry; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.paging.PaginationIterator; public class NamespaceApiImpl extends BaseApiImpl implements NamespaceService { public NamespaceApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override - public Namespace createNamespace(String refName, Namespace namespace) + public Namespace createNamespace(String refName, Namespace namespace, RequestMeta requestMeta) throws NessieReferenceNotFoundException { Preconditions.checkArgument(!namespace.isEmpty(), "Namespace name must not be empty"); @@ -104,7 +111,8 @@ public Namespace createNamespace(String refName, Namespace namespace) commit( BranchName.of(refWithHash.getValue().getName()), "create namespace '" + namespace.toCanonicalString() + "'", - put); + put, + requestMeta); Content content = getExplicitlyCreatedNamespace(namespace, hash).orElse(null); @@ -145,7 +153,8 @@ public void deleteNamespace(String refName, Namespace namespaceToDelete) commit( BranchName.of(refWithHash.getValue().getName()), "delete namespace '" + namespace.toCanonicalString() + "'", - delete); + delete, + API_WRITE); } catch (ReferenceNotFoundException | ReferenceConflictException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); } @@ -255,7 +264,8 @@ public void updateProperties( String refName, Namespace namespaceToUpdate, Map propertyUpdates, - Set propertyRemovals) + Set 
propertyRemovals, + RequestMeta requestMeta) throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { try { ResolvedHash refWithHash = getHashResolver().resolveToHead(refName); @@ -275,7 +285,8 @@ public void updateProperties( commit( BranchName.of(refWithHash.getValue().getName()), "update properties for namespace '" + updatedNamespace.toCanonicalString() + "'", - put); + put, + requestMeta); } catch (ReferenceNotFoundException | ReferenceConflictException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); @@ -356,7 +367,8 @@ private static NessieReferenceNotFoundException refNotFoundException( return new NessieReferenceNotFoundException(e.getMessage(), e); } - private Hash commit(BranchName branch, String commitMsg, Operation contentOperation) + private Hash commit( + BranchName branch, String commitMsg, Operation contentOperation, RequestMeta requestMeta) throws ReferenceNotFoundException, ReferenceConflictException { return getStore() .commit( @@ -371,15 +383,17 @@ private Hash commit(BranchName branch, String commitMsg, Operation contentOperat .operations() .forEach( op -> { + Set actions = + requestMeta.keyActions(op.identifiedKey().contentKey()); switch (op.operationType()) { case CREATE: - check.canCreateEntity(branch, op.identifiedKey()); + check.canCreateEntity(branch, op.identifiedKey(), actions); break; case UPDATE: - check.canUpdateEntity(branch, op.identifiedKey()); + check.canUpdateEntity(branch, op.identifiedKey(), actions); break; case DELETE: - check.canDeleteEntity(branch, op.identifiedKey()); + check.canDeleteEntity(branch, op.identifiedKey(), actions); break; default: throw new UnsupportedOperationException( diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java index 589710f2d46..51c24f66179 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java +++ 
b/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java @@ -41,6 +41,7 @@ import static org.projectnessie.services.cel.CELUtil.VAR_REF_META; import static org.projectnessie.services.cel.CELUtil.VAR_REF_TYPE; import static org.projectnessie.services.impl.RefUtil.toNamedRef; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; @@ -99,6 +100,7 @@ import org.projectnessie.model.Tag; import org.projectnessie.model.Validation; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthzPaginationIterator; import org.projectnessie.services.authz.BatchAccessChecker; @@ -124,6 +126,7 @@ import org.projectnessie.versioned.ReferenceHistory; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.TagName; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.VersionStore.CommitValidator; @@ -135,8 +138,12 @@ public class TreeApiImpl extends BaseApiImpl implements TreeService { public TreeApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -695,7 +702,7 @@ fromRef, hash, new HashValidator("Hash to transplant").hashMustBeUnambiguous()) .defaultMergeBehavior(defaultMergeBehavior(defaultMergeBehavior)) .dryRun(Boolean.TRUE.equals(dryRun)) .fetchAdditionalInfo(Boolean.TRUE.equals(fetchAdditionalInfo)) - 
.validator(createCommitValidator((BranchName) toRef.getNamedRef())) + .validator(createCommitValidator((BranchName) toRef.getNamedRef(), API_WRITE)) .build()); return createResponse(fetchAdditionalInfo, result); } catch (ReferenceNotFoundException e) { @@ -771,7 +778,7 @@ public MergeResponse mergeRefIntoBranch( .defaultMergeBehavior(defaultMergeBehavior(defaultMergeBehavior)) .dryRun(Boolean.TRUE.equals(dryRun)) .fetchAdditionalInfo(Boolean.TRUE.equals(fetchAdditionalInfo)) - .validator(createCommitValidator((BranchName) toRef.getNamedRef())) + .validator(createCommitValidator((BranchName) toRef.getNamedRef(), API_WRITE)) .build()); return createResponse(fetchAdditionalInfo, result); } catch (ReferenceNotFoundException e) { @@ -1042,7 +1049,7 @@ protected BiPredicate filterEntries(String filter) { @Override public CommitResponse commitMultipleOperations( - String branch, String expectedHash, Operations operations) + String branch, String expectedHash, Operations operations, RequestMeta requestMeta) throws NessieNotFoundException, NessieConflictException { CommitMeta commitMeta = operations.getCommitMeta(); @@ -1067,7 +1074,7 @@ public CommitResponse commitMultipleOperations( Optional.of(toRef.getHash()), commitMetaUpdate(null, numCommits -> null).rewriteSingle(commitMeta), operations.getOperations(), - createCommitValidator((BranchName) toRef.getNamedRef()), + createCommitValidator((BranchName) toRef.getNamedRef(), requestMeta), (key, cid) -> commitResponse.addAddedContents(addedContent(key, cid))) .getCommitHash(); @@ -1079,14 +1086,15 @@ public CommitResponse commitMultipleOperations( } } - private CommitValidator createCommitValidator(BranchName branchName) { + private CommitValidator createCommitValidator(BranchName branchName, RequestMeta requestMeta) { // Commits routinely run retries due to collisions on updating the HEAD of the branch. 
// Authorization is not dependent on the commit history, only on the collection of access // checks, which reflect the current commit. On retries, the commit data relevant to access // checks almost never changes. Therefore, we use RetriableAccessChecker to avoid re-validating // access checks (which could be a time-consuming operation) on subsequent retries, unless // authorization input data changes. - RetriableAccessChecker accessChecker = new RetriableAccessChecker(this::startAccessCheck); + RetriableAccessChecker accessChecker = + new RetriableAccessChecker(this::startAccessCheck, getApiContext()); return validation -> { BatchAccessChecker check = accessChecker.newAttempt(); check.canCommitChangeAgainstReference(branchName); @@ -1094,15 +1102,16 @@ private CommitValidator createCommitValidator(BranchName branchName) { .operations() .forEach( op -> { + Set keyActions = requestMeta.keyActions(op.identifiedKey().contentKey()); switch (op.operationType()) { case CREATE: - check.canCreateEntity(branchName, op.identifiedKey()); + check.canCreateEntity(branchName, op.identifiedKey(), keyActions); break; case UPDATE: - check.canUpdateEntity(branchName, op.identifiedKey()); + check.canUpdateEntity(branchName, op.identifiedKey(), keyActions); break; case DELETE: - check.canDeleteEntity(branchName, op.identifiedKey()); + check.canDeleteEntity(branchName, op.identifiedKey(), keyActions); break; default: throw new UnsupportedOperationException( diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java b/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java index 35d25972bc6..24af7133faa 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java @@ -31,6 +31,7 @@ import org.projectnessie.model.ContentResponse; import org.projectnessie.model.GetMultipleContentsResponse; import 
org.projectnessie.services.authz.AccessCheckException; +import org.projectnessie.versioned.RequestMeta; /** * Server-side interface to services managing the loading of content objects. @@ -47,7 +48,7 @@ public interface ContentService { * @param namedRef name of the reference * @param hashOnRef optional, ID of the commit or a commit specification * @param withDocumentation unused, pass {@code false} - * @param forWrite if {@code false}, "natural" read access checks will be performed. If {@code + * @param requestMeta if {@link RequestMeta#forWrite()} is {@code false}, "natural" read access checks will be performed. If {@code * true}, update/create access checks will be performed in addition to the read access checks. * @return the content response, if the content object exists * @throws NessieNotFoundException if the content object or the reference does not exist @@ -66,7 +67,7 @@ ContentResponse getContent( message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String hashOnRef, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException; /** @@ -76,7 +77,7 @@ ContentResponse getContent( * @param namedRef name of the reference * @param hashOnRef optional, ID of the commit or a commit specification * @param keys the keys of the content objects to retrieve * @param withDocumentation unused, pass {@code false} - * @param forWrite if {@code false}, "natural" read access checks will be performed. If {@code + * @param requestMeta if {@link RequestMeta#forWrite()} is {@code false}, "natural" read access checks will be performed. If {@code * true}, update/create access checks will be performed in addition to the read access checks. 
* @return the existing content objects * @throws NessieNotFoundException if the reference does not exist @@ -92,8 +93,8 @@ GetMultipleContentsResponse getMultipleContents( regexp = HASH_OR_RELATIVE_COMMIT_SPEC_REGEX, message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String hashOnRef, - @Valid @Size @jakarta.validation.constraints.Size(min = 1) List keys, + @Valid @Size @Size(min = 1) List keys, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException; } diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java b/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java index 81aba640292..d129d401b4a 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java @@ -23,6 +23,7 @@ import org.projectnessie.error.NessieReferenceNotFoundException; import org.projectnessie.model.GetNamespacesResponse; import org.projectnessie.model.Namespace; +import org.projectnessie.versioned.RequestMeta; /** * Server-side interface to services managing namespaces. 
@@ -32,14 +33,15 @@ */ public interface NamespaceService { - Namespace createNamespace(String refName, Namespace namespace) + Namespace createNamespace(String refName, Namespace namespace, RequestMeta requestMeta) throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException; void updateProperties( String refName, Namespace namespaceToUpdate, Map propertyUpdates, - Set propertyRemovals) + Set propertyRemovals, + RequestMeta requestMeta) throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException; void deleteNamespace(String refName, Namespace namespaceToDelete) diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java b/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java index 045f1610533..d8c9e962939 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java @@ -45,6 +45,7 @@ import org.projectnessie.model.Reference.ReferenceType; import org.projectnessie.model.ReferenceHistoryResponse; import org.projectnessie.versioned.NamedRef; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.WithHash; /** @@ -201,6 +202,7 @@ CommitResponse commitMultipleOperations( regexp = HASH_OR_RELATIVE_COMMIT_SPEC_REGEX, message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String expectedHash, - @Valid Operations operations) + @Valid Operations operations, + @NotNull RequestMeta requestMeta) throws NessieNotFoundException, NessieConflictException; } diff --git a/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java b/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java index b3c1d9d3838..e879ac1ce26 100644 --- a/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java +++ 
b/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.projectnessie.services.authz.ApiContext.apiContext; import com.google.common.collect.ImmutableMap; import java.util.Arrays; @@ -139,7 +140,7 @@ private static void performCheck(BatchAccessChecker checker, Check c) { checker.canReadEntries(c.ref()); break; case READ_CONTENT_KEY: - checker.canReadContentKey(c.ref(), c.identifiedKey()); + checker.canReadContentKey(c.ref(), c.identifiedKey(), c.actions()); break; case ASSIGN_REFERENCE_TO_HASH: checker.canAssignRefToHash(c.ref()); @@ -151,16 +152,16 @@ private static void performCheck(BatchAccessChecker checker, Check c) { checker.canCommitChangeAgainstReference(c.ref()); break; case READ_ENTITY_VALUE: - checker.canReadEntityValue(c.ref(), c.identifiedKey()); + checker.canReadEntityValue(c.ref(), c.identifiedKey(), c.actions()); break; case CREATE_ENTITY: - checker.canCreateEntity(c.ref(), c.identifiedKey()); + checker.canCreateEntity(c.ref(), c.identifiedKey(), c.actions()); break; case UPDATE_ENTITY: - checker.canUpdateEntity(c.ref(), c.identifiedKey()); + checker.canUpdateEntity(c.ref(), c.identifiedKey(), c.actions()); break; case DELETE_ENTITY: - checker.canDeleteEntity(c.ref(), c.identifiedKey()); + checker.canDeleteEntity(c.ref(), c.identifiedKey(), c.actions()); break; case READ_REPOSITORY_CONFIG: checker.canReadRepositoryConfig(c.repositoryConfigType()); @@ -191,7 +192,7 @@ private static List listWithAllCheckTypes() { static BatchAccessChecker newAccessChecker( Function, Map> check) { - return new AbstractBatchAccessChecker() { + return new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return check.apply(getChecks()); diff --git 
a/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java b/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java index b8bb7e660bc..d89c75b0694 100644 --- a/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java +++ b/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.IdentifiedContentKey.IdentifiedElement.identifiedElement; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.util.ArrayList; import java.util.HashMap; @@ -38,7 +39,7 @@ class TestRetriableAccessChecker { private final Supplier validator = () -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { checkCount++; @@ -50,7 +51,7 @@ public Map check() { @Test void checkAndThrow() { - RetriableAccessChecker checker = new RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); Check check = Check.check(Check.CheckType.CREATE_ENTITY); result.put(check, "test123"); assertThatThrownBy(() -> checker.newAttempt().can(check).checkAndThrow()) @@ -62,7 +63,7 @@ void checkAndThrow() { @Test void repeatedCheck() { - RetriableAccessChecker checker = new RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); Check c1 = Check.check(Check.CheckType.CREATE_ENTITY); Check c2 = Check.check(Check.CheckType.CREATE_REFERENCE); assertThat(checker.newAttempt().can(c1).can(c2).check()).isEmpty(); @@ -101,7 +102,7 @@ void dataChangeBetweenAttempts() { .addElements(ns2, tableElement) .build(); - RetriableAccessChecker checker = new 
RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); BranchName ref = BranchName.of("test"); assertThat(checker.newAttempt().canCreateEntity(ref, t1).check()).isEmpty(); assertThat(checked) diff --git a/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java b/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java index 6cd4ae93db3..01baa870832 100644 --- a/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java +++ b/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java @@ -17,6 +17,8 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; +import static org.projectnessie.services.authz.ApiContext.apiContext; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import org.junit.jupiter.api.Test; @@ -24,8 +26,8 @@ public class TestNamespaceApi { @Test public void emptyNamespaceCreation() { - NamespaceApiImpl api = new NamespaceApiImpl(null, null, null, null); - assertThatThrownBy(() -> api.createNamespace("main", EMPTY_NAMESPACE)) + NamespaceApiImpl api = new NamespaceApiImpl(null, null, null, null, apiContext("Nessie", 2)); + assertThatThrownBy(() -> api.createNamespace("main", EMPTY_NAMESPACE, API_WRITE)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Namespace name must not be empty"); } diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java index b02d85ca440..dd145917eec 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java @@ -27,12 
+27,14 @@ import static org.projectnessie.model.FetchOption.MINIMAL; import static org.projectnessie.model.IdentifiedContentKey.IdentifiedElement.identifiedElement; import static org.projectnessie.model.MergeBehavior.NORMAL; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.canCommitChangeAgainstReference; import static org.projectnessie.services.authz.Check.canCreateEntity; import static org.projectnessie.services.authz.Check.canDeleteEntity; import static org.projectnessie.services.authz.Check.canReadEntityValue; import static org.projectnessie.services.authz.Check.canUpdateEntity; import static org.projectnessie.services.authz.Check.canViewReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.ImmutableMap; import java.util.Collection; @@ -95,7 +97,7 @@ protected Set recordAccessChecks() { Set checks = new HashSet<>(); setBatchAccessChecker( c -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { checks.addAll(getChecks()); @@ -416,7 +418,7 @@ public void forbiddenContentKeys() throws Exception { setBatchAccessChecker( x -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return getChecks().stream() @@ -450,7 +452,7 @@ public void entriesAreFilteredBeforeAccessCheck() throws Exception { setBatchAccessChecker( x -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { getChecks() @@ -474,7 +476,7 @@ public Map check() { public void detachedRefAccessChecks() throws Exception { BatchAccessChecker accessChecker = - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { Map failed = new LinkedHashMap<>(); @@ -557,7 +559,7 @@ public Map check() { 
.isInstanceOf(AccessCheckException.class) .hasMessageContaining(READ_MSG); soft.assertThatThrownBy( - () -> contentApi().getContent(key, ref.getName(), ref.getHash(), false, false)) + () -> contentApi().getContent(key, ref.getName(), ref.getHash(), false, API_READ)) .describedAs("ref='%s', getContent", ref) .isInstanceOf(AccessCheckException.class) .hasMessageContaining(ENTITIES_MSG); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java index 7dacae232b2..e9b6c7e4a0c 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java @@ -21,6 +21,7 @@ import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.FetchOption.ALL; import static org.projectnessie.model.FetchOption.MINIMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.ImmutableList; import java.time.Instant; @@ -392,7 +393,9 @@ public void commitLogPaging() throws BaseNessieClientServerException { Put op; try { Content existing = - contentApi().getContent(key, branch.getName(), currentHash, false, false).getContent(); + contentApi() + .getContent(key, branch.getName(), currentHash, false, API_READ) + .getContent(); op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42, existing.getId())); } catch (NessieNotFoundException notFound) { op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42)); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java index 4c973fb7228..1fa999050dc 100644 --- 
a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java @@ -20,6 +20,7 @@ import static org.assertj.core.groups.Tuple.tuple; import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.FetchOption.ALL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.Maps; import java.util.List; @@ -293,7 +294,7 @@ public void verifyContentAndOperationTypesIndividually( soft.assertThat( contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, committed.getName(), committed.getHash(), false, API_READ)) .extracting(ContentResponse::getContent) .extracting(this::clearIdOnContent) .isEqualTo(put.getContent()); @@ -320,7 +321,11 @@ public void verifyContentAndOperationTypesIndividually( () -> contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, + committed.getName(), + committed.getHash(), + false, + API_READ)) .isInstanceOf(NessieNotFoundException.class); // Compare operation on HEAD commit with the committed operation @@ -343,7 +348,7 @@ public void verifyContentAndOperationTypesIndividually( soft.assertThat( contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, committed.getName(), committed.getHash(), false, API_READ)) .extracting(ContentResponse::getContent) .extracting(this::clearIdOnContent) .isEqualTo(contentAndOperationType.prepare.getContent()); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java index ce70ef91a79..bf687d9ca1c 100644 --- 
a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java @@ -20,6 +20,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.tuple; import static org.projectnessie.model.CommitMeta.fromMessage; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -407,7 +408,8 @@ private void checkNamespaces( soft.assertThat(namespaceApi().getNamespace(reference.getName(), reference.getHash(), ns)) .isNotNull(); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(reference.getName(), ns)) + soft.assertThatThrownBy( + () -> namespaceApi().createNamespace(reference.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespace)); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java index aa912bcaab6..bbff43933bb 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.FetchOption.MINIMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import org.junit.jupiter.api.Test; import org.projectnessie.error.BaseNessieClientServerException; @@ -51,7 +52,7 @@ public void testUnknownHashesOnValidNamedRefs() throws BaseNessieClientServerExc () -> contentApi() .getContent( - ContentKey.of("table0"), branch.getName(), invalidHash, 
false, false)) + ContentKey.of("table0"), branch.getName(), invalidHash, false, API_READ)) .isInstanceOf(NessieNotFoundException.class) .hasMessageContaining(String.format("Commit '%s' not found", invalidHash)); } diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java index f960fb17773..79a4189da29 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java @@ -28,6 +28,8 @@ import static org.projectnessie.model.MergeBehavior.DROP; import static org.projectnessie.model.MergeBehavior.FORCE; import static org.projectnessie.model.MergeBehavior.NORMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableList; import java.util.Collection; @@ -159,7 +161,7 @@ private void mergeTransplant( table1 = (IcebergTable) contentApi() - .getContent(key1, committed1.getName(), committed1.getHash(), false, false) + .getContent(key1, committed1.getName(), committed1.getHash(), false, API_READ) .getContent(); Branch committed2 = @@ -478,8 +480,8 @@ public void mergeWithNamespaces(ReferenceMode refMode) throws BaseNessieClientSe Namespace ns = Namespace.parse("a.b.c"); base = ensureNamespacesForKeysExist(base, ns.toContentKey()); branch = ensureNamespacesForKeysExist(branch, ns.toContentKey()); - namespaceApi().createNamespace(base.getName(), ns); - namespaceApi().createNamespace(branch.getName(), ns); + namespaceApi().createNamespace(base.getName(), ns, API_WRITE); + namespaceApi().createNamespace(branch.getName(), ns, API_WRITE); base = (Branch) getReference(base.getName()); branch = (Branch) getReference(branch.getName()); @@ -496,7 
+498,7 @@ public void mergeWithNamespaces(ReferenceMode refMode) throws BaseNessieClientSe table1 = (IcebergTable) contentApi() - .getContent(key1, committed1.getName(), committed1.getHash(), false, false) + .getContent(key1, committed1.getName(), committed1.getHash(), false, API_READ) .getContent(); Branch committed2 = @@ -664,7 +666,7 @@ public void mergeRecreateTableNoConflict() throws BaseNessieClientServerExceptio ContentResponse tableOnRootAfterMerge = contentApi() .getContent( - setup.key, rootAfterMerge.getName(), rootAfterMerge.getHash(), false, false); + setup.key, rootAfterMerge.getName(), rootAfterMerge.getHash(), false, API_READ); soft.assertThat(setup.tableOnWork.getContent().getId()) .isEqualTo(tableOnRootAfterMerge.getContent().getId()); @@ -729,7 +731,7 @@ private MergeRecreateTableSetup setupMergeRecreateTable() soft.assertThat(root).isNotEqualTo(lastRoot); ContentResponse tableOnRoot = - contentApi().getContent(key, root.getName(), root.getHash(), false, false); + contentApi().getContent(key, root.getName(), root.getHash(), false, API_READ); soft.assertThat(tableOnRoot.getEffectiveReference()).isEqualTo(root); Branch work = createBranch("recreateBranch", root); @@ -749,7 +751,7 @@ private MergeRecreateTableSetup setupMergeRecreateTable() soft.assertThat(work).isNotEqualTo(lastWork); ContentResponse tableOnWork = - contentApi().getContent(key, work.getName(), work.getHash(), false, false); + contentApi().getContent(key, work.getName(), work.getHash(), false, API_READ); soft.assertThat(tableOnWork.getEffectiveReference()).isEqualTo(work); soft.assertThat(tableOnWork.getContent().getId()) diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java index 143f0fd2507..6ed127b0100 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java +++ 
b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java @@ -25,6 +25,7 @@ import static org.projectnessie.model.MergeBehavior.NORMAL; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; import static org.projectnessie.services.impl.AbstractTestContents.contentAndOperationTypes; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -62,7 +63,7 @@ public abstract class AbstractTestNamespace extends BaseTestServiceImpl { public void testNamespaces(String namespaceName) throws BaseNessieClientServerException { Namespace ns = Namespace.parse(namespaceName); Branch branch = ensureNamespacesForKeysExist(createBranch("testNamespaces"), ns.toContentKey()); - Namespace namespace = namespaceApi().createNamespace(branch.getName(), ns); + Namespace namespace = namespaceApi().createNamespace(branch.getName(), ns, API_WRITE); soft.assertThat(namespace) .isNotNull() @@ -75,7 +76,7 @@ public void testNamespaces(String namespaceName) throws BaseNessieClientServerEx // the namespace in the error message will contain the representation with u001D String namespaceInErrorMsg = namespaceName.replace("\u0000", "\u001D"); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns)) + soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespaceInErrorMsg)); @@ -100,7 +101,9 @@ public void testNamespacesRetrieval() throws BaseNessieClientServerException { Branch branch = createBranch("namespace"); ThrowingExtractor createNamespace = - identifier -> namespaceApi().createNamespace(branch.getName(), Namespace.parse(identifier)); + identifier -> + namespaceApi() + .createNamespace(branch.getName(), Namespace.parse(identifier), 
API_WRITE); Namespace a = createNamespace.apply("a"); Namespace ab = createNamespace.apply("a.b"); @@ -213,7 +216,7 @@ public void testNamespaceMergeWithConflict() throws BaseNessieClientServerExcept Namespace ns = Namespace.parse("a.b.c"); base = ensureNamespacesForKeysExist(base, ns.toContentKey()); // create a namespace on the base branch - namespaceApi().createNamespace(base.getName(), ns); + namespaceApi().createNamespace(base.getName(), ns, API_WRITE); base = (Branch) getReference(base.getName()); // create a table with the same name on the other branch @@ -270,7 +273,7 @@ public void testNamespaceConflictWithOtherContent() throws BaseNessieClientServe commit(branch, fromMessage("add table"), Put.of(key, icebergTable)); Namespace ns = Namespace.of(elements); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns)) + soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage("Another content object with name 'a.b.c' already exists"); @@ -299,7 +302,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe identifier -> { Namespace namespace = Namespace.parse(identifier); - Namespace created = namespaceApi().createNamespace(branch.getName(), namespace); + Namespace created = + namespaceApi().createNamespace(branch.getName(), namespace, API_WRITE); soft.assertThat(created) .isNotNull() .extracting(Namespace::getElements, Namespace::toPathString) @@ -308,7 +312,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe soft.assertThat(namespaceApi().getNamespace(branch.getName(), null, namespace)) .isEqualTo(created); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), namespace)) + soft.assertThatThrownBy( + () -> namespaceApi().createNamespace(branch.getName(), namespace, API_WRITE)) .cause() 
.isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespace.name())); @@ -371,7 +376,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe public void testEmptyNamespace() throws BaseNessieClientServerException { Branch branch = createBranch("emptyNamespace"); // can't create/fetch/delete an empty namespace due to empty REST path - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), EMPTY_NAMESPACE)) + soft.assertThatThrownBy( + () -> namespaceApi().createNamespace(branch.getName(), EMPTY_NAMESPACE, API_WRITE)) .isInstanceOf(Exception.class); soft.assertThatThrownBy( @@ -407,7 +413,8 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException Namespace ns = namespaceApi() - .createNamespace(branch.getName(), Namespace.of(namespace.getElements(), properties)); + .createNamespace( + branch.getName(), Namespace.of(namespace.getElements(), properties), API_WRITE); soft.assertThat(ns.getProperties()).isEqualTo(properties); soft.assertThat(ns.getId()).isNotNull(); String nsId = ns.getId(); @@ -416,7 +423,11 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException () -> namespaceApi() .updateProperties( - branch.getName(), Namespace.of("non-existing"), properties, emptySet())) + branch.getName(), + Namespace.of("non-existing"), + properties, + emptySet(), + API_WRITE)) .isInstanceOf(NessieNamespaceNotFoundException.class) .hasMessage("Namespace 'non-existing' does not exist"); @@ -429,11 +440,12 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException branch.getName(), Namespace.of("non-existing"), emptyMap(), - properties.keySet())) + properties.keySet(), + API_WRITE)) .isInstanceOf(NessieNamespaceNotFoundException.class) .hasMessage("Namespace 'non-existing' does not exist"); - namespaceApi().updateProperties(branch.getName(), namespace, properties, emptySet()); + 
namespaceApi().updateProperties(branch.getName(), namespace, properties, emptySet(), API_WRITE); // namespace does not exist at the previous hash soft.assertThatThrownBy( @@ -450,7 +462,8 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException updated.getName(), namespace, ImmutableMap.of("key3", "val3", "key1", "xyz"), - ImmutableSet.of("key2", "key5")); + ImmutableSet.of("key2", "key5"), + API_WRITE); // "updated" still points to the hash prior to the update soft.assertThat( diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java index 1e42192e50c..2a488a52c35 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java @@ -20,7 +20,10 @@ import static org.projectnessie.model.FetchOption.MINIMAL; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; import static org.projectnessie.model.Reference.ReferenceType.TAG; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import static org.projectnessie.versioned.storage.common.logic.Logics.repositoryLogic; import com.google.common.collect.ImmutableMap; @@ -95,7 +98,7 @@ public boolean sendStacktraceToClient() { }; protected static final Authorizer NOOP_AUTHORIZER = - context -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + (context, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; @InjectSoftAssertions protected SoftAssertions soft; @@ -103,23 +106,28 @@ public boolean sendStacktraceToClient() { private Principal principal; protected final 
ConfigApiImpl configApi() { - return new ConfigApiImpl(config(), versionStore(), authorizer(), this::principal, 2); + return new ConfigApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final TreeApiImpl treeApi() { - return new TreeApiImpl(config(), versionStore(), authorizer(), this::principal); + return new TreeApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final ContentApiImpl contentApi() { - return new ContentApiImpl(config(), versionStore(), authorizer(), this::principal); + return new ContentApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final DiffApiImpl diffApi() { - return new DiffApiImpl(config(), versionStore(), authorizer(), this::principal); + return new DiffApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final NamespaceApiImpl namespaceApi() { - return new NamespaceApiImpl(config(), versionStore(), authorizer(), this::principal); + return new NamespaceApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected Principal principal() { @@ -140,7 +148,7 @@ protected void setAuthorizer(Authorizer authorizer) { protected void setBatchAccessChecker( Function batchAccessChecker) { - this.authorizer = batchAccessChecker::apply; + this.authorizer = (t, apiContext) -> batchAccessChecker.apply(t); } protected VersionStore versionStore() { @@ -451,7 +459,7 @@ protected CommitResponse commit( throws NessieConflictException, NessieNotFoundException { Operations ops = ImmutableOperations.builder().addOperations(operations).commitMeta(meta).build(); - return treeApi().commitMultipleOperations(branch, expectedHash, ops); + return treeApi().commitMultipleOperations(branch, expectedHash, ops, API_WRITE); } protected Map contents(Reference reference, ContentKey... 
keys) @@ -473,7 +481,8 @@ protected Map contents( String refName, String hashOnRef, boolean forWrite, ContentKey... keys) throws NessieNotFoundException { return contentApi() - .getMultipleContents(refName, hashOnRef, Arrays.asList(keys), false, forWrite) + .getMultipleContents( + refName, hashOnRef, Arrays.asList(keys), false, forWrite ? API_WRITE : API_READ) .getContents() .stream() .collect(Collectors.toMap(ContentWithKey::getKey, ContentWithKey::getContent)); @@ -487,7 +496,7 @@ protected ContentResponse content(Reference reference, boolean forWrite, Content protected ContentResponse content( String refName, String hashOnRef, boolean forWrite, ContentKey key) throws NessieNotFoundException { - return contentApi().getContent(key, refName, hashOnRef, false, forWrite); + return contentApi().getContent(key, refName, hashOnRef, false, forWrite ? API_WRITE : API_READ); } protected String createCommits( @@ -502,7 +511,7 @@ protected String createCommits( try { Content existing = contentApi() - .getContent(key, branch.getName(), currentHash, false, false) + .getContent(key, branch.getName(), currentHash, false, API_READ) .getContent(); op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42, existing.getId())); } catch (NessieContentNotFoundException notFound) { diff --git a/site/in-dev/authorization.md b/site/in-dev/authorization.md index 4a96cce8353..05a60ebd2ba 100644 --- a/site/in-dev/authorization.md +++ b/site/in-dev/authorization.md @@ -88,6 +88,80 @@ Certain variables are available within the `` depending on cont * **path** - refers to the URI path representation (`ContentKey.toPathString()`) of the [content key](https://github.com/projectnessie/nessie/blob/main/api/model/src/main/java/org/projectnessie/model/ContentKey.java) for the object related to the authorization check. 
* **contentType** - refers to a (possibly empty) string representing the name of the object's [`Content.Type`](https://github.com/projectnessie/nessie/blob/main/api/model/src/main/java/org/projectnessie/model/Content.java). * **type** - refers to the repository config type to be retrieved or updated. +* **api** - contains information about the receiving API. This is a composite object with two properties: + * **apiName** - the name of the API, can be `Nessie` or `Iceberg` + * **apiVersion** - the version of the API: for `Nessie` it can be 1 or 2, for `Iceberg` currently 1 +* **actions** - a list of actions (strings), available for some Iceberg endpoints. + +#### Actions + +The list of `actions` (strings) is available for some Iceberg endpoints that perform changes against +an entity (table, view, namespace). The list of actions is empty for the Nessie REST API. + +**Catalog operations** + +Available for all updating Iceberg endpoints +for the `Check` types `CREATE_ENTITY`, `UPDATE_ENTITY` and `DELETE_ENTITY`. + +* `CATALOG_CREATE_ENTITY` - creating a table/view/namespace +* `CATALOG_UPDATE_ENTITY` - updating a table/view/namespace +* `CATALOG_DROP_ENTITY` - dropping a table/view/namespace +* `CATALOG_RENAME_ENTITY_FROM` - renaming a table (from) +* `CATALOG_RENAME_ENTITY_TO` - renaming a table (to) +* `CATALOG_REGISTER_ENTITY` - registering an existing table +* `CATALOG_UPDATE_MULTIPLE` - updating multiple tables +* `CATALOG_S3_SIGN` - S3 request signing + +**Iceberg metadata updates** + +Available for Iceberg endpoints that update entities, representing the kinds of metadata updates, +for the `Check` types `CREATE_ENTITY`, `UPDATE_ENTITY`.
+ +* `META_ADD_VIEW_VERSION` +* `META_SET_CURRENT_VIEW_VERSION` +* `META_SET_STATISTICS` +* `META_REMOVE_STATISTICS` +* `META_SET_PARTITION_STATISTICS` +* `META_REMOVE_PARTITION_STATISTICS` +* `META_ASSIGN_UUID` +* `META_ADD_SCHEMA` +* `META_SET_CURRENT_SCHEMA` +* `META_ADD_PARTITION_SPEC` +* `META_SET_DEFAULT_PARTITION_SPEC` +* `META_ADD_SNAPSHOT` +* `META_ADD_SORT_ORDER` +* `META_SET_DEFAULT_SORT_ORDER` +* `META_SET_LOCATION` +* `META_SET_PROPERTIES` +* `META_REMOVE_PROPERTIES` +* `META_REMOVE_LOCATION_PROPERTY` +* `META_SET_SNAPSHOT_REF` +* `META_REMOVE_SNAPSHOT_REF` +* `META_UPGRADE_FORMAT_VERSION` + +**from Iceberg's snapshot summary** + +Available for Iceberg updates that add a snapshot, for the `Check` types `CREATE_ENTITY`, `UPDATE_ENTITY`. + +* `SNAP_ADD_DATA_FILES` +* `SNAP_DELETE_DATA_FILES` +* `SNAP_ADD_DELETE_FILES` +* `SNAP_ADD_EQUALITY_DELETE_FILES` +* `SNAP_ADD_POSITION_DELETE_FILES` +* `SNAP_REMOVE_DELETE_FILES` +* `SNAP_REMOVE_EQUALITY_DELETE_FILES` +* `SNAP_REMOVE_POSITION_DELETE_FILES` +* `SNAP_ADDED_RECORDS` +* `SNAP_DELETED_RECORDS` +* `SNAP_ADDED_POSITION_DELETES` +* `SNAP_DELETED_POSITION_DELETES` +* `SNAP_ADDED_EQUALITY_DELETES` +* `SNAP_DELETED_EQUALITY_DELETES` +* `SNAP_REPLACE_PARTITIONS` +* `SNAP_OP_APPEND` +* `SNAP_OP_REPLACE` +* `SNAP_OP_OVERWRITE` +* `SNAP_OP_DELETE` #### Checks for Reference operations @@ -100,12 +174,14 @@ Applicable `op` types: * `DELETE_REFERENCE` * `READ_ENTRIES` * `LIST_COMMIT_LOG` +* `COMMIT_CHANGE_AGAINST_REFERENCE` Available variables: * `role` * `roles` * `ref` +* `api` #### Checks for Content operations @@ -124,6 +201,8 @@ Available variables: * `ref` * `path` * `contentType` +* `api` +* `actions` (for `CREATE_ENTITY`, `UPDATE_ENTITY`, `DELETE_ENTITY` against Iceberg REST) #### Checks for Repository Config operations @@ -137,6 +216,7 @@ Available variables: * `role` * `roles` * `type` +* `api` #### Relevant CEL features diff --git
a/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java b/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java index 53b31f56eb1..ab697558c25 100644 --- a/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java +++ b/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java @@ -16,12 +16,14 @@ package org.projectnessie.nessie.combined; import static org.projectnessie.nessie.combined.EmptyHttpHeaders.emptyHttpHeaders; +import static org.projectnessie.services.authz.ApiContext.apiContext; import org.projectnessie.client.NessieClientBuilder; import org.projectnessie.client.api.NessieApi; import org.projectnessie.client.api.NessieApiV2; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.rest.RestV2ConfigResource; @@ -38,6 +40,7 @@ public class CombinedClientBuilder extends NessieClientBuilder.AbstractNessieCli private Persist persist; private RestV2ConfigResource configResource; private RestV2TreeResource treeResource; + private ApiContext apiContext = apiContext("Nessie", 2); public CombinedClientBuilder() {} @@ -66,6 +69,11 @@ public CombinedClientBuilder withPersist(Persist persist) { return this; } + public CombinedClientBuilder withApiContext(ApiContext apiContext) { + this.apiContext = apiContext; + return this; + } + @Override public API build(Class apiContract) { RestV2ConfigResource configResource = this.configResource; @@ -97,7 +105,7 @@ public boolean sendStacktraceToClient() { }; VersionStore versionStore = new VersionStoreImpl(persist); - Authorizer authorizer = c -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + Authorizer 
authorizer = (c, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; AccessContext accessContext = () -> null; diff --git a/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java b/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java new file mode 100644 index 00000000000..78c37234916 --- /dev/null +++ b/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned; + +import com.google.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import org.immutables.value.Value; +import org.projectnessie.model.ContentKey; + +/** Additional information related to the incoming API request. */ +@Value.Immutable +@Value.Style(allParameters = true) +public interface RequestMeta { + /** Indicates whether access checks shall be performed for a write/update request. 
*/ + boolean forWrite(); + + @Value.Default + default Map> keyActions() { + return Map.of(); + } + + default Set keyActions(ContentKey key) { + return keyActions().getOrDefault(key, Set.of()); + } + + static RequestMetaBuilder apiWrite() { + return new RequestMetaBuilder().forWrite(true); + } + + static RequestMetaBuilder apiRead() { + return new RequestMetaBuilder().forWrite(false); + } + + RequestMeta API_WRITE = apiWrite().build(); + RequestMeta API_READ = apiRead().build(); + + final class RequestMetaBuilder { + private final Map> keyActions = new HashMap<>(); + private boolean forWrite; + + public RequestMetaBuilder forWrite(boolean forWrite) { + this.forWrite = forWrite; + return this; + } + + public RequestMetaBuilder addKeyAction(ContentKey key, String name) { + keyActions.computeIfAbsent(key, x -> new HashSet<>()).add(name); + return this; + } + + public RequestMetaBuilder addKeyActions(ContentKey key, Set names) { + keyActions.computeIfAbsent(key, x -> new HashSet<>()).addAll(names); + return this; + } + + public RequestMetaBuilder newBuilder() { + RequestMetaBuilder newBuilder = new RequestMetaBuilder().forWrite(forWrite); + keyActions.forEach(newBuilder::addKeyActions); + return newBuilder; + } + + public RequestMeta build() { + var immutableKeyActions = ImmutableMap.>builder(); + keyActions.forEach((k, v) -> immutableKeyActions.put(k, Set.copyOf(v))); + return ImmutableRequestMeta.of(forWrite, immutableKeyActions.build()); + } + } +}