From f1cea061e3a945d30c531d52847676e0b8f3a87a Mon Sep 17 00:00:00 2001
From: Robert Stupp
Date: Tue, 17 Sep 2024 17:01:38 +0200
Subject: [PATCH] Richer access checks

This change introduces the ability to distinguish individual access checks by the
(external) API being used (Nessie, Iceberg) and, for Nessie Catalog (Iceberg REST),
adds information about the kind(s) of changes being applied.

The individual changes that can be distinguished are:
* Catalog API operation
* Metadata update actions, with special actions regarding the `location` property
* Snapshot operation
* Snapshot summary extracts (for example, whether a snapshot added or removed
  data/delete files)

All new information can be retrieved from the existing `Check` type via new
attributes exposed through `AccessCheckMeta`, which holds the source API, the
"for write" flag and per-content-key flags. The flags represent the individual
changes mentioned above.

Fixes #9559 (and more)
---
 CHANGELOG.md | 3 +
 .../formats/iceberg/nessie/CatalogOps.java | 74 ++++
 .../IcebergTableMetadataUpdateState.java | 10 +
 .../IcebergViewMetadataUpdateState.java | 10 +
 .../iceberg/rest/IcebergMetadataUpdate.java | 123 +++++++
 catalog/service/common/build.gradle.kts | 1 +
 .../catalog/service/api/CatalogService.java | 11 +-
 .../service/impl/CatalogServiceImpl.java | 61 ++--
 .../service/impl/MultiTableUpdate.java | 28 +-
 .../service/impl/AbstractCatalogService.java | 21 +-
 .../service/impl/TestCatalogServiceImpl.java | 32 +-
 .../service/rest/AbstractCatalogResource.java | 3 +-
 .../rest/IcebergApiV1GenericResource.java | 8 +-
 .../rest/IcebergApiV1NamespaceResource.java | 103 ++++--
 .../rest/IcebergApiV1ResourceBase.java | 52 ++-
 .../rest/IcebergApiV1TableResource.java | 25 +-
 .../rest/IcebergApiV1ViewResource.java | 15 +-
 .../service/rest/IcebergS3SignParams.java | 4 +-
 .../service/rest/NessieCatalogResource.java | 7 +-
 .../service/rest/TestIcebergS3SignParams.java | 24 +-
 .../jersey/AuthorizerExtension.java | 3 +-
 .../jaxrs/ext/AuthorizerExtension.java | 3 +-
 .../server/authz/CelAuthorizer.java | 5 +-
 .../server/authz/CelBatchAccessChecker.java | 30 +-
 .../server/authz/QuarkusAuthorizer.java | 7 +-
 .../server/authz/TestCELAuthZ.java | 11 +-
 servers/quarkus-server/build.gradle.kts | 2 +
 .../server/authz/MockedAuthorizer.java | 101 ++++++
 .../server/authz/TestAuthzMeta.java | 336 ++++++++++++++++++
 .../catalog/AbstractIcebergCatalogTests.java | 6 +-
 .../server/catalog/Catalogs.java | 14 +-
 .../services/rest/RestApiContext.java | 25 ++
 .../services/rest/RestConfigResource.java | 4 +-
 .../services/rest/RestContentResource.java | 10 +-
 .../services/rest/RestDiffResource.java | 3 +-
 .../services/rest/RestNamespaceResource.java | 11 +-
 .../services/rest/RestTreeResource.java | 6 +-
 .../services/rest/RestV2ConfigResource.java | 4 +-
 .../services/rest/RestV2TreeResource.java | 18 +-
 .../authz/AbstractBatchAccessChecker.java | 51 ++-
 .../services/authz/ApiContext.java | 30 ++
 .../services/authz/Authorizer.java | 3 +-
 .../services/authz/BatchAccessChecker.java | 23 ++
 .../projectnessie/services/authz/Check.java | 63 +++-
 .../authz/RetriableAccessChecker.java | 18 +-
 .../projectnessie/services/cel/CELUtil.java | 2 +
 .../services/impl/BaseApiImpl.java | 15 +-
 .../services/impl/ConfigApiImpl.java | 10 +-
 .../services/impl/ContentApiImpl.java | 48 ++-
 .../services/impl/DiffApiImpl.java | 9 +-
 .../services/impl/NamespaceApiImpl.java | 36 +-
 .../services/impl/TreeApiImpl.java | 31 +-
 .../services/spi/ContentService.java | 11 +-
 .../services/spi/NamespaceService.java | 6 +-
 .../services/spi/TreeService.java | 4 +-
 .../authz/TestBatchAccessChecker.java | 13 +-
 .../authz/TestRetriableAccessChecker.java | 9 +-
 .../services/impl/TestNamespaceApi.java | 6 +-
 .../impl/AbstractTestAccessChecks.java | 12 +-
 .../services/impl/AbstractTestCommitLog.java | 5 +-
 .../services/impl/AbstractTestContents.java | 11 +-
 .../services/impl/AbstractTestEntries.java | 4 +-
 .../impl/AbstractTestInvalidRefs.java | 3 +-
 .../impl/AbstractTestMergeTransplant.java | 16 +-
 .../services/impl/AbstractTestNamespace.java | 39 +-
 .../services/impl/BaseTestServiceImpl.java | 31 +-
 site/in-dev/authorization.md | 79 ++++
 .../combined/CombinedClientBuilder.java | 10 +-
 .../projectnessie/versioned/RequestMeta.java | 86 +++++
 69 files changed, 1615 insertions(+), 283 deletions(-)
 create mode 100644 catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java
 create mode 100644 servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java
 create mode 100644 servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java
 create mode 100644 servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java
 create mode 100644 servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java
 create mode 100644 versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8049d4c0d33..f82e062c8f3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,9 @@ as necessary. Empty sections will not end in the release notes.
 
 ### New Features
 
+- Access check SPI has been enhanced to provide richer information in the `Check` type about the receiving
+  API (Nessie REST or Iceberg REST) and about the individual changes, especially during a commit operation.
+
 ### Changes
 
 ### Deprecations
diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java
new file mode 100644
index 00000000000..459354a9b1c
--- /dev/null
+++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/CatalogOps.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2024 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.projectnessie.catalog.formats.iceberg.nessie;
+
+/**
+ * Enum serving as a "constants pool" for the string values passed to Nessie access control checks.
+ */ +public enum CatalogOps { + // Iceberg metadata updates + META_ADD_VIEW_VERSION, + META_SET_CURRENT_VIEW_VERSION, + META_SET_STATISTICS, + META_REMOVE_STATISTICS, + META_SET_PARTITION_STATISTICS, + META_REMOVE_PARTITION_STATISTICS, + META_ASSIGN_UUID, + META_ADD_SCHEMA, + META_SET_CURRENT_SCHEMA, + META_ADD_PARTITION_SPEC, + META_SET_DEFAULT_PARTITION_SPEC, + META_ADD_SNAPSHOT, + META_ADD_SORT_ORDER, + META_SET_DEFAULT_SORT_ORDER, + META_SET_LOCATION, + META_SET_PROPERTIES, + META_REMOVE_PROPERTIES, + META_REMOVE_LOCATION_PROPERTY, + META_SET_SNAPSHOT_REF, + META_REMOVE_SNAPSHOT_REF, + META_UPGRADE_FORMAT_VERSION, + + // Catalog operations + CATALOG_CREATE_ENTITY, + CATALOG_UPDATE_ENTITY, + CATALOG_DROP_ENTITY, + CATALOG_RENAME_ENTITY_FROM, + CATALOG_RENAME_ENTITY_TO, + CATALOG_REGISTER_ENTITY, + CATALOG_UPDATE_MULTIPLE, + + // From Iceberg's snapshot summary + SNAP_ADD_DATA_FILES, + SNAP_DELETE_DATA_FILES, + SNAP_ADD_DELETE_FILES, + SNAP_ADD_EQUALITY_DELETE_FILES, + SNAP_ADD_POSITION_DELETE_FILES, + SNAP_REMOVE_DELETE_FILES, + SNAP_REMOVE_EQUALITY_DELETE_FILES, + SNAP_REMOVE_POSITION_DELETE_FILES, + SNAP_ADDED_RECORDS, + SNAP_DELETED_RECORDS, + SNAP_ADDED_POSITION_DELETES, + SNAP_DELETED_POSITION_DELETES, + SNAP_ADDED_EQUALITY_DELETES, + SNAP_DELETED_EQUALITY_DELETES, + SNAP_REPLACE_PARTITIONS, + SNAP_OP_APPEND, + SNAP_OP_REPLACE, + SNAP_OP_OVERWRITE, + SNAP_OP_DELETE, +} diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java index cb821cfa015..ffa96880fc1 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergTableMetadataUpdateState.java @@ -20,6 +20,7 @@ import java.time.Instant; import java.util.ArrayList; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -59,6 +60,7 @@ public class IcebergTableMetadataUpdateState { private final Set addedSchemaIds = new HashSet<>(); private final Set addedSpecIds = new HashSet<>(); private final Set addedOrderIds = new HashSet<>(); + private final Set catalogOps = EnumSet.noneOf(CatalogOps.class); public IcebergTableMetadataUpdateState( NessieTableSnapshot snapshot, ContentKey key, boolean tableExists) { @@ -72,6 +74,14 @@ public NessieTableSnapshot.Builder builder() { return builder; } + public void addCatalogOp(CatalogOps op) { + catalogOps.add(op); + } + + public Set catalogOps() { + return catalogOps; + } + public NessieTableSnapshot snapshot() { return snapshot; } diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java index dce42a239e6..5d13e777637 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/nessie/IcebergViewMetadataUpdateState.java @@ -19,6 +19,7 @@ import java.time.Instant; import java.util.ArrayList; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -50,6 +51,7 @@ public class 
IcebergViewMetadataUpdateState { private final List addedSnapshots = new ArrayList<>(); private final Set addedSchemaIds = new HashSet<>(); private final Set addedVersionIds = new HashSet<>(); + private final Set catalogOps = EnumSet.noneOf(CatalogOps.class); public IcebergViewMetadataUpdateState( NessieViewSnapshot snapshot, ContentKey key, boolean viewExists) { @@ -63,6 +65,14 @@ public NessieViewSnapshot.Builder builder() { return builder; } + public void addCatalogOp(CatalogOps op) { + catalogOps.add(op); + } + + public Set catalogOps() { + return catalogOps; + } + public NessieViewSnapshot snapshot() { return snapshot; } diff --git a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java index 0ae81163760..5d64bc992bf 100644 --- a/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java +++ b/catalog/format/iceberg/src/main/java/org/projectnessie/catalog/formats/iceberg/rest/IcebergMetadataUpdate.java @@ -33,6 +33,7 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -46,6 +47,7 @@ import org.projectnessie.catalog.formats.iceberg.meta.IcebergStatisticsFile; import org.projectnessie.catalog.formats.iceberg.meta.IcebergViewRepresentation; import org.projectnessie.catalog.formats.iceberg.meta.IcebergViewVersion; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.nessie.IcebergTableMetadataUpdateState; import org.projectnessie.catalog.formats.iceberg.nessie.IcebergViewMetadataUpdateState; import org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg; @@ -130,11 +132,13 @@ static UpgradeFormatVersion upgradeFormatVersion(int formatVersion) { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_UPGRADE_FORMAT_VERSION); NessieModelIceberg.upgradeFormatVersion(formatVersion(), state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_UPGRADE_FORMAT_VERSION); NessieModelIceberg.upgradeFormatVersion(formatVersion(), state.snapshot(), state.builder()); } } @@ -165,11 +169,19 @@ interface RemoveProperties extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PROPERTIES); + if (removals().contains("location")) { + state.addCatalogOp(CatalogOps.META_REMOVE_LOCATION_PROPERTY); + } NessieModelIceberg.removeProperties(this, state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PROPERTIES); + if (removals().contains("location")) { + state.addCatalogOp(CatalogOps.META_REMOVE_LOCATION_PROPERTY); + } NessieModelIceberg.removeProperties(this, state.snapshot(), state.builder()); } } @@ -184,6 +196,7 @@ interface AddViewVersion extends IcebergMetadataUpdate { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_VIEW_VERSION); NessieModelIceberg.addViewVersion(this, state); } @@ -214,6 +227,7 @@ interface SetCurrentViewVersion extends 
IcebergMetadataUpdate { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_VIEW_VERSION); NessieModelIceberg.setCurrentViewVersion(this, state); } @@ -234,6 +248,7 @@ interface SetStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().statisticsFiles(singleton(icebergStatisticsFileToNessie(statistics()))); @@ -251,6 +266,7 @@ interface RemoveStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().statisticsFiles(emptyList()); @@ -268,6 +284,7 @@ interface SetPartitionStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PARTITION_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == partitionStatistics().snapshotId()) { state @@ -289,6 +306,7 @@ interface RemovePartitionStatistics extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_PARTITION_STATISTICS); long snapshotId = Objects.requireNonNull(state.snapshot().icebergSnapshotId()); if (snapshotId == snapshotId()) { state.builder().partitionStatisticsFiles(emptyList()); @@ -305,11 +323,13 @@ interface AssignUUID extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ASSIGN_UUID); NessieModelIceberg.assignUUID(this, state.snapshot()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ASSIGN_UUID); NessieModelIceberg.assignUUID(this, state.snapshot()); } @@ -329,11 +349,13 @@ interface AddSchema extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SCHEMA); NessieModelIceberg.addSchema(this, state); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SCHEMA); NessieModelIceberg.addSchema(this, state); } @@ -352,12 +374,14 @@ interface SetCurrentSchema extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_SCHEMA); NessieModelIceberg.setCurrentSchema( this, state.lastAddedSchemaId(), state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_CURRENT_SCHEMA); NessieModelIceberg.setCurrentSchema( this, state.lastAddedSchemaId(), state.snapshot(), state.builder()); } @@ -376,6 +400,7 @@ interface AddPartitionSpec extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_PARTITION_SPEC); NessieModelIceberg.addPartitionSpec(this, state); } @@ -403,6 +428,7 @@ interface SetDefaultPartitionSpec extends IcebergMetadataUpdate { 
@Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_DEFAULT_PARTITION_SPEC); NessieModelIceberg.setDefaultPartitionSpec(this, state); } @@ -420,6 +446,89 @@ interface AddSnapshot extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SNAPSHOT); + Map summary = snapshot().summary(); + + String v = summary.get("added-data-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_DATA_FILES); + } + v = summary.get("deleted-data-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETE_DATA_FILES); + } + v = summary.get("added-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_DELETE_FILES); + } + v = summary.get("added-equality-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_EQUALITY_DELETE_FILES); + } + v = summary.get("added-position-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADD_POSITION_DELETE_FILES); + } + v = summary.get("removed-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_DELETE_FILES); + } + v = summary.get("removed-equality-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_EQUALITY_DELETE_FILES); + } + v = summary.get("removed-position-delete-files"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_REMOVE_POSITION_DELETE_FILES); + } + v = summary.get("added-records"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_RECORDS); + } + v = summary.get("deleted-records"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_RECORDS); + } + v = summary.get("added-position-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_POSITION_DELETES); + } + v = summary.get("deleted-position-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_POSITION_DELETES); + } + v = summary.get("added-equality-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_ADDED_EQUALITY_DELETES); + } + v = summary.get("deleted-equality-deletes"); + if (v != null && Long.parseLong(v) > 0) { + state.addCatalogOp(CatalogOps.SNAP_DELETED_EQUALITY_DELETES); + } + v = summary.get("replace-partitions"); + if (Boolean.parseBoolean(v)) { + state.addCatalogOp(CatalogOps.SNAP_REPLACE_PARTITIONS); + } + v = summary.get("operation"); + if (v != null) { + switch (v.toLowerCase(Locale.ROOT)) { + case "append": + state.addCatalogOp(CatalogOps.SNAP_OP_APPEND); + break; + case "replace": + state.addCatalogOp(CatalogOps.SNAP_OP_REPLACE); + break; + case "overwrite": + state.addCatalogOp(CatalogOps.SNAP_OP_OVERWRITE); + break; + case "delete": + state.addCatalogOp(CatalogOps.SNAP_OP_DELETE); + break; + default: + break; + } + } + NessieModelIceberg.addSnapshot(this, state); } } @@ -433,6 +542,7 @@ interface AddSortOrder extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_ADD_SORT_ORDER); NessieModelIceberg.addSortOrder(this, state); } @@ -466,6 +576,7 @@ interface SetDefaultSortOrder extends IcebergMetadataUpdate { 
@Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_DEFAULT_SORT_ORDER); NessieModelIceberg.setDefaultSortOrder(this, state); } @@ -489,6 +600,7 @@ default boolean trusted() { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); if (trusted()) { NessieModelIceberg.setLocation(this, state.builder()); } @@ -496,6 +608,7 @@ default void applyToTable(IcebergTableMetadataUpdateState state) { @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); if (trusted()) { NessieModelIceberg.setLocation(this, state.builder()); } @@ -516,11 +629,19 @@ interface SetProperties extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PROPERTIES); + if (updates().containsKey("location")) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); + } NessieModelIceberg.setProperties(this, state.snapshot(), state.builder()); } @Override default void applyToView(IcebergViewMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_PROPERTIES); + if (updates().containsKey("location")) { + state.addCatalogOp(CatalogOps.META_SET_LOCATION); + } NessieModelIceberg.setProperties(this, state.snapshot(), state.builder()); } @@ -554,6 +675,7 @@ interface SetSnapshotRef extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_SET_SNAPSHOT_REF); // NOP - This class is used for JSON deserialization only. // Nessie has catalog-level branches and tags. } @@ -569,6 +691,7 @@ interface RemoveSnapshotRef extends IcebergMetadataUpdate { @Override default void applyToTable(IcebergTableMetadataUpdateState state) { + state.addCatalogOp(CatalogOps.META_REMOVE_SNAPSHOT_REF); // NOP - This class is used for JSON deserialization only. // Nessie has catalog-level branches and tags. } diff --git a/catalog/service/common/build.gradle.kts b/catalog/service/common/build.gradle.kts index 159084dfa98..4379b49233d 100644 --- a/catalog/service/common/build.gradle.kts +++ b/catalog/service/common/build.gradle.kts @@ -22,6 +22,7 @@ dependencies { implementation(project(":nessie-model")) implementation(project(":nessie-catalog-files-api")) implementation(project(":nessie-catalog-model")) + implementation(project(":nessie-versioned-spi")) implementation(project(":nessie-versioned-storage-common")) implementation(project(":nessie-tasks-api")) implementation(project(":nessie-catalog-service-transfer")) diff --git a/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java b/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java index 7a65383ccca..f3bc8e1ac29 100644 --- a/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java +++ b/catalog/service/common/src/main/java/org/projectnessie/catalog/service/api/CatalogService.java @@ -31,6 +31,7 @@ import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Reference; +import org.projectnessie.versioned.RequestMeta; public interface CatalogService { @@ -41,7 +42,7 @@ public interface CatalogService { * more. * @param key content key of the table or view * @param expectedType The expected content-type. 
- * @param forWrite indicates whether access checks shall be performed for a write/update request + * @param requestMeta additional information for access checks * @return The response is either a response object or callback to produce the result. The latter * is useful to return results that are quite big, for example Iceberg manifest lists or * manifest files. @@ -50,20 +51,22 @@ CompletionStage retrieveSnapshot( SnapshotReqParams reqParams, ContentKey key, @Nullable Content.Type expectedType, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException; Stream>> retrieveSnapshots( SnapshotReqParams reqParams, List keys, - Consumer effectiveReferenceConsumer) + Consumer effectiveReferenceConsumer, + RequestMeta requestMeta) throws NessieNotFoundException; CompletionStage> commit( ParsedReference reference, CatalogCommit commit, SnapshotReqParams reqParams, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest) throws BaseNessieClientServerException; interface CatalogUriResolver { diff --git a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java index d9d6e2d5890..397319d6ae0 100644 --- a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java +++ b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/CatalogServiceImpl.java @@ -39,6 +39,7 @@ import static org.projectnessie.error.ReferenceConflicts.referenceConflicts; import static org.projectnessie.model.Conflict.conflict; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; +import static org.projectnessie.services.authz.ApiContext.apiContext; import jakarta.annotation.Nullable; import jakarta.enterprise.context.RequestScoped; @@ -103,6 +104,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.nessie.tasks.api.TasksService; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.impl.ContentApiImpl; @@ -110,6 +112,7 @@ import org.projectnessie.services.spi.ContentService; import org.projectnessie.services.spi.TreeService; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.storage.common.persist.ObjId; import org.projectnessie.versioned.storage.common.persist.Persist; @@ -134,6 +137,8 @@ public class CatalogServiceImpl implements CatalogService { @Named("import-jobs") Executor executor; + static final ApiContext ICEBERG_V1 = apiContext("Iceberg", 1); + public CatalogServiceImpl() { this(null, null, null, null, null); } @@ -146,8 +151,9 @@ public CatalogServiceImpl( Authorizer authorizer, AccessContext accessContext) { this.catalogConfig = catalogConfig; - this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(serverConfig, store, authorizer, accessContext); + this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); + this.contentService = + new ContentApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); } private IcebergStuff icebergStuff() { @@ -164,7 +170,8 @@ private IcebergStuff icebergStuff() { public Stream>> 
retrieveSnapshots( SnapshotReqParams reqParams, List keys, - Consumer effectiveReferenceConsumer) + Consumer effectiveReferenceConsumer, + RequestMeta requestMeta) throws NessieNotFoundException { ParsedReference reference = reqParams.ref(); @@ -176,7 +183,7 @@ public Stream>> retrieveSnapshots( GetMultipleContentsResponse contentResponse = contentService.getMultipleContents( - reference.name(), reference.hashWithRelativeSpec(), keys, false, false); + reference.name(), reference.hashWithRelativeSpec(), keys, false, requestMeta); IcebergStuff icebergStuff = icebergStuff(); @@ -219,7 +226,7 @@ public CompletionStage retrieveSnapshot( SnapshotReqParams reqParams, ContentKey key, @Nullable Content.Type expectedType, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException { ParsedReference reference = reqParams.ref(); @@ -232,7 +239,7 @@ public CompletionStage retrieveSnapshot( ContentResponse contentResponse = contentService.getContent( - key, reference.name(), reference.hashWithRelativeSpec(), false, forWrite); + key, reference.name(), reference.hashWithRelativeSpec(), false, requestMeta); Content content = contentResponse.getContent(); if (expectedType != null && !content.getType().equals(expectedType)) { throw new NessieContentNotFoundException(key, reference.name()); @@ -360,16 +367,21 @@ private SnapshotResponse snapshotViewResponse( CompletionStage commit( ParsedReference reference, CatalogCommit commit, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest) throws BaseNessieClientServerException { + RequestMeta requestMeta = RequestMeta.API_WRITE; + + List allKeys = + commit.getOperations().stream().map(CatalogOperation::getKey).collect(toList()); + for (ContentKey key : allKeys) { + requestMeta.addKeyAction(key, apiRequest); + } + GetMultipleContentsResponse contentsResponse = contentService.getMultipleContents( - reference.name(), - reference.hashWithRelativeSpec(), - commit.getOperations().stream().map(CatalogOperation::getKey).collect(toList()), - false, - true); + reference.name(), reference.hashWithRelativeSpec(), allKeys, false, requestMeta); checkArgument( requireNonNull(contentsResponse.getEffectiveReference()) instanceof Branch, @@ -388,7 +400,7 @@ CompletionStage commit( IcebergStuff icebergStuff = icebergStuff(); - MultiTableUpdate multiTableUpdate = new MultiTableUpdate(treeService, target); + MultiTableUpdate multiTableUpdate = new MultiTableUpdate(treeService, target, requestMeta); LOGGER.trace( "Executing commit containing {} operations against '{}@{}'", @@ -496,9 +508,10 @@ public CompletionStage> commit( ParsedReference reference, CatalogCommit commit, SnapshotReqParams reqParams, - Function commitMetaBuilder) + Function commitMetaBuilder, + String apiRequest) throws BaseNessieClientServerException { - return commit(reference, commit, commitMetaBuilder) + return commit(reference, commit, commitMetaBuilder, apiRequest) // Finally, transform each MultiTableUpdate.SingleTableUpdate to a SnapshotResponse .thenApply( updates -> @@ -579,13 +592,13 @@ private CompletionStage applyIcebergTableCommitOperation( return new IcebergTableMetadataUpdateState( nessieSnapshot, op.getKey(), content != null) .checkRequirements(icebergOp.requirements()) - .applyUpdates(pruneUpdates(icebergOp, content != null)) - .snapshot(); + .applyUpdates(pruneUpdates(icebergOp, content != null)); // TODO handle the case when nothing changed -> do not update // e.g. 
when adding a schema/spec/order that already exists }) .thenApply( - nessieSnapshot -> { + updateState -> { + NessieTableSnapshot nessieSnapshot = updateState.snapshot(); String metadataJsonLocation = icebergMetadataJsonLocation(nessieSnapshot.icebergLocation()); IcebergTableMetadata icebergMetadata = @@ -597,7 +610,8 @@ private CompletionStage applyIcebergTableCommitOperation( nessieSnapshot = nessieSnapshot.withId(objIdToNessieId(snapshotId)); SingleTableUpdate singleTableUpdate = - new SingleTableUpdate(nessieSnapshot, updated, icebergOp.getKey()); + new SingleTableUpdate( + nessieSnapshot, updated, icebergOp.getKey(), updateState.catalogOps()); multiTableUpdate.addUpdate(op.getKey(), singleTableUpdate); return singleTableUpdate; }); @@ -647,13 +661,13 @@ private CompletionStage applyIcebergViewCommitOperation( return new IcebergViewMetadataUpdateState( nessieSnapshot, op.getKey(), content != null) .checkRequirements(icebergOp.requirements()) - .applyUpdates(pruneUpdates(icebergOp, content != null)) - .snapshot(); + .applyUpdates(pruneUpdates(icebergOp, content != null)); // TODO handle the case when nothing changed -> do not update // e.g. when adding a schema/spec/order that already exists }) .thenApply( - nessieSnapshot -> { + updateState -> { + NessieViewSnapshot nessieSnapshot = updateState.snapshot(); String metadataJsonLocation = icebergMetadataJsonLocation(nessieSnapshot.icebergLocation()); IcebergViewMetadata icebergMetadata = @@ -664,7 +678,8 @@ private CompletionStage applyIcebergViewCommitOperation( nessieSnapshot = nessieSnapshot.withId(objIdToNessieId(snapshotId)); SingleTableUpdate singleTableUpdate = - new SingleTableUpdate(nessieSnapshot, updated, icebergOp.getKey()); + new SingleTableUpdate( + nessieSnapshot, updated, icebergOp.getKey(), updateState.catalogOps()); multiTableUpdate.addUpdate(op.getKey(), singleTableUpdate); return singleTableUpdate; }); diff --git a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java index 33e7d7de3e8..9544c52d510 100644 --- a/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java +++ b/catalog/service/impl/src/main/java/org/projectnessie/catalog/service/impl/MultiTableUpdate.java @@ -20,6 +20,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.model.snapshot.NessieEntitySnapshot; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; @@ -30,6 +33,7 @@ import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.Operation; import org.projectnessie.services.spi.TreeService; +import org.projectnessie.versioned.RequestMeta; /** Maintains state across all individual updates of a commit. 
*/ final class MultiTableUpdate { @@ -40,11 +44,13 @@ final class MultiTableUpdate { private Map addedContentsMap; private Branch targetBranch; private boolean committed; + private final RequestMeta requestMeta; - MultiTableUpdate(TreeService treeService, Branch target) { + MultiTableUpdate(TreeService treeService, Branch target, RequestMeta requestMeta) { this.treeService = treeService; this.operations = ImmutableOperations.builder(); this.targetBranch = target; + this.requestMeta = requestMeta; } ImmutableOperations.Builder operations() { @@ -55,9 +61,19 @@ MultiTableUpdate commit() throws NessieConflictException, NessieNotFoundExceptio synchronized (this) { committed = true; if (!tableUpdates.isEmpty()) { + RequestMeta checkMeta = requestMeta; + for (SingleTableUpdate update : tableUpdates) { + checkMeta = + checkMeta.addKeyActions( + update.key, + update.catalogOps.stream() + .map(CatalogOps::name) + .collect(Collectors.toUnmodifiableSet())); + } + CommitResponse commitResponse = treeService.commitMultipleOperations( - targetBranch().getName(), targetBranch.getHash(), operations.build()); + targetBranch().getName(), targetBranch.getHash(), operations.build(), checkMeta); addedContentsMap = commitResponse.getAddedContents() != null @@ -112,11 +128,17 @@ static final class SingleTableUpdate { final NessieEntitySnapshot snapshot; final Content content; final ContentKey key; + final Set catalogOps; - SingleTableUpdate(NessieEntitySnapshot snapshot, Content content, ContentKey key) { + SingleTableUpdate( + NessieEntitySnapshot snapshot, + Content content, + ContentKey key, + Set catalogOps) { this.snapshot = snapshot; this.content = content; this.key = key; + this.catalogOps = catalogOps; } } } diff --git a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java index f72fe94000f..6b7a631fe82 100644 --- a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java +++ b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/AbstractCatalogService.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.projectnessie.api.v2.params.ParsedReference.parsedReference; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddPartitionSpec.addPartitionSpec; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSchema.addSchema; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSortOrder.addSortOrder; @@ -32,6 +33,7 @@ import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.nessie.combined.EmptyHttpHeaders.emptyHttpHeaders; import static org.projectnessie.services.authz.AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.net.URI; import java.time.Clock; @@ -83,6 +85,7 @@ import org.projectnessie.objectstoragemock.InterceptingBucket; import org.projectnessie.objectstoragemock.ObjectStorageMock; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import 
org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -90,6 +93,7 @@ import org.projectnessie.services.impl.TreeApiImpl; import org.projectnessie.services.rest.RestV2ConfigResource; import org.projectnessie.services.rest.RestV2TreeResource; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.storage.common.persist.Persist; import org.projectnessie.versioned.storage.testextension.NessiePersist; @@ -121,12 +125,13 @@ public abstract class AbstractCatalogService { protected ContentApiImpl contentService; protected volatile Function batchAccessCheckerFactory; - protected ParsedReference commitSingle(Reference branch, ContentKey key) + protected ParsedReference commitSingle(Reference branch, ContentKey key, RequestMeta requestMeta) throws InterruptedException, ExecutionException, BaseNessieClientServerException { - return commitMultiple(branch, key); + return commitMultiple(branch, requestMeta, key); } - protected ParsedReference commitMultiple(Reference branch, ContentKey... keys) + protected ParsedReference commitMultiple( + Reference branch, RequestMeta requestMeta, ContentKey... keys) throws InterruptedException, ExecutionException, BaseNessieClientServerException { ParsedReference ref = parsedReference(branch.getName(), branch.getHash(), Reference.ReferenceType.BRANCH); @@ -155,7 +160,7 @@ protected ParsedReference commitMultiple(Reference branch, ContentKey... keys) MultiTableUpdate update = catalogService - .commit(ref, commit.build(), CommitMeta::fromMessage) + .commit(ref, commit.build(), CommitMeta::fromMessage, CATALOG_UPDATE_MULTIPLE.name()) .toCompletableFuture() .get(); branch = update.targetBranch(); @@ -264,11 +269,13 @@ public boolean sendStacktraceToClient() { } }; VersionStore versionStore = new VersionStoreImpl(persist); - Authorizer authorizer = context -> batchAccessCheckerFactory.apply(context); + Authorizer authorizer = (context, apiContext) -> batchAccessCheckerFactory.apply(context); AccessContext accessContext = () -> () -> null; + ApiContext apiContext = apiContext("Nessie", 2); - treeService = new TreeApiImpl(config, versionStore, authorizer, accessContext); - contentService = new ContentApiImpl(config, versionStore, authorizer, accessContext); + treeService = new TreeApiImpl(config, versionStore, authorizer, accessContext, apiContext); + contentService = + new ContentApiImpl(config, versionStore, authorizer, accessContext, apiContext); RestV2TreeResource treeResource = new RestV2TreeResource(config, versionStore, authorizer, accessContext, emptyHttpHeaders()); diff --git a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java index 2bc32b60e94..2f9323c39cf 100644 --- a/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java +++ b/catalog/service/impl/src/test/java/org/projectnessie/catalog/service/impl/TestCatalogServiceImpl.java @@ -19,13 +19,17 @@ import static java.util.concurrent.TimeUnit.MINUTES; import static org.assertj.core.api.InstanceOfAssertFactories.STRING; import static org.projectnessie.api.v2.params.ParsedReference.parsedReference; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.catalog.service.api.SnapshotReqParams.forSnapshotHttpReq; 
import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.CheckType.COMMIT_CHANGE_AGAINST_REFERENCE; import static org.projectnessie.services.authz.Check.CheckType.READ_ENTITY_VALUE; import static org.projectnessie.services.authz.Check.CheckType.UPDATE_ENTITY; import static org.projectnessie.services.authz.Check.CheckType.VIEW_REFERENCE; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import java.io.InputStream; import java.util.ArrayList; @@ -65,6 +69,7 @@ import org.projectnessie.services.authz.Check; import org.projectnessie.services.authz.Check.CheckType; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; import software.amazon.awssdk.services.s3.model.S3Exception; public class TestCatalogServiceImpl extends AbstractCatalogService { @@ -92,7 +97,7 @@ public void cleanupAfterNessieCommitFailure() throws Exception { .operation(Operation.Put.of(key1, IcebergView.of("meta", 1, 2))) .commitWithResponse(); - soft.assertThatThrownBy(() -> commitMultiple(main, key1, key2)) + soft.assertThatThrownBy(() -> commitMultiple(main, API_WRITE, key1, key2)) .isInstanceOf(ExecutionException.class) .cause() .isInstanceOf(RuntimeException.class) @@ -148,7 +153,7 @@ public MockObject commit() { return Optional.empty(); }); - soft.assertThatThrownBy(() -> commitMultiple(main, key1, key2, key3, key4)) + soft.assertThatThrownBy(() -> commitMultiple(main, API_WRITE, key1, key2, key3, key4)) .isInstanceOf(ExecutionException.class) .cause() .cause() @@ -166,7 +171,10 @@ public void noCommitOps() throws Exception { parsedReference(main.getName(), main.getHash(), Reference.ReferenceType.BRANCH); CatalogCommit commit = CatalogCommit.builder().build(); - catalogService.commit(ref, commit, CommitMeta::fromMessage).toCompletableFuture().get(); + catalogService + .commit(ref, commit, CommitMeta::fromMessage, CATALOG_UPDATE_MULTIPLE.name()) + .toCompletableFuture() + .get(); Reference afterCommit = api.getReference().refName("main").get(); soft.assertThat(afterCommit).isEqualTo(main); @@ -179,7 +187,7 @@ public void twoTableCreates() throws Exception { ContentKey key1 = ContentKey.of("mytable1"); ContentKey key2 = ContentKey.of("mytable2"); - ParsedReference committed = commitMultiple(main, key1, key2); + ParsedReference committed = commitMultiple(main, API_WRITE, key1, key2); Reference afterCommit = api.getReference().refName("main").get(); soft.assertThat(afterCommit) @@ -193,7 +201,7 @@ public void singleTableCreate() throws Exception { Reference main = api.getReference().refName("main").get(); ContentKey key = ContentKey.of("mytable"); - ParsedReference committed = commitSingle(main, key); + ParsedReference committed = commitSingle(main, key, API_WRITE); Reference afterCommit = api.getReference().refName("main").get(); soft.assertThat(afterCommit) @@ -204,7 +212,7 @@ public void singleTableCreate() throws Exception { SnapshotResponse snap = catalogService .retrieveSnapshot( - forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, false) + forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, API_READ) .toCompletableFuture() .get(5, MINUTES); @@ -247,20 +255,20 @@ public void singleTableCreate() throws Exception { /** * Verify behavior of {@link 
CatalogService#retrieveSnapshot(SnapshotReqParams, ContentKey, - * Content.Type, boolean)} against related Nessie {@link CheckType check types} for read and write - * intents. + * Content.Type, RequestMeta)} against related Nessie {@link CheckType check types} for read and + * write intents. */ @Test public void retrieveSnapshotAccessChecks() throws Exception { Reference main = api.getReference().refName("main").get(); ContentKey key = ContentKey.of("mytable"); - ParsedReference committed = commitSingle(main, key); + ParsedReference committed = commitSingle(main, key, API_WRITE); AtomicReference failingCheckType = new AtomicReference<>(); batchAccessCheckerFactory = x -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return getChecks().stream() @@ -290,7 +298,7 @@ public Map check() { forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, - false) + API_READ) .toCompletableFuture() .get(5, MINUTES)) .describedAs("forRead with %s", checkType); @@ -308,7 +316,7 @@ public Map check() { forSnapshotHttpReq(committed, "ICEBERG", "2"), key, ICEBERG_TABLE, - true) + API_WRITE) .toCompletableFuture() .get(5, MINUTES)) .describedAs("forWrite with %s", checkType); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java index 22e84f1604a..9d7cee6adcd 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/AbstractCatalogResource.java @@ -16,6 +16,7 @@ package org.projectnessie.catalog.service.rest; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.projectnessie.versioned.RequestMeta.API_READ; import io.smallrye.mutiny.Uni; import jakarta.inject.Inject; @@ -60,7 +61,7 @@ Uni snapshotResponse( throws NessieNotFoundException { return Uni.createFrom() .completionStage( - catalogService.retrieveSnapshot(snapshotReqParams, key, expectedType, false)); + catalogService.retrieveSnapshot(snapshotReqParams, key, expectedType, API_READ)); } private static Response snapshotToResponse(SnapshotResponse snapshot) { diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java index 60a9c32785e..1ef82102b3a 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1GenericResource.java @@ -16,6 +16,7 @@ package org.projectnessie.catalog.service.rest; import static java.util.Objects.requireNonNull; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_MULTIPLE; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import io.smallrye.common.annotation.Blocking; @@ -160,7 +161,12 @@ public Uni commitTransaction( // results are consumed. 
return Uni.createFrom() .completionStage( - catalogService.commit(ref, commit.build(), reqParams, this::updateCommitMeta)) + catalogService.commit( + ref, + commit.build(), + reqParams, + this::updateCommitMeta, + CATALOG_UPDATE_MULTIPLE.name())) .map(stream -> stream.reduce(null, (ident, snap) -> ident, (i1, i2) -> i1)); } } diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java index 7d47314f126..768fdad5f80 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1NamespaceResource.java @@ -15,11 +15,16 @@ */ package org.projectnessie.catalog.service.rest; -import static java.lang.String.format; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.META_SET_LOCATION; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.META_SET_PROPERTIES; import static org.projectnessie.error.ContentKeyErrorDetails.contentKeyErrorDetails; import static org.projectnessie.model.Content.Type.NAMESPACE; import static org.projectnessie.services.impl.RefUtil.toReference; -import static org.projectnessie.services.rest.common.RestCommon.updateCommitMeta; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import io.smallrye.common.annotation.Blocking; import jakarta.enterprise.context.RequestScoped; @@ -41,11 +46,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; import org.eclipse.microprofile.openapi.annotations.Operation; import org.jboss.resteasy.reactive.server.ServerExceptionMapper; import org.projectnessie.api.v2.params.ParsedReference; import org.projectnessie.catalog.formats.iceberg.meta.IcebergNamespace; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateNamespaceRequest; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateNamespaceResponse; import org.projectnessie.catalog.formats.iceberg.rest.IcebergGetNamespaceResponse; @@ -63,12 +70,12 @@ import org.projectnessie.error.NessieReferenceNotFoundException; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; -import org.projectnessie.model.ContentResponse; import org.projectnessie.model.EntriesResponse; import org.projectnessie.model.GetMultipleContentsResponse; import org.projectnessie.model.ImmutableNamespace; import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.Namespace; +import org.projectnessie.model.Operation.Delete; import org.projectnessie.model.Operations; import org.projectnessie.model.Reference; import org.projectnessie.services.authz.AccessContext; @@ -76,6 +83,7 @@ import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.spi.PagedResponseHandler; import org.projectnessie.storage.uri.StorageUri; +import 
org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with namespaces. */ @@ -131,7 +139,7 @@ public IcebergCreateNamespaceResponse createNamespace( try { contentsResponse = contentService.getMultipleContents( - ref.name(), ref.hashWithRelativeSpec(), List.of(key), false, false); + ref.name(), ref.hashWithRelativeSpec(), List.of(key), false, API_READ); } catch (NessieNotFoundException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); } @@ -157,10 +165,19 @@ public IcebergCreateNamespaceResponse createNamespace( .commitMeta(updateCommitMeta("update namespace " + key)) .build(); + RequestMeta requestMeta = API_WRITE.addKeyAction(key, CATALOG_CREATE_ENTITY.name()); + if (!namespace.getProperties().isEmpty()) { + requestMeta = requestMeta.addKeyAction(key, META_SET_PROPERTIES.name()); + if (namespace.getProperties().containsKey("location")) { + requestMeta = requestMeta.addKeyAction(key, META_SET_LOCATION.name()); + } + } + treeService.commitMultipleOperations( contentsResponse.getEffectiveReference().getName(), contentsResponse.getEffectiveReference().getHash(), - ops); + ops, + requestMeta); return IcebergCreateNamespaceResponse.builder() .namespace(createNamespaceRequest.namespace()) @@ -176,54 +193,54 @@ public void dropNamespace( @PathParam("prefix") String prefix, @PathParam("namespace") String namespace) throws IOException { NamespaceRef namespaceRef = decodeNamespaceRef(prefix, namespace); - ContentKey key = namespaceRef.namespace().toContentKey(); - ContentResponse contentResponse = - contentService.getContent( - key, namespaceRef.referenceName(), namespaceRef.hashWithRelativeSpec(), false, false); - if (!(contentResponse.getContent() instanceof Namespace)) { - throw new NessieNamespaceNotFoundException( - contentKeyErrorDetails(key), - String.format("Namespace '%s' does not exist", key.toCanonicalString())); - } - - Reference ref = contentResponse.getEffectiveReference(); - boolean notEmpty = + var ref = new AtomicReference(); + var entries = treeService.getEntries( - ref.getName(), - ref.getHash(), + namespaceRef.referenceName(), + namespaceRef.hashWithRelativeSpec(), + null, null, - format("entry.encodedKey.startsWith('%s.')", key.toPathString()), null, false, - new PagedResponseHandler<>() { - boolean found; + new PagedResponseHandler, EntriesResponse.Entry>() { + final List entries = new ArrayList<>(); @Override public boolean addEntry(EntriesResponse.Entry entry) { - if (found) { + if (entries.size() == 2) { return false; } - found = true; + entries.add(entry); return true; } @Override - public Boolean build() { - return found; + public List build() { + return entries; } @Override public void hasMore(String pagingToken) {} }, - h -> toReference(h), + h -> ref.set(toReference(h)), null, null, key, - null); + List.of()); - if (notEmpty) { + if (entries.isEmpty()) { + throw new NessieNamespaceNotFoundException( + contentKeyErrorDetails(key), + String.format("Namespace '%s' does not exist", key.toCanonicalString())); + } + if (!NAMESPACE.equals(entries.get(0).getType())) { + throw new NessieNamespaceNotFoundException( + contentKeyErrorDetails(key), + String.format("Namespace '%s' does not exist", key.toCanonicalString())); + } + if (entries.size() > 1) { throw new NessieNamespaceNotEmptyException( contentKeyErrorDetails(key), String.format("Namespace '%s' is not empty", key.toCanonicalString())); @@ -231,11 +248,13 @@ public void hasMore(String pagingToken) {} 
Operations ops = ImmutableOperations.builder() - .addOperations(org.projectnessie.model.Operation.Delete.of(key)) + .addOperations(Delete.of(key)) .commitMeta(updateCommitMeta("delete namespace " + key)) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta requestMeta = API_WRITE.addKeyAction(key, CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations( + ref.get().getName(), ref.get().getHash(), ops, requestMeta); } @Operation(operationId = "iceberg.v1.listNamespaces") @@ -279,7 +298,7 @@ public void namespaceExists( namespaceRef.referenceName(), namespaceRef.hashWithRelativeSpec(), false, - false) + API_READ) .getContent(); if (!(c instanceof Namespace)) { throw new NessieNamespaceNotFoundException( @@ -311,7 +330,7 @@ public IcebergGetNamespaceResponse loadNamespaceMetadata( namespaceRef.hashWithRelativeSpec(), keysInOrder, false, - false); + API_READ); Map namespacesMap = namespaces.toContentsMap(); Content content = namespacesMap.get(nessieNamespace.toContentKey()); @@ -380,7 +399,7 @@ public IcebergUpdateNamespacePropertiesResponse updateProperties( namespaceRef.hashWithRelativeSpec(), List.of(key), false, - true); + API_WRITE); Reference ref = namespaces.getEffectiveReference(); Map namespacesMap = namespaces.toContentsMap(); @@ -403,7 +422,21 @@ public IcebergUpdateNamespacePropertiesResponse updateProperties( .commitMeta(updateCommitMeta("update namespace " + key)) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta requestMeta = API_WRITE.addKeyAction(key, CATALOG_UPDATE_ENTITY.name()); + if (!updateNamespacePropertiesRequest.removals().isEmpty()) { + requestMeta = requestMeta.addKeyAction(key, CatalogOps.META_REMOVE_PROPERTIES.name()); + if (updateNamespacePropertiesRequest.removals().contains("location")) { + requestMeta = + requestMeta.addKeyAction(key, CatalogOps.META_REMOVE_LOCATION_PROPERTY.name()); + } + } + if (!updateNamespacePropertiesRequest.updates().isEmpty()) { + requestMeta = requestMeta.addKeyAction(key, CatalogOps.META_SET_PROPERTIES.name()); + if (updateNamespacePropertiesRequest.updates().containsKey("location")) { + requestMeta = requestMeta.addKeyAction(key, CatalogOps.META_SET_LOCATION.name()); + } + } + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta); IcebergUpdateNamespacePropertiesResponse.Builder response = IcebergUpdateNamespacePropertiesResponse.builder(); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java index ceb1297ec34..fa80435f860 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ResourceBase.java @@ -28,7 +28,10 @@ import static org.projectnessie.catalog.service.rest.TimestampParser.timestampToNessie; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.base.Splitter; import io.smallrye.mutiny.Uni; @@ -44,6 
+47,7 @@ import java.util.stream.Stream; import org.projectnessie.api.v2.params.ParsedReference; import org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCatalogOperation; import org.projectnessie.catalog.formats.iceberg.rest.IcebergRenameTableRequest; import org.projectnessie.catalog.formats.iceberg.rest.IcebergUpdateEntityRequest; @@ -70,6 +74,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.model.TableReference; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.services.impl.ContentApiImpl; @@ -77,6 +82,7 @@ import org.projectnessie.services.spi.ContentService; import org.projectnessie.services.spi.PagedCountingResponseHandler; import org.projectnessie.services.spi.TreeService; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; abstract class IcebergApiV1ResourceBase extends AbstractCatalogResource { @@ -86,6 +92,8 @@ abstract class IcebergApiV1ResourceBase extends AbstractCatalogResource { final ServerConfig serverConfig; final CatalogConfig catalogConfig; + static final ApiContext ICEBERG_V1 = apiContext("Iceberg", 1); + protected IcebergApiV1ResourceBase( ServerConfig serverConfig, CatalogConfig catalogConfig, @@ -94,8 +102,9 @@ protected IcebergApiV1ResourceBase( AccessContext accessContext) { this.serverConfig = serverConfig; this.catalogConfig = catalogConfig; - this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(serverConfig, store, authorizer, accessContext); + this.treeService = new TreeApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); + this.contentService = + new ContentApiImpl(serverConfig, store, authorizer, accessContext, ICEBERG_V1); } protected Stream listContent( @@ -170,7 +179,10 @@ public void hasMore(String pagingToken) { } protected void renameContent( - String prefix, IcebergRenameTableRequest renameTableRequest, Content.Type expectedContentType) + String prefix, + IcebergRenameTableRequest renameTableRequest, + Content.Type expectedContentType, + RequestMeta requestMeta) throws NessieNotFoundException, NessieConflictException { TableRef fromTableRef = decodeTableRef(prefix, renameTableRequest.source()); TableRef toTableRef = decodeTableRef(prefix, renameTableRequest.destination()); @@ -182,7 +194,7 @@ protected void renameContent( ref.hashWithRelativeSpec(), List.of(toTableRef.contentKey(), fromTableRef.contentKey()), false, - false); + API_READ); Map contentsMap = contents.toContentsMap(); Content existingFrom = contentsMap.get(fromTableRef.contentKey()); if (existingFrom == null || !expectedContentType.equals(existingFrom.getType())) { @@ -220,7 +232,13 @@ protected void renameContent( entityType, fromTableRef.contentKey(), toTableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(effectiveRef.getName(), effectiveRef.getHash(), ops); + requestMeta = + requestMeta + .addKeyAction(fromTableRef.contentKey(), CatalogOps.CATALOG_RENAME_ENTITY_FROM.name()) + .addKeyAction(toTableRef.contentKey(), CatalogOps.CATALOG_RENAME_ENTITY_TO.name()); + + treeService.commitMultipleOperations( + effectiveRef.getName(), effectiveRef.getHash(), ops, requestMeta); } 
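// -------------------------------------------------------------------------------------------
// Illustrative sketch only; not part of this change and not part of IcebergApiV1ResourceBase.
// It shows how a custom Authorizer implementation could consume the new ApiContext together
// with the per-content-key action names (CatalogOps / metadata-update names) delivered on each
// Check via Check#actions(). The policy is hypothetical, 'apiName()' is an assumed accessor on
// ApiContext, and the Map<Check, String> result (check -> failure message) follows the existing
// BatchAccessChecker contract.
//
//   import java.util.LinkedHashMap;
//   import java.util.Map;
//   import org.projectnessie.services.authz.AbstractBatchAccessChecker;
//   import org.projectnessie.services.authz.Authorizer;
//   import org.projectnessie.services.authz.Check;
//
//   Authorizer example =
//       (accessContext, apiContext) ->
//           new AbstractBatchAccessChecker(apiContext) {
//             @Override
//             public Map<Check, String> check() {
//               Map<Check, String> failures = new LinkedHashMap<>();
//               for (Check check : getChecks()) {
//                 // Hypothetical rule: reject entity drops issued through the Iceberg REST API.
//                 if (check.type() == Check.CheckType.DELETE_ENTITY
//                     && check.actions().contains("CATALOG_DROP_ENTITY")
//                     && "Iceberg".equals(getApiContext().apiName())) { // apiName(): assumed
//                   failures.put(check, "dropping tables/views via Iceberg REST is not permitted");
//                 }
//               }
//               return failures;
//             }
//           };
//
// The same information reaches CEL rules through the new 'api' and 'actions' variables, so an
// equivalent (hypothetical) rule could read:
//   op == 'DELETE_ENTITY' && 'CATALOG_DROP_ENTITY' in actions
// -------------------------------------------------------------------------------------------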
protected NamespaceRef decodeNamespaceRef(String prefix, String encodedNs) { @@ -339,7 +357,11 @@ void createEntityVerifyNotExists(TableRef tableRef, Content.Type type) GetMultipleContentsResponse contentResponse = contentService.getMultipleContents( - ref.name(), ref.hashWithRelativeSpec(), List.of(tableRef.contentKey()), false, true); + ref.name(), + ref.hashWithRelativeSpec(), + List.of(tableRef.contentKey()), + false, + API_WRITE); if (!contentResponse.getContents().isEmpty()) { Content existing = contentResponse.getContents().get(0).getContent(); throw new CatalogEntityAlreadyExistsException( @@ -354,7 +376,11 @@ ContentResponse fetchIcebergEntity( ParsedReference ref = requireNonNull(tableRef.reference()); ContentResponse content = contentService.getContent( - tableRef.contentKey(), ref.name(), ref.hashWithRelativeSpec(), false, forWrite); + tableRef.contentKey(), + ref.name(), + ref.hashWithRelativeSpec(), + false, + forWrite ? API_WRITE : API_READ); checkArgument( content.getContent().getType().equals(expectedType), "Expecting an Iceberg %s, but got type %s", @@ -364,7 +390,10 @@ ContentResponse fetchIcebergEntity( } Uni createOrUpdateEntity( - TableRef tableRef, IcebergUpdateEntityRequest updateEntityRequest, Content.Type contentType) + TableRef tableRef, + IcebergUpdateEntityRequest updateEntityRequest, + Content.Type contentType, + CatalogOps apiOperation) throws IOException { IcebergCatalogOperation op = @@ -383,7 +412,12 @@ Uni createOrUpdateEntity( return Uni.createFrom() .completionStage( - catalogService.commit(tableRef.reference(), commit, reqParams, this::updateCommitMeta)) + catalogService.commit( + tableRef.reference(), + commit, + reqParams, + this::updateCommitMeta, + apiOperation.name())) .map(Stream::findFirst) .map( o -> diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java index 31e21519d36..27c94920487 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1TableResource.java @@ -23,6 +23,9 @@ import static org.projectnessie.catalog.formats.iceberg.meta.IcebergSortOrder.unsorted; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier.fromNessieContentKey; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableMetadata.GC_ENABLED; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.icebergBaseLocation; import static org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.nessieTableSnapshotToIceberg; import static org.projectnessie.catalog.formats.iceberg.nessie.NessieModelIceberg.newIcebergTableSnapshot; @@ -39,6 +42,7 @@ import static org.projectnessie.catalog.service.rest.TableRef.tableRef; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.Lists; import 
io.smallrye.common.annotation.Blocking; @@ -74,6 +78,7 @@ import org.projectnessie.catalog.formats.iceberg.meta.IcebergSortOrder; import org.projectnessie.catalog.formats.iceberg.meta.IcebergTableMetadata; import org.projectnessie.catalog.formats.iceberg.metrics.IcebergMetricsReport; +import org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps; import org.projectnessie.catalog.formats.iceberg.nessie.IcebergTableMetadataUpdateState; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCommitTableResponse; import org.projectnessie.catalog.formats.iceberg.rest.IcebergCreateTableRequest; @@ -111,6 +116,7 @@ import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; import org.projectnessie.storage.uri.StorageUri; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with tables. */ @@ -213,7 +219,7 @@ R loadTableResultFromSnapshotResponse( snap.effectiveReference().getName(), snap.effectiveReference().getHash(), false, - true); + API_WRITE); writeAccessValidated = true; } catch (Exception ignore) { } @@ -354,7 +360,7 @@ public Uni createTable( .addRequirement(IcebergUpdateRequirement.AssertCreate.assertTableDoesNotExist()) .build(); - return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_TABLE) + return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_TABLE, CATALOG_CREATE_ENTITY) .map( snap -> this.loadTableResultFromSnapshotResponse( @@ -391,6 +397,9 @@ public Uni registerTable( ParsedReference reference = requireNonNull(tableRef.reference()); Branch ref = checkBranch(treeService.getReferenceByName(reference.name(), FetchOption.MINIMAL)); + RequestMeta requestMeta = + API_WRITE.addKeyAction(tableRef.contentKey(), CatalogOps.CATALOG_REGISTER_ENTITY.name()); + Optional catalogTableRef = uriInfo.resolveTableFromUri(registerTableRequest.metadataLocation()); boolean nessieCatalogUri = uriInfo.isNessieCatalogUri(registerTableRequest.metadataLocation()); @@ -416,7 +425,7 @@ public Uni registerTable( ctr.contentKey(), registerTableRequest.metadataLocation()))) .build(); CommitResponse committed = - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta); return this.loadTable( TableRef.tableRef( @@ -463,7 +472,7 @@ public Uni registerTable( tableRef.contentKey(), registerTableRequest.metadataLocation()))) .build(); CommitResponse committed = - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta); return this.loadTable( tableRef( @@ -499,7 +508,9 @@ public void dropTable( .commitMeta(updateCommitMeta(format("Drop ICEBERG_TABLE %s", tableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta requestMeta = + API_WRITE.addKeyAction(tableRef.contentKey(), CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta); } @Operation(operationId = "iceberg.v1.listTables") @@ -531,7 +542,7 @@ public void renameTable( @Valid @NotNull IcebergRenameTableRequest renameTableRequest) throws IOException { - renameContent(prefix, renameTableRequest, ICEBERG_TABLE); + renameContent(prefix, renameTableRequest, ICEBERG_TABLE, API_WRITE); } @Operation(operationId = "iceberg.v1.tableExists") @@ -583,7 +594,7 @@ public 
Uni updateTable( throws IOException { TableRef tableRef = decodeTableRef(prefix, namespace, table); - return createOrUpdateEntity(tableRef, commitTableRequest, ICEBERG_TABLE) + return createOrUpdateEntity(tableRef, commitTableRequest, ICEBERG_TABLE, CATALOG_UPDATE_ENTITY) .map( snap -> { IcebergTableMetadata tableMetadata = diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java index 0bcf4db65e9..4a441314994 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergApiV1ViewResource.java @@ -18,6 +18,9 @@ import static java.lang.String.format; import static java.util.UUID.randomUUID; import static org.projectnessie.catalog.formats.iceberg.meta.IcebergTableIdentifier.fromNessieContentKey; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_CREATE_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_DROP_ENTITY; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddSchema.addSchema; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AddViewVersion.addViewVersion; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.AssignUUID.assignUUID; @@ -26,6 +29,7 @@ import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.SetProperties.setProperties; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergMetadataUpdate.UpgradeFormatVersion.upgradeFormatVersion; import static org.projectnessie.model.Content.Type.ICEBERG_VIEW; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import io.smallrye.common.annotation.Blocking; import io.smallrye.mutiny.Uni; @@ -74,6 +78,7 @@ import org.projectnessie.services.authz.AccessContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.config.ServerConfig; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; /** Handles Iceberg REST API v1 endpoints that are associated with views. 
*/ @@ -138,7 +143,7 @@ public Uni createView( .addRequirement(IcebergUpdateRequirement.AssertCreate.assertTableDoesNotExist()) .build(); - return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_VIEW) + return createOrUpdateEntity(tableRef, updateTableReq, ICEBERG_VIEW, CATALOG_CREATE_ENTITY) .map(snap -> loadViewResultFromSnapshotResponse(snap, IcebergLoadViewResponse.builder())); } @@ -180,7 +185,9 @@ public void dropView( .commitMeta(updateCommitMeta(format("Drop ICEBERG_VIEW %s", tableRef.contentKey()))) .build(); - treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops); + RequestMeta requestMeta = + API_WRITE.addKeyAction(tableRef.contentKey(), CATALOG_DROP_ENTITY.name()); + treeService.commitMultipleOperations(ref.getName(), ref.getHash(), ops, requestMeta); } private ContentResponse fetchIcebergView(TableRef tableRef, boolean forWrite) @@ -243,7 +250,7 @@ public void renameView( @Valid @NotNull IcebergRenameTableRequest renameTableRequest) throws IOException { - renameContent(prefix, renameTableRequest, ICEBERG_VIEW); + renameContent(prefix, renameTableRequest, ICEBERG_VIEW, API_WRITE); } @Operation(operationId = "iceberg.v1.viewExists") @@ -272,7 +279,7 @@ public Uni updateView( throws IOException { TableRef tableRef = decodeTableRef(prefix, namespace, view); - return createOrUpdateEntity(tableRef, commitViewRequest, ICEBERG_VIEW) + return createOrUpdateEntity(tableRef, commitViewRequest, ICEBERG_VIEW, CATALOG_UPDATE_ENTITY) .map( snap -> { IcebergViewMetadata viewMetadata = diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java index b9091e8f66c..e9f33f5de7c 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/IcebergS3SignParams.java @@ -22,6 +22,8 @@ import static org.projectnessie.catalog.formats.iceberg.rest.IcebergError.icebergError; import static org.projectnessie.catalog.formats.iceberg.rest.IcebergS3SignResponse.icebergS3SignResponse; import static org.projectnessie.catalog.service.rest.IcebergConfigurer.icebergWriteLocation; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; @@ -150,7 +152,7 @@ private Uni fetchSnapshot() { SnapshotReqParams.forSnapshotHttpReq(ref(), "iceberg", null), key(), null, - write()); + write() ? API_WRITE : API_READ); // consider an import failure as a non-existing content: // signing will be authorized for the future location only. 
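// (For context: the API_WRITE / API_READ value chosen above mirrors the HTTP method of the
// request being signed; read-only methods such as GET and HEAD map to API_READ, while mutating
// methods such as PUT, POST and DELETE map to API_WRITE, as exercised by TestIcebergS3SignParams.)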
return Uni.createFrom().completionStage(stage).onFailure().recoverWithNull(); diff --git a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java index b6e95f9ea17..4730043656f 100644 --- a/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java +++ b/catalog/service/rest/src/main/java/org/projectnessie/catalog/service/rest/NessieCatalogResource.java @@ -15,10 +15,12 @@ */ package org.projectnessie.catalog.service.rest; +import static org.projectnessie.catalog.formats.iceberg.nessie.CatalogOps.CATALOG_UPDATE_ENTITY; import static org.projectnessie.catalog.service.api.SnapshotReqParams.forSnapshotHttpReq; import static org.projectnessie.catalog.service.rest.ExternalBaseUri.parseRefPathString; import static org.projectnessie.model.Content.Type.ICEBERG_TABLE; import static org.projectnessie.model.Validation.REF_NAME_PATH_ELEMENT_REGEX; +import static org.projectnessie.versioned.RequestMeta.API_READ; import io.smallrye.common.annotation.Blocking; import io.smallrye.mutiny.Multi; @@ -73,7 +75,7 @@ public Multi tableSnapshots( // This operation can block --> @Blocking Stream>> snapshots = - catalogService.retrieveSnapshots(reqParams, keys, effectiveReference::set); + catalogService.retrieveSnapshots(reqParams, keys, effectiveReference::set, API_READ); Multi multi = Multi.createFrom() @@ -125,7 +127,8 @@ public Uni commit( return Uni.createFrom() .completionStage( - catalogService.commit(reference, commit, reqParams, this::updateCommitMeta)) + catalogService.commit( + reference, commit, reqParams, this::updateCommitMeta, CATALOG_UPDATE_ENTITY.name())) .map(v -> Response.ok().build()); } } diff --git a/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java b/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java index babc04408e4..5b6bab6ebd0 100644 --- a/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java +++ b/catalog/service/rest/src/test/java/org/projectnessie/catalog/service/rest/TestIcebergS3SignParams.java @@ -19,6 +19,8 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.when; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import io.smallrye.mutiny.Uni; import io.smallrye.mutiny.helpers.test.UniAssertSubscriber; @@ -121,7 +123,7 @@ class TestIcebergS3SignParams { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessRead(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(false))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_READ))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -135,7 +137,7 @@ void verifyAndSignSuccessRead(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignSuccessWrite(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(successStage); 
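// Illustrative note, not part of the original test: with the updated signature the interaction
// can also be verified against the RequestMeta constants (assuming a static import of
// org.mockito.Mockito.verify), e.g.:
//   verify(catalogService).retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE));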
when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -168,7 +170,7 @@ void verifyAndSignSuccessView() throws Exception { key, view, nessieViewSnapshot); - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(CompletableFuture.completedStage(snapshotResponse)); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -178,7 +180,7 @@ void verifyAndSignSuccessView() throws Exception { @Test void verifyAndSignSuccessContentNotFound() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenThrow(new NessieContentNotFoundException(key, "main")); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -188,7 +190,7 @@ void verifyAndSignSuccessContentNotFound() throws Exception { @Test void verifyAndSignFailureReferenceNotFound() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenThrow(new NessieReferenceNotFoundException("ref not found")); IcebergS3SignParams icebergSigner = newBuilder().build(); Uni response = icebergSigner.verifyAndSign(); @@ -199,7 +201,7 @@ void verifyAndSignFailureReferenceNotFound() throws Exception { void verifyAndSignSuccessImportFailed() throws Exception { CompletionStage importFailedStage = CompletableFuture.failedStage(new RuntimeException("import failed")); - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(importFailedStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = newBuilder().build(); @@ -210,7 +212,7 @@ void verifyAndSignSuccessImportFailed() throws Exception { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessReadMetadataLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(false))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_READ))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams icebergSigner = @@ -224,7 +226,7 @@ void verifyAndSignSuccessReadMetadataLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignFailureWriteMetadataLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() @@ -242,7 +244,7 @@ void verifyAndSignFailureWriteMetadataLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"GET", "HEAD", "OPTIONS", "TRACE"}) void verifyAndSignSuccessReadAncientLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(false))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_READ))) .thenReturn(successStage); when(signer.sign(any())).thenReturn(signingResponse); IcebergS3SignParams 
icebergSigner = @@ -261,7 +263,7 @@ void verifyAndSignSuccessReadAncientLocation(String method) throws Exception { @ParameterizedTest @ValueSource(strings = {"PUT", "POST", "DELETE", "PATCH"}) void verifyAndSignFailureWriteAncientLocation(String method) throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() @@ -279,7 +281,7 @@ void verifyAndSignFailureWriteAncientLocation(String method) throws Exception { @Test void verifyAndSignFailureWrongBaseLocation() throws Exception { - when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(true))) + when(catalogService.retrieveSnapshot(any(), eq(key), isNull(), eq(API_WRITE))) .thenReturn(successStage); IcebergS3SignParams icebergSigner = newBuilder() diff --git a/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java b/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java index 3109f696e83..9f5a05b890f 100644 --- a/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java +++ b/compatibility/jersey/src/main/java/org/projectnessie/tools/compatibility/jersey/AuthorizerExtension.java @@ -24,6 +24,7 @@ import java.util.function.Function; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public class AuthorizerExtension implements Extension { private final Authorizer authorizer = new Authorizer() { @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { if (accessCheckerSupplier == null) { return AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } diff --git a/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java b/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java index ced6fdac377..5bc5a03a442 100644 --- a/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java +++ b/servers/jax-rs-testextension/src/main/java/org/projectnessie/jaxrs/ext/AuthorizerExtension.java @@ -24,6 +24,7 @@ import java.util.function.Function; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public class AuthorizerExtension implements Extension { private final Authorizer authorizer = new Authorizer() { @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { if (accessCheckerSupplier == null) { return AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java index 59c85995358..9eb211f76d8 100644 --- 
a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelAuthorizer.java @@ -18,6 +18,7 @@ import jakarta.enterprise.context.Dependent; import jakarta.inject.Inject; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthorizerType; import org.projectnessie.services.authz.BatchAccessChecker; @@ -33,7 +34,7 @@ public CelAuthorizer(CompiledAuthorizationRules compiledRules) { } @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { - return new CelBatchAccessChecker(compiledRules, context); + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return new CelBatchAccessChecker(compiledRules, context, apiContext); } } diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java index 9d5c06c0fd7..1290f54a7d5 100644 --- a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/CelBatchAccessChecker.java @@ -15,6 +15,15 @@ */ package org.projectnessie.server.authz; +import static org.projectnessie.services.cel.CELUtil.VAR_ACTIONS; +import static org.projectnessie.services.cel.CELUtil.VAR_API; +import static org.projectnessie.services.cel.CELUtil.VAR_CONTENT_TYPE; +import static org.projectnessie.services.cel.CELUtil.VAR_OP; +import static org.projectnessie.services.cel.CELUtil.VAR_PATH; +import static org.projectnessie.services.cel.CELUtil.VAR_REF; +import static org.projectnessie.services.cel.CELUtil.VAR_ROLE; +import static org.projectnessie.services.cel.CELUtil.VAR_ROLES; + import java.security.Principal; import java.util.LinkedHashMap; import java.util.List; @@ -27,6 +36,7 @@ import org.projectnessie.model.RepositoryConfig; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.authz.Check; import org.projectnessie.versioned.NamedRef; @@ -39,7 +49,9 @@ final class CelBatchAccessChecker extends AbstractBatchAccessChecker { private final CompiledAuthorizationRules compiledRules; private final AccessContext context; - CelBatchAccessChecker(CompiledAuthorizationRules compiledRules, AccessContext context) { + CelBatchAccessChecker( + CompiledAuthorizationRules compiledRules, AccessContext context, ApiContext apiContext) { + super(apiContext); this.compiledRules = compiledRules; this.context = context; } @@ -81,17 +93,21 @@ private void canPerformOp(Check check, Map failed) { String roleName = roleName(); Map arguments = Map.of( - "role", + VAR_ROLE, roleName, - "roles", + VAR_ROLES, roles(), - "op", + VAR_OP, check.type().name(), - "path", + VAR_ACTIONS, + check.actions(), + VAR_API, + getApiContext(), + VAR_PATH, "", - "ref", + VAR_REF, "", - "contentType", + VAR_CONTENT_TYPE, ""); Supplier errorMsgSupplier = diff --git a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java index 
380c48599cb..b9321b87c47 100644 --- a/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java +++ b/servers/quarkus-auth/src/main/java/org/projectnessie/server/authz/QuarkusAuthorizer.java @@ -23,6 +23,7 @@ import org.projectnessie.server.config.QuarkusNessieAuthorizationConfig; import org.projectnessie.services.authz.AbstractBatchAccessChecker; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthorizerType; import org.projectnessie.services.authz.BatchAccessChecker; @@ -53,12 +54,12 @@ public QuarkusAuthorizer( this.authorizer = authorizerInstance.get(); } else { - this.authorizer = context -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + this.authorizer = (context, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; } } @Override - public BatchAccessChecker startAccessCheck(AccessContext context) { - return this.authorizer.startAccessCheck(context); + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return this.authorizer.startAccessCheck(context, apiContext); } } diff --git a/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java b/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java index b427ecb1db5..3c192e17565 100644 --- a/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java +++ b/servers/quarkus-auth/src/test/java/org/projectnessie/server/authz/TestCELAuthZ.java @@ -17,6 +17,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.CheckType.CREATE_REFERENCE; import static org.projectnessie.services.authz.Check.CheckType.VIEW_REFERENCE; @@ -93,7 +94,8 @@ public Principal user() { public Set roleIds() { return roles.get(); } - }); + }, + apiContext("Nessie", 2)); BranchName main = BranchName.of("main"); soft.assertThatCode(() -> batchAccessChecker.canViewReference(main).checkAndThrow()) @@ -127,7 +129,8 @@ public Set roleIds() { void celBatchAccessCheckerEmptyChecks(CheckType type) { QuarkusNessieAuthorizationConfig config = buildConfig(true); CompiledAuthorizationRules rules = new CompiledAuthorizationRules(config); - CelBatchAccessChecker batchAccessChecker = new CelBatchAccessChecker(rules, () -> () -> null); + CelBatchAccessChecker batchAccessChecker = + new CelBatchAccessChecker(rules, () -> () -> null, apiContext("Nessie", 2)); Check check = Check.builder(type).build(); if (type == CheckType.VIEW_REFERENCE) { soft.assertThatCode(() -> batchAccessChecker.can(check).checkAndThrow()) @@ -155,14 +158,14 @@ void celAuthorizer() { when(authorizers.select(new AuthorizerType.Literal("CEL"))).thenReturn(celAuthorizerInstance); soft.assertThat( new QuarkusAuthorizer(configEnabled, authorizers) - .startAccessCheck(() -> () -> "some-user")) + .startAccessCheck(() -> () -> "some-user", apiContext("Nessie", 2))) .isInstanceOf(CelBatchAccessChecker.class); when(celAuthorizerInstance.get()).thenReturn(celAuthorizer); when(authorizers.select(new AuthorizerType.Literal("CEL"))).thenReturn(celAuthorizerInstance); soft.assertThat( new QuarkusAuthorizer(configDisabled, authorizers) - .startAccessCheck(() -> () -> "some-user")) + .startAccessCheck(() -> () -> "some-user", apiContext("Nessie", 2))) 
.isSameAs(AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER); } diff --git a/servers/quarkus-server/build.gradle.kts b/servers/quarkus-server/build.gradle.kts index 045d431b90a..3b95618eefa 100644 --- a/servers/quarkus-server/build.gradle.kts +++ b/servers/quarkus-server/build.gradle.kts @@ -178,6 +178,8 @@ dependencies { testFixturesCompileOnly(libs.microprofile.openapi) testFixturesCompileOnly(project(":nessie-immutables")) + testCompileOnly(project(":nessie-immutables")) + testAnnotationProcessor(project(":nessie-immutables", configuration = "processor")) intTestCompileOnly(project(":nessie-immutables")) intTestAnnotationProcessor(project(":nessie-immutables", configuration = "processor")) diff --git a/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java new file mode 100644 index 00000000000..42b01dd18ae --- /dev/null +++ b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/MockedAuthorizer.java @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.server.authz; + +import static org.projectnessie.server.authz.MockedAuthorizer.AuthzCheck.authzCheck; +import static org.projectnessie.services.authz.Check.check; + +import jakarta.inject.Singleton; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.projectnessie.nessie.immutables.NessieImmutable; +import org.projectnessie.services.authz.AbstractBatchAccessChecker; +import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; +import org.projectnessie.services.authz.Authorizer; +import org.projectnessie.services.authz.AuthorizerType; +import org.projectnessie.services.authz.BatchAccessChecker; +import org.projectnessie.services.authz.Check; + +@AuthorizerType("MOCKED") +@Singleton +public class MockedAuthorizer implements Authorizer { + private final List checks = new ArrayList<>(); + + @Override + public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) { + return new MockedBatchAccessChecker(context, apiContext); + } + + public synchronized void reset() { + checks.clear(); + } + + public synchronized List checks() { + return List.copyOf(checks); + } + + public List checksWithoutIdentifiedKey() { + return checks().stream() + .map( + ac -> + authzCheck( + ac.apiContext(), + ac.checks().stream() + .map(c -> check(c.type(), c.ref(), c.key(), c.actions())) + .toList(), + ac.response())) + .toList(); + } + + synchronized void addCheck(AuthzCheck authzCheck) { + checks.add(authzCheck); + } + + public class MockedBatchAccessChecker extends AbstractBatchAccessChecker { + public final AccessContext context; + + public MockedBatchAccessChecker(AccessContext context, ApiContext apiContext) { + super(apiContext); + this.context = 
context; + } + + @Override + public Map check() { + var response = Map.of(); + addCheck(authzCheck(getApiContext(), getChecks(), response)); + return response; + } + } + + @NessieImmutable + public interface AuthzCheck { + ApiContext apiContext(); + + Set checks(); + + Map response(); + + static AuthzCheck authzCheck( + ApiContext apiContext, Collection checks, Map response) { + return ImmutableAuthzCheck.of(apiContext, new LinkedHashSet<>(checks), response); + } + } +} diff --git a/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java new file mode 100644 index 00000000000..48308bf34e2 --- /dev/null +++ b/servers/quarkus-server/src/test/java/org/projectnessie/server/authz/TestAuthzMeta.java @@ -0,0 +1,336 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.server.authz; + +import static org.apache.iceberg.types.Types.NestedField.required; +import static org.projectnessie.client.auth.BasicAuthenticationProvider.basicAuthorizationHeader; +import static org.projectnessie.server.authn.AuthenticationEnabledProfile.AUTH_CONFIG_OVERRIDES; +import static org.projectnessie.server.authn.AuthenticationEnabledProfile.SECURITY_CONFIG; +import static org.projectnessie.server.authz.MockedAuthorizer.AuthzCheck.authzCheck; +import static org.projectnessie.server.catalog.IcebergCatalogTestCommon.WAREHOUSE_NAME; +import static org.projectnessie.services.authz.ApiContext.apiContext; +import static org.projectnessie.services.authz.Check.CheckType.CREATE_ENTITY; +import static org.projectnessie.services.authz.Check.CheckType.DELETE_ENTITY; +import static org.projectnessie.services.authz.Check.CheckType.READ_ENTITY_VALUE; +import static org.projectnessie.services.authz.Check.CheckType.UPDATE_ENTITY; +import static org.projectnessie.services.authz.Check.canCommitChangeAgainstReference; +import static org.projectnessie.services.authz.Check.canReadContentKey; +import static org.projectnessie.services.authz.Check.canReadEntries; +import static org.projectnessie.services.authz.Check.canViewReference; +import static org.projectnessie.services.authz.Check.check; + +import com.google.common.collect.ImmutableMap; +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.TestProfile; +import jakarta.inject.Inject; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.iceberg.CatalogProperties; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.catalog.Namespace; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.exceptions.AlreadyExistsException; +import org.apache.iceberg.rest.RESTCatalog; +import org.apache.iceberg.types.Types; +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.projectnessie.client.auth.BasicAuthenticationProvider; +import org.projectnessie.model.ContentKey; +import org.projectnessie.objectstoragemock.HeapStorageBucket; +import org.projectnessie.quarkus.tests.profiles.BaseConfigProfile; +import org.projectnessie.server.BaseClientAuthTest; +import org.projectnessie.server.catalog.Catalogs; +import org.projectnessie.server.catalog.S3UnitTestProfiles.S3UnitTestProfile; +import org.projectnessie.services.authz.AuthorizerType; +import org.projectnessie.versioned.BranchName; + +@SuppressWarnings("resource") // api() returns an AutoCloseable +@QuarkusTest +@TestProfile(TestAuthzMeta.Profile.class) +public class TestAuthzMeta extends BaseClientAuthTest { + @Inject + @AuthorizerType("MOCKED") + MockedAuthorizer mockedAuthorizer; + + HeapStorageBucket heapStorageBucket; + + private static final Catalogs CATALOGS = new Catalogs(); + + // Cannot use @ExtendWith(SoftAssertionsExtension.class) + @InjectSoftAssertions here, because + // of Quarkus class loading issues. See https://github.com/quarkusio/quarkus/issues/19814 + protected final SoftAssertions soft = new SoftAssertions(); + + protected RESTCatalog catalog(Map catalogOptions) { + return CATALOGS.getCatalog(catalogOptions); + } + + @AfterAll + static void closeRestCatalog() throws Exception { + CATALOGS.close(); + } + + @AfterEach + void cleanup() { + // Cannot use @ExtendWith(SoftAssertionsExtension.class) + @InjectSoftAssertions here, because + // of Quarkus class loading issues. See https://github.com/quarkusio/quarkus/issues/19814 + soft.assertAll(); + } + + @BeforeEach + void beforeEach() { + mockedAuthorizer.reset(); + heapStorageBucket.clear(); + } + + @Test + public void icebergApiTable() { + var catalog = + catalog( + Map.of("http.header.Authorization", basicAuthorizationHeader("admin_user", "test123"))); + + var apiContext = apiContext("Iceberg", 1); + var branch = BranchName.of("main"); + + var myNamespace = ContentKey.of("ns_tables"); + var myNamespaceIceberg = Namespace.of("ns_tables"); + var tableKey = ContentKey.of("ns_tables", "table_foo"); + var tableIdentifier = TableIdentifier.of(myNamespaceIceberg, "table_foo"); + + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIceberg); + // no assertion, done in 'icebergApiNamespaces()' + + var schema = + new Schema( + required(3, "id", Types.IntegerType.get()), + required(4, "data", Types.StringType.get())); + var spec = PartitionSpec.builderFor(schema).bucket("id", 16).build(); + + mockedAuthorizer.reset(); + var props = new HashMap(); + catalog.createTable(tableIdentifier, schema, spec, "my://location", props); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + // 'IcebergApiV1ResourceBase.createEntityVerifyNotExists' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(READ_ENTITY_VALUE, branch, tableKey), + check(CREATE_ENTITY, branch, tableKey)), + Map.of()), + // 'CatalogServiceImpl.commit' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(READ_ENTITY_VALUE, branch, tableKey), + check(CREATE_ENTITY, branch, tableKey)), + Map.of()), + // actual 'commit' + authzCheck( + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check( + CREATE_ENTITY, + branch, + tableKey, + Set.of( + "META_ADD_SORT_ORDER", + "META_SET_DEFAULT_PARTITION_SPEC", 
+ "META_SET_CURRENT_SCHEMA", + "META_UPGRADE_FORMAT_VERSION", + "META_SET_PROPERTIES", + "META_ASSIGN_UUID", + "META_SET_LOCATION", + "META_ADD_SCHEMA", + "META_SET_DEFAULT_SORT_ORDER", + "META_ADD_PARTITION_SPEC"))), + Map.of())); + } + + @Test + public void icebergApiNamespaces() { + var catalog = + catalog( + Map.of("http.header.Authorization", basicAuthorizationHeader("admin_user", "test123"))); + + var myNamespace = ContentKey.of("my_namespace"); + var myNamespaceIceberg = Namespace.of("my_namespace"); + var myNamespaceInner = ContentKey.of("my_namespace", "inner"); + var myNamespaceIcebergInner = Namespace.of("my_namespace", "inner"); + + var apiContext = apiContext("Iceberg", 1); + var branch = BranchName.of("main"); + + mockedAuthorizer.reset(); + soft.assertThat(catalog.dropNamespace(myNamespaceIceberg)).isFalse(); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, List.of(canReadEntries(branch)), Map.of())); + + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIceberg); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck(apiContext, List.of(canViewReference(branch)), Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + check(CREATE_ENTITY, branch, myNamespace, Set.of("CATALOG_CREATE_ENTITY")), + canCommitChangeAgainstReference(branch)), + Map.of())); + + var props = new HashMap(); + props.put("location", "my_location"); + mockedAuthorizer.reset(); + catalog.createNamespace(myNamespaceIcebergInner, props); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck(apiContext, List.of(canViewReference(branch)), Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + check( + CREATE_ENTITY, + branch, + myNamespaceInner, + Set.of( + "META_SET_LOCATION", "CATALOG_CREATE_ENTITY", "META_SET_PROPERTIES")), + canCommitChangeAgainstReference(branch)), + Map.of())); + + var props2 = new HashMap(); + props2.put("a", "b"); + mockedAuthorizer.reset(); + catalog.setProperties(myNamespaceIceberg, props2); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getMultipleContents' in 'IcebergApiV1NamespaceResource.updateProperties' + apiContext, + List.of( + canViewReference(branch), + check(UPDATE_ENTITY, branch, myNamespace), + check(READ_ENTITY_VALUE, branch, myNamespace), + canCommitChangeAgainstReference(branch)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check( + UPDATE_ENTITY, + branch, + myNamespace, + Set.of("META_SET_PROPERTIES", "CATALOG_UPDATE_ENTITY"))), + Map.of())); + + // not empty + mockedAuthorizer.reset(); + soft.assertThatThrownBy(() -> catalog.dropNamespace(myNamespaceIceberg)) + .isInstanceOf(AlreadyExistsException.class); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, + List.of( + canReadEntries(branch), + canReadContentKey(branch, myNamespace), + canReadContentKey(branch, myNamespaceInner)), + Map.of())); + + mockedAuthorizer.reset(); + catalog.dropNamespace(myNamespaceIcebergInner); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 
'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, + List.of(canReadEntries(branch), canReadContentKey(branch, myNamespaceInner)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(DELETE_ENTITY, branch, myNamespaceInner, Set.of("CATALOG_DROP_ENTITY"))), + Map.of())); + + mockedAuthorizer.reset(); + catalog.dropNamespace(myNamespaceIceberg); + soft.assertThat(mockedAuthorizer.checksWithoutIdentifiedKey()) + .containsExactly( + authzCheck( // 'getEntries' in 'IcebergApiV1NamespaceResource.dropNamespace' + apiContext, + List.of(canReadEntries(branch), canReadContentKey(branch, myNamespace)), + Map.of()), + authzCheck( // 'commit' + apiContext, + List.of( + canViewReference(branch), + canCommitChangeAgainstReference(branch), + check(DELETE_ENTITY, branch, myNamespace, Set.of("CATALOG_DROP_ENTITY"))), + Map.of())); + } + + @Test + public void nessieApiV2() throws Exception { + withClientCustomizer( + c -> c.withAuthentication(BasicAuthenticationProvider.create("admin_user", "test123"))); + soft.assertThat(api().getAllReferences().stream()).isNotEmpty(); + + soft.assertThat(mockedAuthorizer.checks()) + .containsExactly( + authzCheck( + apiContext("Nessie", 2), + List.of(canViewReference(BranchName.of("main"))), + Map.of())); + } + + public static class Profile extends S3UnitTestProfile { + @Override + public Map getConfigOverrides() { + return ImmutableMap.builder() + .putAll(super.getConfigOverrides()) + .putAll(BaseConfigProfile.CONFIG_OVERRIDES) + .putAll(AUTH_CONFIG_OVERRIDES) + .putAll(SECURITY_CONFIG) + .put("quarkus.http.auth.basic", "true") + // Need a dummy URL to satisfy the Quarkus OIDC extension. + .put("quarkus.oidc.auth-server-url", "http://127.255.0.0:0/auth/realms/unset/") + // + .put("nessie.catalog.default-warehouse", WAREHOUSE_NAME) + .put(CatalogProperties.WAREHOUSE_LOCATION, WAREHOUSE_NAME) + // + .put("nessie.server.authorization.enabled", "true") + .put("nessie.server.authorization.type", "MOCKED") + .build(); + } + } +} diff --git a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/AbstractIcebergCatalogTests.java b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/AbstractIcebergCatalogTests.java index b6c421d7934..fe02a57500a 100644 --- a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/AbstractIcebergCatalogTests.java +++ b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/AbstractIcebergCatalogTests.java @@ -20,7 +20,6 @@ import static java.util.Collections.singletonMap; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; -import static org.projectnessie.server.catalog.IcebergCatalogTestCommon.EMPTY_OBJ_ID; import static org.projectnessie.server.catalog.IcebergCatalogTestCommon.WAREHOUSE_NAME; import com.google.common.collect.ImmutableMap; @@ -124,7 +123,10 @@ void cleanup() throws Exception { api.deleteReference().reference(reference).delete(); } } - api.assignReference().reference(main).assignTo(Branch.of("main", EMPTY_OBJ_ID)).assign(); + api.assignReference() + .reference(main) + .assignTo(Branch.of("main", IcebergCatalogTestCommon.EMPTY_OBJ_ID)) + .assign(); } } } diff --git a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java index 
f9da6c29fa6..8926dcd6285 100644 --- a/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java +++ b/servers/quarkus-server/src/testFixtures/java/org/projectnessie/server/catalog/Catalogs.java @@ -20,6 +20,7 @@ import java.util.TreeMap; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.CatalogProperties; +import org.apache.iceberg.rest.HTTPClient; import org.apache.iceberg.rest.RESTCatalog; public class Catalogs implements AutoCloseable { @@ -33,7 +34,18 @@ public RESTCatalog getCatalog(Map options) { options, opts -> { int catalogServerPort = Integer.getInteger("quarkus.http.port"); - RESTCatalog c = new RESTCatalog(); + RESTCatalog c = + new RESTCatalog( + config -> { + var builder = HTTPClient.builder(config).uri(config.get(CatalogProperties.URI)); + config.entrySet().stream() + .filter(e -> e.getKey().startsWith("http.header.")) + .forEach( + e -> + builder.withHeader( + e.getKey().substring("http.header.".length()), e.getValue())); + return builder.build(); + }); c.setConf(new Configuration()); Map catalogOptions = new HashMap<>(); catalogOptions.put( diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java new file mode 100644 index 00000000000..208211af39f --- /dev/null +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestApiContext.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.services.rest; + +import org.projectnessie.services.authz.ApiContext; + +public final class RestApiContext { + private RestApiContext() {} + + public static ApiContext NESSIE_V1 = ApiContext.apiContext("Nessie", 1); + public static ApiContext NESSIE_V2 = ApiContext.apiContext("Nessie", 2); +} diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java index e479d2a9620..4a916bfa321 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestConfigResource.java @@ -15,6 +15,8 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -44,7 +46,7 @@ public RestConfigResource() { @Inject public RestConfigResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, 1); + this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } @Override diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java index e06ff2d2f45..c81b6b7bc59 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestContentResource.java @@ -15,6 +15,9 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; +import static org.projectnessie.versioned.RequestMeta.API_READ; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -52,7 +55,7 @@ public RestContentResource() { @Inject public RestContentResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.contentService = new ContentApiImpl(config, store, authorizer, accessContext); + this.contentService = new ContentApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private ContentService resource() { @@ -63,7 +66,7 @@ private ContentService resource() { @JsonView(Views.V1.class) public Content getContent(ContentKey key, String ref, String hashOnRef) throws NessieNotFoundException { - return resource().getContent(key, ref, hashOnRef, false, false).getContent(); + return resource().getContent(key, ref, hashOnRef, false, API_READ).getContent(); } @Override @@ -71,6 +74,7 @@ public Content getContent(ContentKey key, String ref, String hashOnRef) public GetMultipleContentsResponse getMultipleContents( String ref, String hashOnRef, GetMultipleContentsRequest request) throws NessieNotFoundException { - return resource().getMultipleContents(ref, hashOnRef, request.getRequestedKeys(), false, false); + return resource() + .getMultipleContents(ref, hashOnRef, request.getRequestedKeys(), false, API_READ); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java 
b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java index 9e5b1e0a223..a10bc80b8fc 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestDiffResource.java @@ -16,6 +16,7 @@ package org.projectnessie.services.rest; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -55,7 +56,7 @@ public RestDiffResource() { @Inject public RestDiffResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.diffService = new DiffApiImpl(config, store, authorizer, accessContext); + this.diffService = new DiffApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private DiffService resource() { diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java index cdf32042caa..9c3e329d281 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestNamespaceResource.java @@ -15,6 +15,9 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -57,7 +60,8 @@ public RestNamespaceResource() { @Inject public RestNamespaceResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.namespaceService = new NamespaceApiImpl(config, store, authorizer, accessContext); + this.namespaceService = + new NamespaceApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private NamespaceService resource() { @@ -68,7 +72,7 @@ private NamespaceService resource() { @JsonView(Views.V1.class) public Namespace createNamespace(NamespaceParams params, Namespace namespace) throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException { - return resource().createNamespace(params.getRefName(), namespace); + return resource().createNamespace(params.getRefName(), namespace, API_WRITE); } @Override @@ -105,6 +109,7 @@ public void updateProperties(NamespaceParams params, NamespaceUpdate namespaceUp params.getRefName(), params.getNamespace(), namespaceUpdate.getPropertyUpdates(), - namespaceUpdate.getPropertyRemovals()); + namespaceUpdate.getPropertyRemovals(), + API_WRITE); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java index 7c52ed84ed0..2f1e71941cb 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestTreeResource.java @@ -17,7 +17,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V1; import static 
org.projectnessie.services.spi.TreeService.MAX_COMMIT_LOG_ENTRIES; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -73,7 +75,7 @@ public RestTreeResource() { @Inject public RestTreeResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.treeService = new TreeApiImpl(config, store, authorizer, accessContext); + this.treeService = new TreeApiImpl(config, store, authorizer, accessContext, NESSIE_V1); } private TreeService resource() { @@ -278,7 +280,7 @@ public Branch commitMultipleOperations( String branchName, String expectedHash, Operations operations) throws NessieNotFoundException, NessieConflictException { return resource() - .commitMultipleOperations(branchName, expectedHash, operations) + .commitMultipleOperations(branchName, expectedHash, operations, API_WRITE) .getTargetBranch(); } } diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java index 043cf8075b0..b16aea3ea4f 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2ConfigResource.java @@ -15,6 +15,8 @@ */ package org.projectnessie.services.rest; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V2; + import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; @@ -54,7 +56,7 @@ public RestV2ConfigResource() { @Inject public RestV2ConfigResource( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - this.config = new ConfigApiImpl(config, store, authorizer, accessContext, 2); + this.config = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V2); } @Override diff --git a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java index a3ae5b03078..f9421e1b53d 100644 --- a/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java +++ b/servers/rest-services/src/main/java/org/projectnessie/services/rest/RestV2TreeResource.java @@ -18,8 +18,11 @@ import static com.google.common.base.Preconditions.checkArgument; import static org.projectnessie.api.v2.params.ReferenceResolver.resolveReferencePathElement; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.rest.RestApiContext.NESSIE_V2; import static org.projectnessie.services.rest.common.RestCommon.updateCommitMeta; import static org.projectnessie.services.spi.TreeService.MAX_COMMIT_LOG_ENTRIES; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.fasterxml.jackson.annotation.JsonView; import jakarta.enterprise.context.RequestScoped; @@ -102,10 +105,10 @@ public RestV2TreeResource( Authorizer authorizer, AccessContext accessContext, HttpHeaders httpHeaders) { - this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, 2); - this.treeService = new TreeApiImpl(config, store, authorizer, accessContext); - this.contentService = new ContentApiImpl(config, store, 
authorizer, accessContext); - this.diffService = new DiffApiImpl(config, store, authorizer, accessContext); + this.configService = new ConfigApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.treeService = new TreeApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.contentService = new ContentApiImpl(config, store, authorizer, accessContext, NESSIE_V2); + this.diffService = new DiffApiImpl(config, store, authorizer, accessContext, NESSIE_V2); this.httpHeaders = httpHeaders; } @@ -356,7 +359,7 @@ public ContentResponse getContent( ParsedReference reference = parseRefPathString(ref); return content() .getContent( - key, reference.name(), reference.hashWithRelativeSpec(), withDocumentation, forWrite); + key, reference.name(), reference.hashWithRelativeSpec(), withDocumentation, API_READ); } @JsonView(Views.V2.class) @@ -381,7 +384,7 @@ public GetMultipleContentsResponse getMultipleContents( reference.hashWithRelativeSpec(), request.getRequestedKeys(), withDocumentation, - forWrite); + API_READ); } @JsonView(Views.V2.class) @@ -452,7 +455,8 @@ public CommitResponse commitMultipleOperations(String branch, Operations operati .commitMeta(commitMeta(CommitMeta.builder().from(operations.getCommitMeta())).build()); ParsedReference ref = parseRefPathString(branch); - return tree().commitMultipleOperations(ref.name(), ref.hashWithRelativeSpec(), ops.build()); + return tree() + .commitMultipleOperations(ref.name(), ref.hashWithRelativeSpec(), ops.build(), API_WRITE); } CommitMeta.Builder commitMeta(CommitMeta.Builder commitMeta) { diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java index 3d9ee9c6889..b4f5b3a7511 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/AbstractBatchAccessChecker.java @@ -16,17 +16,19 @@ package org.projectnessie.services.authz; import static java.util.Collections.emptyMap; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.util.Collection; import java.util.LinkedHashSet; import java.util.Map; +import java.util.Set; import org.projectnessie.model.IdentifiedContentKey; import org.projectnessie.model.RepositoryConfig; import org.projectnessie.versioned.NamedRef; public abstract class AbstractBatchAccessChecker implements BatchAccessChecker { public static final BatchAccessChecker NOOP_ACCESS_CHECKER = - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("", 0)) { @Override public Map check() { return emptyMap(); @@ -38,8 +40,18 @@ public BatchAccessChecker can(Check check) { } }; + private final ApiContext apiContext; private final Collection checks = new LinkedHashSet<>(); + protected AbstractBatchAccessChecker(ApiContext apiContext) { + this.apiContext = apiContext; + } + + @Override + public ApiContext getApiContext() { + return apiContext; + } + protected Collection getChecks() { return checks; } @@ -84,6 +96,13 @@ public BatchAccessChecker canReadContentKey(NamedRef ref, IdentifiedContentKey i return can(Check.canReadContentKey(ref, identifiedKey)); } + @Override + public BatchAccessChecker canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canReadContentKey(ref, identifiedKey, actions)); + } + @Override public 
BatchAccessChecker canListCommitLog(NamedRef ref) { canViewReference(ref); @@ -102,6 +121,13 @@ public BatchAccessChecker canReadEntityValue(NamedRef ref, IdentifiedContentKey return can(Check.canReadEntityValue(ref, identifiedKey)); } + @Override + public BatchAccessChecker canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canReadEntityValue(ref, identifiedKey, actions)); + } + @Override @Deprecated public BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { @@ -109,6 +135,14 @@ public BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey ide return can(Check.canCreateEntity(ref, identifiedKey)); } + @Override + @Deprecated + public BatchAccessChecker canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canCreateEntity(ref, identifiedKey, actions)); + } + @Override @Deprecated public BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { @@ -116,12 +150,27 @@ public BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey ide return can(Check.canUpdateEntity(ref, identifiedKey)); } + @Override + @Deprecated + public BatchAccessChecker canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canUpdateEntity(ref, identifiedKey, actions)); + } + @Override public BatchAccessChecker canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { canViewReference(ref); return can(Check.canDeleteEntity(ref, identifiedKey)); } + @Override + public BatchAccessChecker canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + canViewReference(ref); + return can(Check.canDeleteEntity(ref, identifiedKey, actions)); + } + @Override public BatchAccessChecker canReadRepositoryConfig(RepositoryConfig.Type repositoryConfigType) { return can(Check.canReadRepositoryConfig(repositoryConfigType)); diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java b/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java new file mode 100644 index 00000000000..19d2c747cc7 --- /dev/null +++ b/servers/services/src/main/java/org/projectnessie/services/authz/ApiContext.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
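To see the new pieces from an authorizer's point of view, here is a hypothetical checker built on the extended AbstractBatchAccessChecker. It is only a sketch: the element type Set<String> for actions(), the API name "Iceberg" and the action constant are assumptions standing in for whatever the catalog layer records via CatalogOps.

    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.projectnessie.services.authz.AbstractBatchAccessChecker;
    import org.projectnessie.services.authz.ApiContext;
    import org.projectnessie.services.authz.Check;

    class ExampleChecker extends AbstractBatchAccessChecker {

      ExampleChecker(ApiContext apiContext) {
        super(apiContext);
      }

      @Override
      public Map<Check, String> check() {
        Map<Check, String> failed = new LinkedHashMap<>();
        for (Check check : getChecks()) {
          // Restrict only commits arriving through the Iceberg REST API whose
          // per-key actions indicate that data files are being removed.
          if ("Iceberg".equals(getApiContext().getApiName())
              && check.type() == Check.CheckType.UPDATE_ENTITY
              && check.actions().contains("SNAPSHOT_REMOVE_DATA_FILES")) {
            failed.put(check, "Removing data files is not allowed");
          }
        }
        return failed;
      }
    }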
+ */ +package org.projectnessie.services.authz; + +import org.immutables.value.Value; + +@Value.Immutable +@Value.Style(allParameters = true) +public interface ApiContext { + String getApiName(); + + int getApiVersion(); + + static ApiContext apiContext(String apiName, int apiVersion) { + return ImmutableApiContext.of(apiName, apiVersion); + } +} diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java b/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java index 8a7020486cf..e307fd87229 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/Authorizer.java @@ -27,7 +27,8 @@ public interface Authorizer { * Start an access-check batch/bulk operation. * * @param context The context carrying the principal information. + * @param apiContext * @return access checker */ - BatchAccessChecker startAccessCheck(AccessContext context); + BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext); } diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java index 4bddfac540c..18eb27df51f 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/BatchAccessChecker.java @@ -17,6 +17,7 @@ import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.Map; +import java.util.Set; import org.projectnessie.model.Branch; import org.projectnessie.model.ContentKey; import org.projectnessie.model.Detached; @@ -54,6 +55,8 @@ public interface BatchAccessChecker { */ Map check(); + ApiContext getApiContext(); + /** * Convenience methods that throws an {@link AccessCheckException}, if {@link #check()} returns a * non-empty map. 
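The matching Authorizer side is equally small; a sketch only, reusing the hypothetical ExampleChecker from above. Existing implementations merely need to accept the additional ApiContext parameter and may forward it to their checker:

    import org.projectnessie.services.authz.AccessContext;
    import org.projectnessie.services.authz.ApiContext;
    import org.projectnessie.services.authz.Authorizer;
    import org.projectnessie.services.authz.BatchAccessChecker;

    class ExampleAuthorizer implements Authorizer {
      @Override
      public BatchAccessChecker startAccessCheck(AccessContext context, ApiContext apiContext) {
        // apiContext identifies the receiving API (for example "Nessie" v2 or the
        // Iceberg REST catalog) so checks can be evaluated differently per API.
        return new ExampleChecker(apiContext);
      }
    }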
@@ -132,6 +135,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canReadContentKey(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -166,6 +173,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canReadEntityValue(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -179,6 +190,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -192,6 +207,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey); /** @@ -205,6 +224,10 @@ static void throwForFailedChecks(Map failedChecks) throws AccessC * @param identifiedKey content key / ID / type to check */ @CanIgnoreReturnValue + BatchAccessChecker canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions); + + @CanIgnoreReturnValue BatchAccessChecker canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey); @CanIgnoreReturnValue diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/Check.java b/servers/services/src/main/java/org/projectnessie/services/authz/Check.java index d2e0e4dc8b3..108ecb5db32 100644 --- a/servers/services/src/main/java/org/projectnessie/services/authz/Check.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/Check.java @@ -16,6 +16,8 @@ package org.projectnessie.services.authz; import jakarta.annotation.Nullable; +import jakarta.validation.constraints.NotNull; +import java.util.Set; import org.immutables.value.Value; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; @@ -53,24 +55,43 @@ public interface Check { @Value.Parameter(order = 7) RepositoryConfig.Type repositoryConfigType(); + @Value.Parameter(order = 8) + Set actions(); + static Check check(CheckType type) { - return ImmutableCheck.of(type, null, null, null, null, null, null); + return ImmutableCheck.of(type, null, null, null, null, null, null, Set.of()); } static Check check(CheckType type, RepositoryConfig.Type repositoryConfigType) { - return ImmutableCheck.of(type, null, null, null, null, null, repositoryConfigType); + return ImmutableCheck.of(type, null, null, null, null, null, repositoryConfigType, Set.of()); } static Check check(CheckType type, @Nullable NamedRef ref) { - return ImmutableCheck.of(type, ref, null, null, null, null, null); + return ImmutableCheck.of(type, ref, null, null, null, null, null, Set.of()); } static Check check(CheckType type, @Nullable NamedRef ref, @Nullable ContentKey key) { - return 
ImmutableCheck.of(type, ref, key, null, null, null, null); + return check(type, ref, key, Set.of()); + } + + static Check check( + CheckType type, + @Nullable NamedRef ref, + @Nullable ContentKey key, + @NotNull Set actions) { + return ImmutableCheck.of(type, ref, key, null, null, null, null, actions); } static Check check( CheckType type, @Nullable NamedRef ref, @Nullable IdentifiedContentKey identifiedKey) { + return check(type, ref, identifiedKey, Set.of()); + } + + static Check check( + CheckType type, + @Nullable NamedRef ref, + @Nullable IdentifiedContentKey identifiedKey, + @NotNull Set actions) { if (identifiedKey != null) { IdentifiedContentKey.IdentifiedElement element = identifiedKey.lastElement(); return ImmutableCheck.of( @@ -80,10 +101,11 @@ static Check check( element.contentId(), identifiedKey.type(), identifiedKey, - null); + null, + actions); } - return ImmutableCheck.of(type, ref, null, null, null, null, null); + return ImmutableCheck.of(type, ref, null, null, null, null, null, actions); } static ImmutableCheck.Builder builder(CheckType type) { @@ -167,10 +189,19 @@ static Check canReadContentKey(NamedRef ref, ContentKey key) { return check(CheckType.READ_CONTENT_KEY, ref, key); } + static Check canReadContentKey(NamedRef ref, ContentKey key, Set actions) { + return check(CheckType.READ_CONTENT_KEY, ref, key, actions); + } + static Check canReadContentKey(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.READ_CONTENT_KEY, ref, identifiedKey); } + static Check canReadContentKey( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.READ_CONTENT_KEY, ref, identifiedKey, actions); + } + static Check canListCommitLog(NamedRef ref) { return check(CheckType.LIST_COMMIT_LOG, ref); } @@ -183,18 +214,38 @@ static Check canReadEntityValue(NamedRef ref, IdentifiedContentKey identifiedKey return check(CheckType.READ_ENTITY_VALUE, ref, identifiedKey); } + static Check canReadEntityValue( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.READ_ENTITY_VALUE, ref, identifiedKey, actions); + } + static Check canCreateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.CREATE_ENTITY, ref, identifiedKey); } + static Check canCreateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.CREATE_ENTITY, ref, identifiedKey, actions); + } + static Check canUpdateEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.UPDATE_ENTITY, ref, identifiedKey); } + static Check canUpdateEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.UPDATE_ENTITY, ref, identifiedKey, actions); + } + static Check canDeleteEntity(NamedRef ref, IdentifiedContentKey identifiedKey) { return check(CheckType.DELETE_ENTITY, ref, identifiedKey); } + static Check canDeleteEntity( + NamedRef ref, IdentifiedContentKey identifiedKey, Set actions) { + return check(CheckType.DELETE_ENTITY, ref, identifiedKey, actions); + } + static Check canReadRepositoryConfig(RepositoryConfig.Type repositoryConfigType) { return check(CheckType.READ_REPOSITORY_CONFIG, repositoryConfigType); } diff --git a/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java b/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java index ed3dc8f9cc8..721d1b94b99 100644 --- 
a/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java +++ b/servers/services/src/main/java/org/projectnessie/services/authz/RetriableAccessChecker.java @@ -29,19 +29,33 @@ */ public final class RetriableAccessChecker { private final Supplier validator; + private final ApiContext apiContext; private Collection validatedChecks; private Map result; - public RetriableAccessChecker(Supplier validator) { + public RetriableAccessChecker(Supplier validator, ApiContext apiContext) { Preconditions.checkNotNull(validator); this.validator = validator; + this.apiContext = apiContext; } public BatchAccessChecker newAttempt() { - return new Attempt(); + return new Attempt(apiContext); } private class Attempt extends AbstractBatchAccessChecker { + private final ApiContext apiContext; + + Attempt(ApiContext apiContext) { + super(apiContext); + this.apiContext = apiContext; + } + + @Override + public ApiContext getApiContext() { + return apiContext; + } + @Override public Map check() { // Shallow collection copy to ensure that we use what was current at the time of check diff --git a/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java b/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java index 6e157463c06..1b43f9944a4 100644 --- a/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java +++ b/servers/services/src/main/java/org/projectnessie/services/cel/CELUtil.java @@ -50,6 +50,8 @@ public final class CELUtil { public static final String VAR_ROLE = "role"; public static final String VAR_ROLES = "roles"; public static final String VAR_OP = "op"; + public static final String VAR_ACTIONS = "actions"; + public static final String VAR_API = "api"; public static final String VAR_OPERATIONS = "operations"; public static final String VAR_CONTENT_TYPE = "contentType"; diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java index 9eb0be4c507..d018de721df 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/BaseApiImpl.java @@ -34,6 +34,7 @@ import org.projectnessie.model.CommitMeta; import org.projectnessie.model.ContentKey; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -47,14 +48,20 @@ public abstract class BaseApiImpl { private final VersionStore store; private final Authorizer authorizer; private final AccessContext accessContext; + private final ApiContext apiContext; private HashResolver hashResolver; protected BaseApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { this.config = config; this.store = store; this.authorizer = authorizer; this.accessContext = accessContext; + this.apiContext = apiContext; } /** @@ -104,6 +111,10 @@ protected Authorizer getAuthorizer() { return authorizer; } + protected ApiContext getApiContext() { + return apiContext; + } + protected HashResolver getHashResolver() { if (hashResolver == null) { this.hashResolver = new HashResolver(config, store); @@ 
-112,7 +123,7 @@ protected HashResolver getHashResolver() { } protected BatchAccessChecker startAccessCheck() { - return getAuthorizer().startAccessCheck(accessContext); + return getAuthorizer().startAccessCheck(accessContext, apiContext); } protected MetadataRewriter commitMetaUpdate( diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java index 9b16458e96b..ea88fbdff24 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/ConfigApiImpl.java @@ -26,6 +26,7 @@ import org.projectnessie.model.RepositoryConfig; import org.projectnessie.model.types.GenericRepositoryConfig; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -36,16 +37,13 @@ public class ConfigApiImpl extends BaseApiImpl implements ConfigService { - private final int actualApiVersion; - public ConfigApiImpl( ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext, - int actualApiVersion) { - super(config, store, authorizer, accessContext); - this.actualApiVersion = actualApiVersion; + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -58,7 +56,7 @@ public NessieConfiguration getConfig() { return ImmutableNessieConfiguration.builder() .from(NessieConfiguration.getBuiltInConfig()) .defaultBranch(defaultBranch) - .actualApiVersion(actualApiVersion) + .actualApiVersion(getApiContext().getApiVersion()) .noAncestorHash(info.getNoAncestorHash()) .repositoryCreationTimestamp(info.getRepositoryCreationTimestamp()) .oldestPossibleCommitTimestamp(info.getOldestPossibleCommitTimestamp()) diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java index 61ecd1dbf3c..5619c9a477e 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/ContentApiImpl.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import org.projectnessie.error.NessieContentNotFoundException; import org.projectnessie.error.NessieNotFoundException; @@ -33,6 +34,7 @@ import org.projectnessie.model.Reference; import org.projectnessie.model.Tag; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -44,6 +46,7 @@ import org.projectnessie.versioned.DetachedRef; import org.projectnessie.versioned.NamedRef; import org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.TagName; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.WithHash; @@ -51,8 +54,12 @@ public class ContentApiImpl extends BaseApiImpl implements ContentService { public ContentApiImpl( - ServerConfig config, VersionStore store, Authorizer 
authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -61,25 +68,26 @@ public ContentResponse getContent( String namedRef, String hashOnRef, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException { try { ResolvedHash ref = getHashResolver() .resolveHashOnRef(namedRef, hashOnRef, new HashValidator("Expected hash")); - ContentResult obj = getStore().getValue(ref.getHash(), key, forWrite); + ContentResult obj = getStore().getValue(ref.getHash(), key, requestMeta.forWrite()); BatchAccessChecker accessCheck = startAccessCheck(); NamedRef r = ref.getValue(); accessCheck.canViewReference(r); - if (forWrite) { + if (requestMeta.forWrite()) { accessCheck.canCommitChangeAgainstReference(r); } + Set actions = requestMeta.keyActions(key); if (obj != null && obj.content() != null) { - accessCheck.canReadEntityValue(r, obj.identifiedKey()); - if (forWrite) { - accessCheck.canUpdateEntity(r, obj.identifiedKey()); + accessCheck.canReadEntityValue(r, obj.identifiedKey(), actions); + if (requestMeta.forWrite()) { + accessCheck.canUpdateEntity(r, obj.identifiedKey(), actions); } accessCheck.checkAndThrow(); @@ -87,10 +95,10 @@ public ContentResponse getContent( return ContentResponse.of(obj.content(), makeReference(ref), null); } - if (forWrite) { + if (requestMeta.forWrite()) { accessCheck - .canReadEntityValue(r, requireNonNull(obj, "obj is null").identifiedKey()) - .canCreateEntity(r, obj.identifiedKey()); + .canReadEntityValue(r, requireNonNull(obj, "obj is null").identifiedKey(), actions) + .canCreateEntity(r, obj.identifiedKey(), actions); } accessCheck.checkAndThrow(); @@ -106,7 +114,7 @@ public GetMultipleContentsResponse getMultipleContents( String hashOnRef, List keys, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException { try { ResolvedHash ref = @@ -115,26 +123,28 @@ public GetMultipleContentsResponse getMultipleContents( NamedRef r = ref.getValue(); BatchAccessChecker check = startAccessCheck().canViewReference(r); - if (forWrite) { + if (requestMeta.forWrite()) { check.canCommitChangeAgainstReference(r); } - Map values = getStore().getValues(ref.getHash(), keys, forWrite); + Map values = + getStore().getValues(ref.getHash(), keys, requestMeta.forWrite()); List output = values.entrySet().stream() .filter( e -> { ContentResult contentResult = e.getValue(); IdentifiedContentKey identifiedKey = contentResult.identifiedKey(); - check.canReadEntityValue(r, identifiedKey); + Set actions = requestMeta.keyActions(identifiedKey.contentKey()); + check.canReadEntityValue(r, identifiedKey, actions); if (contentResult.content() != null) { - if (forWrite) { - check.canUpdateEntity(r, identifiedKey); + if (requestMeta.forWrite()) { + check.canUpdateEntity(r, identifiedKey, actions); } return true; } else { - if (forWrite) { - check.canCreateEntity(r, identifiedKey); + if (requestMeta.forWrite()) { + check.canCreateEntity(r, identifiedKey, actions); } return false; } diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java index 3f21aa6a5f8..a4e5104d643 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java +++ 
b/servers/services/src/main/java/org/projectnessie/services/impl/DiffApiImpl.java @@ -34,6 +34,7 @@ import org.projectnessie.model.ContentKey; import org.projectnessie.model.DiffResponse.DiffEntry; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthzPaginationIterator; import org.projectnessie.services.authz.Check; @@ -53,8 +54,12 @@ public class DiffApiImpl extends BaseApiImpl implements DiffService { public DiffApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java index 5ca740ce448..1fc562e5a66 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/NamespaceApiImpl.java @@ -20,6 +20,7 @@ import static org.projectnessie.error.ContentKeyErrorDetails.contentKeyErrorDetails; import static org.projectnessie.model.Validation.validateHash; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import static org.projectnessie.versioned.VersionStore.KeyRestrictions.NO_KEY_RESTRICTIONS; import com.google.common.base.Preconditions; @@ -51,6 +52,7 @@ import org.projectnessie.model.Operation.Delete; import org.projectnessie.model.Operation.Put; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.BatchAccessChecker; import org.projectnessie.services.config.ServerConfig; @@ -62,18 +64,23 @@ import org.projectnessie.versioned.KeyEntry; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.paging.PaginationIterator; public class NamespaceApiImpl extends BaseApiImpl implements NamespaceService { public NamespaceApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override - public Namespace createNamespace(String refName, Namespace namespace) + public Namespace createNamespace(String refName, Namespace namespace, RequestMeta requestMeta) throws NessieReferenceNotFoundException { Preconditions.checkArgument(!namespace.isEmpty(), "Namespace name must not be empty"); @@ -104,7 +111,8 @@ public Namespace createNamespace(String refName, Namespace namespace) commit( BranchName.of(refWithHash.getValue().getName()), "create namespace '" + namespace.toCanonicalString() + "'", - put); + put, + requestMeta); Content content = 
getExplicitlyCreatedNamespace(namespace, hash).orElse(null); @@ -145,7 +153,8 @@ public void deleteNamespace(String refName, Namespace namespaceToDelete) commit( BranchName.of(refWithHash.getValue().getName()), "delete namespace '" + namespace.toCanonicalString() + "'", - delete); + delete, + API_WRITE); } catch (ReferenceNotFoundException | ReferenceConflictException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); } @@ -255,7 +264,8 @@ public void updateProperties( String refName, Namespace namespaceToUpdate, Map propertyUpdates, - Set propertyRemovals) + Set propertyRemovals, + RequestMeta requestMeta) throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { try { ResolvedHash refWithHash = getHashResolver().resolveToHead(refName); @@ -275,7 +285,8 @@ public void updateProperties( commit( BranchName.of(refWithHash.getValue().getName()), "update properties for namespace '" + updatedNamespace.toCanonicalString() + "'", - put); + put, + requestMeta); } catch (ReferenceNotFoundException | ReferenceConflictException e) { throw new NessieReferenceNotFoundException(e.getMessage(), e); @@ -356,7 +367,8 @@ private static NessieReferenceNotFoundException refNotFoundException( return new NessieReferenceNotFoundException(e.getMessage(), e); } - private Hash commit(BranchName branch, String commitMsg, Operation contentOperation) + private Hash commit( + BranchName branch, String commitMsg, Operation contentOperation, RequestMeta requestMeta) throws ReferenceNotFoundException, ReferenceConflictException { return getStore() .commit( @@ -371,15 +383,17 @@ private Hash commit(BranchName branch, String commitMsg, Operation contentOperat .operations() .forEach( op -> { + Set actions = + requestMeta.keyActions(op.identifiedKey().contentKey()); switch (op.operationType()) { case CREATE: - check.canCreateEntity(branch, op.identifiedKey()); + check.canCreateEntity(branch, op.identifiedKey(), actions); break; case UPDATE: - check.canUpdateEntity(branch, op.identifiedKey()); + check.canUpdateEntity(branch, op.identifiedKey(), actions); break; case DELETE: - check.canDeleteEntity(branch, op.identifiedKey()); + check.canDeleteEntity(branch, op.identifiedKey(), actions); break; default: throw new UnsupportedOperationException( diff --git a/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java b/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java index 589710f2d46..51c24f66179 100644 --- a/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java +++ b/servers/services/src/main/java/org/projectnessie/services/impl/TreeApiImpl.java @@ -41,6 +41,7 @@ import static org.projectnessie.services.cel.CELUtil.VAR_REF_META; import static org.projectnessie.services.cel.CELUtil.VAR_REF_TYPE; import static org.projectnessie.services.impl.RefUtil.toNamedRef; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; @@ -99,6 +100,7 @@ import org.projectnessie.model.Tag; import org.projectnessie.model.Validation; import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.ApiContext; import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.authz.AuthzPaginationIterator; import org.projectnessie.services.authz.BatchAccessChecker; @@ -124,6 +126,7 @@ import org.projectnessie.versioned.ReferenceHistory; import org.projectnessie.versioned.ReferenceInfo; import 
org.projectnessie.versioned.ReferenceNotFoundException; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.TagName; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.VersionStore.CommitValidator; @@ -135,8 +138,12 @@ public class TreeApiImpl extends BaseApiImpl implements TreeService { public TreeApiImpl( - ServerConfig config, VersionStore store, Authorizer authorizer, AccessContext accessContext) { - super(config, store, authorizer, accessContext); + ServerConfig config, + VersionStore store, + Authorizer authorizer, + AccessContext accessContext, + ApiContext apiContext) { + super(config, store, authorizer, accessContext, apiContext); } @Override @@ -695,7 +702,7 @@ fromRef, hash, new HashValidator("Hash to transplant").hashMustBeUnambiguous()) .defaultMergeBehavior(defaultMergeBehavior(defaultMergeBehavior)) .dryRun(Boolean.TRUE.equals(dryRun)) .fetchAdditionalInfo(Boolean.TRUE.equals(fetchAdditionalInfo)) - .validator(createCommitValidator((BranchName) toRef.getNamedRef())) + .validator(createCommitValidator((BranchName) toRef.getNamedRef(), API_WRITE)) .build()); return createResponse(fetchAdditionalInfo, result); } catch (ReferenceNotFoundException e) { @@ -771,7 +778,7 @@ public MergeResponse mergeRefIntoBranch( .defaultMergeBehavior(defaultMergeBehavior(defaultMergeBehavior)) .dryRun(Boolean.TRUE.equals(dryRun)) .fetchAdditionalInfo(Boolean.TRUE.equals(fetchAdditionalInfo)) - .validator(createCommitValidator((BranchName) toRef.getNamedRef())) + .validator(createCommitValidator((BranchName) toRef.getNamedRef(), API_WRITE)) .build()); return createResponse(fetchAdditionalInfo, result); } catch (ReferenceNotFoundException e) { @@ -1042,7 +1049,7 @@ protected BiPredicate filterEntries(String filter) { @Override public CommitResponse commitMultipleOperations( - String branch, String expectedHash, Operations operations) + String branch, String expectedHash, Operations operations, RequestMeta requestMeta) throws NessieNotFoundException, NessieConflictException { CommitMeta commitMeta = operations.getCommitMeta(); @@ -1067,7 +1074,7 @@ public CommitResponse commitMultipleOperations( Optional.of(toRef.getHash()), commitMetaUpdate(null, numCommits -> null).rewriteSingle(commitMeta), operations.getOperations(), - createCommitValidator((BranchName) toRef.getNamedRef()), + createCommitValidator((BranchName) toRef.getNamedRef(), requestMeta), (key, cid) -> commitResponse.addAddedContents(addedContent(key, cid))) .getCommitHash(); @@ -1079,14 +1086,15 @@ public CommitResponse commitMultipleOperations( } } - private CommitValidator createCommitValidator(BranchName branchName) { + private CommitValidator createCommitValidator(BranchName branchName, RequestMeta requestMeta) { // Commits routinely run retries due to collisions on updating the HEAD of the branch. // Authorization is not dependent on the commit history, only on the collection of access // checks, which reflect the current commit. On retries, the commit data relevant to access // checks almost never changes. Therefore, we use RetriableAccessChecker to avoid re-validating // access checks (which could be a time-consuming operation) on subsequent retries, unless // authorization input data changes. 
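For SPI callers the change is mechanical, as this sketch (class and parameter names invented) shows: every commit now carries a RequestMeta, and the validator above attaches its per-content-key actions to the individual checks. Plain Nessie REST commits pass the predefined API_WRITE constant; richer per-key actions are only populated by the Iceberg REST (catalog) code paths.

    import org.projectnessie.error.NessieConflictException;
    import org.projectnessie.error.NessieNotFoundException;
    import org.projectnessie.model.Branch;
    import org.projectnessie.model.Operations;
    import org.projectnessie.services.spi.TreeService;
    import org.projectnessie.versioned.RequestMeta;

    class CommitExample {
      static Branch commit(TreeService treeService, String expectedHash, Operations operations)
          throws NessieNotFoundException, NessieConflictException {
        return treeService
            .commitMultipleOperations("main", expectedHash, operations, RequestMeta.API_WRITE)
            .getTargetBranch();
      }
    }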
- RetriableAccessChecker accessChecker = new RetriableAccessChecker(this::startAccessCheck); + RetriableAccessChecker accessChecker = + new RetriableAccessChecker(this::startAccessCheck, getApiContext()); return validation -> { BatchAccessChecker check = accessChecker.newAttempt(); check.canCommitChangeAgainstReference(branchName); @@ -1094,15 +1102,16 @@ private CommitValidator createCommitValidator(BranchName branchName) { .operations() .forEach( op -> { + Set keyActions = requestMeta.keyActions(op.identifiedKey().contentKey()); switch (op.operationType()) { case CREATE: - check.canCreateEntity(branchName, op.identifiedKey()); + check.canCreateEntity(branchName, op.identifiedKey(), keyActions); break; case UPDATE: - check.canUpdateEntity(branchName, op.identifiedKey()); + check.canUpdateEntity(branchName, op.identifiedKey(), keyActions); break; case DELETE: - check.canDeleteEntity(branchName, op.identifiedKey()); + check.canDeleteEntity(branchName, op.identifiedKey(), keyActions); break; default: throw new UnsupportedOperationException( diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java b/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java index 35d25972bc6..24af7133faa 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/ContentService.java @@ -31,6 +31,7 @@ import org.projectnessie.model.ContentResponse; import org.projectnessie.model.GetMultipleContentsResponse; import org.projectnessie.services.authz.AccessCheckException; +import org.projectnessie.versioned.RequestMeta; /** * Server-side interface to services managing the loading of content objects. @@ -47,7 +48,7 @@ public interface ContentService { * @param namedRef name of the reference * @param hashOnRef optional, ID of the commit or a commit specification * @param withDocumentation unused, pass {@code false} - * @param forWrite if {@code false}, "natural" read access checks will be performed. If {@code + * @param requestMeta if {@code false}, "natural" read access checks will be performed. If {@code * true}, update/create access checks will be performed in addition to the read access checks. * @return the content response, if the content object exists * @throws NessieNotFoundException if the content object or the reference does not exist @@ -66,7 +67,7 @@ ContentResponse getContent( message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String hashOnRef, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException; /** @@ -76,7 +77,7 @@ ContentResponse getContent( * @param hashOnRef optional, ID of the commit or a commit specification * @param keys the keys of the content objects to retrieve * @param withDocumentation unused, pass {@code false} - * @param forWrite if {@code false}, "natural" read access checks will be performed. If {@code + * @param requestMeta if {@code false}, "natural" read access checks will be performed. If {@code * true}, update/create access checks will be performed in addition to the read access checks. 
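A small usage sketch of the updated ContentService contract (class and parameter names invented): API_READ keeps the former forWrite=false behaviour and performs only read checks, while API_WRITE additionally triggers the update/create checks that forWrite=true used to imply.

    import org.projectnessie.error.NessieNotFoundException;
    import org.projectnessie.model.ContentKey;
    import org.projectnessie.model.ContentResponse;
    import org.projectnessie.services.spi.ContentService;
    import org.projectnessie.versioned.RequestMeta;

    class GetContentExample {
      static ContentResponse read(ContentService contentService, ContentKey key)
          throws NessieNotFoundException {
        // hashOnRef is optional and left null here
        return contentService.getContent(key, "main", null, false, RequestMeta.API_READ);
      }
    }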
* @return the existing content objects * @throws NessieNotFoundException if the reference does not exist @@ -92,8 +93,8 @@ GetMultipleContentsResponse getMultipleContents( regexp = HASH_OR_RELATIVE_COMMIT_SPEC_REGEX, message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String hashOnRef, - @Valid @Size @jakarta.validation.constraints.Size(min = 1) List keys, + @Valid @Size @Size(min = 1) List keys, boolean withDocumentation, - boolean forWrite) + RequestMeta requestMeta) throws NessieNotFoundException; } diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java b/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java index 81aba640292..d129d401b4a 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/NamespaceService.java @@ -23,6 +23,7 @@ import org.projectnessie.error.NessieReferenceNotFoundException; import org.projectnessie.model.GetNamespacesResponse; import org.projectnessie.model.Namespace; +import org.projectnessie.versioned.RequestMeta; /** * Server-side interface to services managing namespaces. @@ -32,14 +33,15 @@ */ public interface NamespaceService { - Namespace createNamespace(String refName, Namespace namespace) + Namespace createNamespace(String refName, Namespace namespace, RequestMeta requestMeta) throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException; void updateProperties( String refName, Namespace namespaceToUpdate, Map propertyUpdates, - Set propertyRemovals) + Set propertyRemovals, + RequestMeta requestMeta) throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException; void deleteNamespace(String refName, Namespace namespaceToDelete) diff --git a/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java b/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java index 045f1610533..d8c9e962939 100644 --- a/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java +++ b/servers/services/src/main/java/org/projectnessie/services/spi/TreeService.java @@ -45,6 +45,7 @@ import org.projectnessie.model.Reference.ReferenceType; import org.projectnessie.model.ReferenceHistoryResponse; import org.projectnessie.versioned.NamedRef; +import org.projectnessie.versioned.RequestMeta; import org.projectnessie.versioned.WithHash; /** @@ -201,6 +202,7 @@ CommitResponse commitMultipleOperations( regexp = HASH_OR_RELATIVE_COMMIT_SPEC_REGEX, message = HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE) String expectedHash, - @Valid Operations operations) + @Valid Operations operations, + @NotNull RequestMeta requestMeta) throws NessieNotFoundException, NessieConflictException; } diff --git a/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java b/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java index b3c1d9d3838..e879ac1ce26 100644 --- a/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java +++ b/servers/services/src/test/java/org/projectnessie/services/authz/TestBatchAccessChecker.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.projectnessie.services.authz.ApiContext.apiContext; import com.google.common.collect.ImmutableMap; import 
java.util.Arrays; @@ -139,7 +140,7 @@ private static void performCheck(BatchAccessChecker checker, Check c) { checker.canReadEntries(c.ref()); break; case READ_CONTENT_KEY: - checker.canReadContentKey(c.ref(), c.identifiedKey()); + checker.canReadContentKey(c.ref(), c.identifiedKey(), c.actions()); break; case ASSIGN_REFERENCE_TO_HASH: checker.canAssignRefToHash(c.ref()); @@ -151,16 +152,16 @@ private static void performCheck(BatchAccessChecker checker, Check c) { checker.canCommitChangeAgainstReference(c.ref()); break; case READ_ENTITY_VALUE: - checker.canReadEntityValue(c.ref(), c.identifiedKey()); + checker.canReadEntityValue(c.ref(), c.identifiedKey(), c.actions()); break; case CREATE_ENTITY: - checker.canCreateEntity(c.ref(), c.identifiedKey()); + checker.canCreateEntity(c.ref(), c.identifiedKey(), c.actions()); break; case UPDATE_ENTITY: - checker.canUpdateEntity(c.ref(), c.identifiedKey()); + checker.canUpdateEntity(c.ref(), c.identifiedKey(), c.actions()); break; case DELETE_ENTITY: - checker.canDeleteEntity(c.ref(), c.identifiedKey()); + checker.canDeleteEntity(c.ref(), c.identifiedKey(), c.actions()); break; case READ_REPOSITORY_CONFIG: checker.canReadRepositoryConfig(c.repositoryConfigType()); @@ -191,7 +192,7 @@ private static List listWithAllCheckTypes() { static BatchAccessChecker newAccessChecker( Function, Map> check) { - return new AbstractBatchAccessChecker() { + return new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return check.apply(getChecks()); diff --git a/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java b/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java index b8bb7e660bc..d89c75b0694 100644 --- a/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java +++ b/servers/services/src/test/java/org/projectnessie/services/authz/TestRetriableAccessChecker.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.IdentifiedContentKey.IdentifiedElement.identifiedElement; +import static org.projectnessie.services.authz.ApiContext.apiContext; import java.util.ArrayList; import java.util.HashMap; @@ -38,7 +39,7 @@ class TestRetriableAccessChecker { private final Supplier validator = () -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { checkCount++; @@ -50,7 +51,7 @@ public Map check() { @Test void checkAndThrow() { - RetriableAccessChecker checker = new RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); Check check = Check.check(Check.CheckType.CREATE_ENTITY); result.put(check, "test123"); assertThatThrownBy(() -> checker.newAttempt().can(check).checkAndThrow()) @@ -62,7 +63,7 @@ void checkAndThrow() { @Test void repeatedCheck() { - RetriableAccessChecker checker = new RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); Check c1 = Check.check(Check.CheckType.CREATE_ENTITY); Check c2 = Check.check(Check.CheckType.CREATE_REFERENCE); assertThat(checker.newAttempt().can(c1).can(c2).check()).isEmpty(); @@ -101,7 +102,7 @@ void dataChangeBetweenAttempts() { .addElements(ns2, tableElement) .build(); - RetriableAccessChecker checker = new 
RetriableAccessChecker(validator); + RetriableAccessChecker checker = new RetriableAccessChecker(validator, apiContext("Nessie", 1)); BranchName ref = BranchName.of("test"); assertThat(checker.newAttempt().canCreateEntity(ref, t1).check()).isEmpty(); assertThat(checked) diff --git a/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java b/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java index 6cd4ae93db3..01baa870832 100644 --- a/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java +++ b/servers/services/src/test/java/org/projectnessie/services/impl/TestNamespaceApi.java @@ -17,6 +17,8 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; +import static org.projectnessie.services.authz.ApiContext.apiContext; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import org.junit.jupiter.api.Test; @@ -24,8 +26,8 @@ public class TestNamespaceApi { @Test public void emptyNamespaceCreation() { - NamespaceApiImpl api = new NamespaceApiImpl(null, null, null, null); - assertThatThrownBy(() -> api.createNamespace("main", EMPTY_NAMESPACE)) + NamespaceApiImpl api = new NamespaceApiImpl(null, null, null, null, apiContext("Nessie", 2)); + assertThatThrownBy(() -> api.createNamespace("main", EMPTY_NAMESPACE, API_WRITE)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Namespace name must not be empty"); } diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java index b02d85ca440..dd145917eec 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestAccessChecks.java @@ -27,12 +27,14 @@ import static org.projectnessie.model.FetchOption.MINIMAL; import static org.projectnessie.model.IdentifiedContentKey.IdentifiedElement.identifiedElement; import static org.projectnessie.model.MergeBehavior.NORMAL; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.authz.Check.canCommitChangeAgainstReference; import static org.projectnessie.services.authz.Check.canCreateEntity; import static org.projectnessie.services.authz.Check.canDeleteEntity; import static org.projectnessie.services.authz.Check.canReadEntityValue; import static org.projectnessie.services.authz.Check.canUpdateEntity; import static org.projectnessie.services.authz.Check.canViewReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.ImmutableMap; import java.util.Collection; @@ -95,7 +97,7 @@ protected Set recordAccessChecks() { Set checks = new HashSet<>(); setBatchAccessChecker( c -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { checks.addAll(getChecks()); @@ -416,7 +418,7 @@ public void forbiddenContentKeys() throws Exception { setBatchAccessChecker( x -> - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { return getChecks().stream() @@ -450,7 +452,7 @@ public void entriesAreFilteredBeforeAccessCheck() throws Exception { setBatchAccessChecker( x -> - new AbstractBatchAccessChecker() 
{ + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { getChecks() @@ -474,7 +476,7 @@ public Map check() { public void detachedRefAccessChecks() throws Exception { BatchAccessChecker accessChecker = - new AbstractBatchAccessChecker() { + new AbstractBatchAccessChecker(apiContext("Nessie", 1)) { @Override public Map check() { Map failed = new LinkedHashMap<>(); @@ -557,7 +559,7 @@ public Map check() { .isInstanceOf(AccessCheckException.class) .hasMessageContaining(READ_MSG); soft.assertThatThrownBy( - () -> contentApi().getContent(key, ref.getName(), ref.getHash(), false, false)) + () -> contentApi().getContent(key, ref.getName(), ref.getHash(), false, API_READ)) .describedAs("ref='%s', getContent", ref) .isInstanceOf(AccessCheckException.class) .hasMessageContaining(ENTITIES_MSG); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java index 7dacae232b2..e9b6c7e4a0c 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestCommitLog.java @@ -21,6 +21,7 @@ import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.FetchOption.ALL; import static org.projectnessie.model.FetchOption.MINIMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.ImmutableList; import java.time.Instant; @@ -392,7 +393,9 @@ public void commitLogPaging() throws BaseNessieClientServerException { Put op; try { Content existing = - contentApi().getContent(key, branch.getName(), currentHash, false, false).getContent(); + contentApi() + .getContent(key, branch.getName(), currentHash, false, API_READ) + .getContent(); op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42, existing.getId())); } catch (NessieNotFoundException notFound) { op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42)); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java index 4c973fb7228..1fa999050dc 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestContents.java @@ -20,6 +20,7 @@ import static org.assertj.core.groups.Tuple.tuple; import static org.projectnessie.model.CommitMeta.fromMessage; import static org.projectnessie.model.FetchOption.ALL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import com.google.common.collect.Maps; import java.util.List; @@ -293,7 +294,7 @@ public void verifyContentAndOperationTypesIndividually( soft.assertThat( contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, committed.getName(), committed.getHash(), false, API_READ)) .extracting(ContentResponse::getContent) .extracting(this::clearIdOnContent) .isEqualTo(put.getContent()); @@ -320,7 +321,11 @@ public void verifyContentAndOperationTypesIndividually( () -> contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, + committed.getName(), + committed.getHash(), + false, + API_READ)) 
.isInstanceOf(NessieNotFoundException.class); // Compare operation on HEAD commit with the committed operation @@ -343,7 +348,7 @@ public void verifyContentAndOperationTypesIndividually( soft.assertThat( contentApi() .getContent( - fixedContentKey, committed.getName(), committed.getHash(), false, false)) + fixedContentKey, committed.getName(), committed.getHash(), false, API_READ)) .extracting(ContentResponse::getContent) .extracting(this::clearIdOnContent) .isEqualTo(contentAndOperationType.prepare.getContent()); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java index ce70ef91a79..bf687d9ca1c 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestEntries.java @@ -20,6 +20,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.tuple; import static org.projectnessie.model.CommitMeta.fromMessage; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -407,7 +408,8 @@ private void checkNamespaces( soft.assertThat(namespaceApi().getNamespace(reference.getName(), reference.getHash(), ns)) .isNotNull(); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(reference.getName(), ns)) + soft.assertThatThrownBy( + () -> namespaceApi().createNamespace(reference.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespace)); diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java index aa912bcaab6..bbff43933bb 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestInvalidRefs.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.projectnessie.model.FetchOption.MINIMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; import org.junit.jupiter.api.Test; import org.projectnessie.error.BaseNessieClientServerException; @@ -51,7 +52,7 @@ public void testUnknownHashesOnValidNamedRefs() throws BaseNessieClientServerExc () -> contentApi() .getContent( - ContentKey.of("table0"), branch.getName(), invalidHash, false, false)) + ContentKey.of("table0"), branch.getName(), invalidHash, false, API_READ)) .isInstanceOf(NessieNotFoundException.class) .hasMessageContaining(String.format("Commit '%s' not found", invalidHash)); } diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java index f960fb17773..79a4189da29 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestMergeTransplant.java @@ -28,6 +28,8 @@ import static org.projectnessie.model.MergeBehavior.DROP; import static 
org.projectnessie.model.MergeBehavior.FORCE; import static org.projectnessie.model.MergeBehavior.NORMAL; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableList; import java.util.Collection; @@ -159,7 +161,7 @@ private void mergeTransplant( table1 = (IcebergTable) contentApi() - .getContent(key1, committed1.getName(), committed1.getHash(), false, false) + .getContent(key1, committed1.getName(), committed1.getHash(), false, API_READ) .getContent(); Branch committed2 = @@ -478,8 +480,8 @@ public void mergeWithNamespaces(ReferenceMode refMode) throws BaseNessieClientSe Namespace ns = Namespace.parse("a.b.c"); base = ensureNamespacesForKeysExist(base, ns.toContentKey()); branch = ensureNamespacesForKeysExist(branch, ns.toContentKey()); - namespaceApi().createNamespace(base.getName(), ns); - namespaceApi().createNamespace(branch.getName(), ns); + namespaceApi().createNamespace(base.getName(), ns, API_WRITE); + namespaceApi().createNamespace(branch.getName(), ns, API_WRITE); base = (Branch) getReference(base.getName()); branch = (Branch) getReference(branch.getName()); @@ -496,7 +498,7 @@ public void mergeWithNamespaces(ReferenceMode refMode) throws BaseNessieClientSe table1 = (IcebergTable) contentApi() - .getContent(key1, committed1.getName(), committed1.getHash(), false, false) + .getContent(key1, committed1.getName(), committed1.getHash(), false, API_READ) .getContent(); Branch committed2 = @@ -664,7 +666,7 @@ public void mergeRecreateTableNoConflict() throws BaseNessieClientServerExceptio ContentResponse tableOnRootAfterMerge = contentApi() .getContent( - setup.key, rootAfterMerge.getName(), rootAfterMerge.getHash(), false, false); + setup.key, rootAfterMerge.getName(), rootAfterMerge.getHash(), false, API_READ); soft.assertThat(setup.tableOnWork.getContent().getId()) .isEqualTo(tableOnRootAfterMerge.getContent().getId()); @@ -729,7 +731,7 @@ private MergeRecreateTableSetup setupMergeRecreateTable() soft.assertThat(root).isNotEqualTo(lastRoot); ContentResponse tableOnRoot = - contentApi().getContent(key, root.getName(), root.getHash(), false, false); + contentApi().getContent(key, root.getName(), root.getHash(), false, API_READ); soft.assertThat(tableOnRoot.getEffectiveReference()).isEqualTo(root); Branch work = createBranch("recreateBranch", root); @@ -749,7 +751,7 @@ private MergeRecreateTableSetup setupMergeRecreateTable() soft.assertThat(work).isNotEqualTo(lastWork); ContentResponse tableOnWork = - contentApi().getContent(key, work.getName(), work.getHash(), false, false); + contentApi().getContent(key, work.getName(), work.getHash(), false, API_READ); soft.assertThat(tableOnWork.getEffectiveReference()).isEqualTo(work); soft.assertThat(tableOnWork.getContent().getId()) diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java index 143f0fd2507..6ed127b0100 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/AbstractTestNamespace.java @@ -25,6 +25,7 @@ import static org.projectnessie.model.MergeBehavior.NORMAL; import static org.projectnessie.model.Namespace.Empty.EMPTY_NAMESPACE; import static org.projectnessie.services.impl.AbstractTestContents.contentAndOperationTypes; +import 
static org.projectnessie.versioned.RequestMeta.API_WRITE; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -62,7 +63,7 @@ public abstract class AbstractTestNamespace extends BaseTestServiceImpl { public void testNamespaces(String namespaceName) throws BaseNessieClientServerException { Namespace ns = Namespace.parse(namespaceName); Branch branch = ensureNamespacesForKeysExist(createBranch("testNamespaces"), ns.toContentKey()); - Namespace namespace = namespaceApi().createNamespace(branch.getName(), ns); + Namespace namespace = namespaceApi().createNamespace(branch.getName(), ns, API_WRITE); soft.assertThat(namespace) .isNotNull() @@ -75,7 +76,7 @@ public void testNamespaces(String namespaceName) throws BaseNessieClientServerEx // the namespace in the error message will contain the representation with u001D String namespaceInErrorMsg = namespaceName.replace("\u0000", "\u001D"); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns)) + soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespaceInErrorMsg)); @@ -100,7 +101,9 @@ public void testNamespacesRetrieval() throws BaseNessieClientServerException { Branch branch = createBranch("namespace"); ThrowingExtractor createNamespace = - identifier -> namespaceApi().createNamespace(branch.getName(), Namespace.parse(identifier)); + identifier -> + namespaceApi() + .createNamespace(branch.getName(), Namespace.parse(identifier), API_WRITE); Namespace a = createNamespace.apply("a"); Namespace ab = createNamespace.apply("a.b"); @@ -213,7 +216,7 @@ public void testNamespaceMergeWithConflict() throws BaseNessieClientServerExcept Namespace ns = Namespace.parse("a.b.c"); base = ensureNamespacesForKeysExist(base, ns.toContentKey()); // create a namespace on the base branch - namespaceApi().createNamespace(base.getName(), ns); + namespaceApi().createNamespace(base.getName(), ns, API_WRITE); base = (Branch) getReference(base.getName()); // create a table with the same name on the other branch @@ -270,7 +273,7 @@ public void testNamespaceConflictWithOtherContent() throws BaseNessieClientServe commit(branch, fromMessage("add table"), Put.of(key, icebergTable)); Namespace ns = Namespace.of(elements); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns)) + soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), ns, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage("Another content object with name 'a.b.c' already exists"); @@ -299,7 +302,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe identifier -> { Namespace namespace = Namespace.parse(identifier); - Namespace created = namespaceApi().createNamespace(branch.getName(), namespace); + Namespace created = + namespaceApi().createNamespace(branch.getName(), namespace, API_WRITE); soft.assertThat(created) .isNotNull() .extracting(Namespace::getElements, Namespace::toPathString) @@ -308,7 +312,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe soft.assertThat(namespaceApi().getNamespace(branch.getName(), null, namespace)) .isEqualTo(created); - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), namespace)) + soft.assertThatThrownBy( + () -> 
namespaceApi().createNamespace(branch.getName(), namespace, API_WRITE)) .cause() .isInstanceOf(NessieNamespaceAlreadyExistsException.class) .hasMessage(String.format("Namespace '%s' already exists", namespace.name())); @@ -371,7 +376,8 @@ public void testNamespacesWithAndWithoutZeroBytes() throws BaseNessieClientServe public void testEmptyNamespace() throws BaseNessieClientServerException { Branch branch = createBranch("emptyNamespace"); // can't create/fetch/delete an empty namespace due to empty REST path - soft.assertThatThrownBy(() -> namespaceApi().createNamespace(branch.getName(), EMPTY_NAMESPACE)) + soft.assertThatThrownBy( + () -> namespaceApi().createNamespace(branch.getName(), EMPTY_NAMESPACE, API_WRITE)) .isInstanceOf(Exception.class); soft.assertThatThrownBy( @@ -407,7 +413,8 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException Namespace ns = namespaceApi() - .createNamespace(branch.getName(), Namespace.of(namespace.getElements(), properties)); + .createNamespace( + branch.getName(), Namespace.of(namespace.getElements(), properties), API_WRITE); soft.assertThat(ns.getProperties()).isEqualTo(properties); soft.assertThat(ns.getId()).isNotNull(); String nsId = ns.getId(); @@ -416,7 +423,11 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException () -> namespaceApi() .updateProperties( - branch.getName(), Namespace.of("non-existing"), properties, emptySet())) + branch.getName(), + Namespace.of("non-existing"), + properties, + emptySet(), + API_WRITE)) .isInstanceOf(NessieNamespaceNotFoundException.class) .hasMessage("Namespace 'non-existing' does not exist"); @@ -429,11 +440,12 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException branch.getName(), Namespace.of("non-existing"), emptyMap(), - properties.keySet())) + properties.keySet(), + API_WRITE)) .isInstanceOf(NessieNamespaceNotFoundException.class) .hasMessage("Namespace 'non-existing' does not exist"); - namespaceApi().updateProperties(branch.getName(), namespace, properties, emptySet()); + namespaceApi().updateProperties(branch.getName(), namespace, properties, emptySet(), API_WRITE); // namespace does not exist at the previous hash soft.assertThatThrownBy( @@ -450,7 +462,8 @@ public void testNamespaceWithProperties() throws BaseNessieClientServerException updated.getName(), namespace, ImmutableMap.of("key3", "val3", "key1", "xyz"), - ImmutableSet.of("key2", "key5")); + ImmutableSet.of("key2", "key5"), + API_WRITE); // "updated" still points to the hash prior to the update soft.assertThat( diff --git a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java index 1e42192e50c..2a488a52c35 100644 --- a/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java +++ b/servers/services/src/testFixtures/java/org/projectnessie/services/impl/BaseTestServiceImpl.java @@ -20,7 +20,10 @@ import static org.projectnessie.model.FetchOption.MINIMAL; import static org.projectnessie.model.Reference.ReferenceType.BRANCH; import static org.projectnessie.model.Reference.ReferenceType.TAG; +import static org.projectnessie.services.authz.ApiContext.apiContext; import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.versioned.RequestMeta.API_READ; +import static org.projectnessie.versioned.RequestMeta.API_WRITE; import static 
org.projectnessie.versioned.storage.common.logic.Logics.repositoryLogic; import com.google.common.collect.ImmutableMap; @@ -95,7 +98,7 @@ public boolean sendStacktraceToClient() { }; protected static final Authorizer NOOP_AUTHORIZER = - context -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + (context, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; @InjectSoftAssertions protected SoftAssertions soft; @@ -103,23 +106,28 @@ public boolean sendStacktraceToClient() { private Principal principal; protected final ConfigApiImpl configApi() { - return new ConfigApiImpl(config(), versionStore(), authorizer(), this::principal, 2); + return new ConfigApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final TreeApiImpl treeApi() { - return new TreeApiImpl(config(), versionStore(), authorizer(), this::principal); + return new TreeApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final ContentApiImpl contentApi() { - return new ContentApiImpl(config(), versionStore(), authorizer(), this::principal); + return new ContentApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final DiffApiImpl diffApi() { - return new DiffApiImpl(config(), versionStore(), authorizer(), this::principal); + return new DiffApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected final NamespaceApiImpl namespaceApi() { - return new NamespaceApiImpl(config(), versionStore(), authorizer(), this::principal); + return new NamespaceApiImpl( + config(), versionStore(), authorizer(), this::principal, apiContext("Nessie", 2)); } protected Principal principal() { @@ -140,7 +148,7 @@ protected void setAuthorizer(Authorizer authorizer) { protected void setBatchAccessChecker( Function batchAccessChecker) { - this.authorizer = batchAccessChecker::apply; + this.authorizer = (t, apiContext) -> batchAccessChecker.apply(t); } protected VersionStore versionStore() { @@ -451,7 +459,7 @@ protected CommitResponse commit( throws NessieConflictException, NessieNotFoundException { Operations ops = ImmutableOperations.builder().addOperations(operations).commitMeta(meta).build(); - return treeApi().commitMultipleOperations(branch, expectedHash, ops); + return treeApi().commitMultipleOperations(branch, expectedHash, ops, API_WRITE); } protected Map contents(Reference reference, ContentKey... keys) @@ -473,7 +481,8 @@ protected Map contents( String refName, String hashOnRef, boolean forWrite, ContentKey... keys) throws NessieNotFoundException { return contentApi() - .getMultipleContents(refName, hashOnRef, Arrays.asList(keys), false, forWrite) + .getMultipleContents( + refName, hashOnRef, Arrays.asList(keys), false, forWrite ? API_WRITE : API_READ) .getContents() .stream() .collect(Collectors.toMap(ContentWithKey::getKey, ContentWithKey::getContent)); @@ -487,7 +496,7 @@ protected ContentResponse content(Reference reference, boolean forWrite, Content protected ContentResponse content( String refName, String hashOnRef, boolean forWrite, ContentKey key) throws NessieNotFoundException { - return contentApi().getContent(key, refName, hashOnRef, false, forWrite); + return contentApi().getContent(key, refName, hashOnRef, false, forWrite ? 
API_WRITE : API_READ);
   }
 
   protected String createCommits(
@@ -502,7 +511,7 @@ protected String createCommits(
       try {
         Content existing =
             contentApi()
-                .getContent(key, branch.getName(), currentHash, false, false)
+                .getContent(key, branch.getName(), currentHash, false, API_READ)
                 .getContent();
         op = Put.of(key, IcebergTable.of("some-file-" + i, 42, 42, 42, 42, existing.getId()));
       } catch (NessieContentNotFoundException notFound) {
diff --git a/site/in-dev/authorization.md b/site/in-dev/authorization.md
index 4a96cce8353..68784b2684f 100644
--- a/site/in-dev/authorization.md
+++ b/site/in-dev/authorization.md
@@ -88,6 +88,79 @@ Certain variables are available within the `` depending on cont
 * **path** - refers to the URI path representation (`ContentKey.toPathString()`) of the [content key](https://github.com/projectnessie/nessie/blob/main/api/model/src/main/java/org/projectnessie/model/ContentKey.java) for the object related to the authorization check.
 * **contentType** - refers to a (possibly empty) string representing the name of the object's [`Content.Type`](https://github.com/projectnessie/nessie/blob/main/api/model/src/main/java/org/projectnessie/model/Content.java).
 * **type** - refers to the repository config type to be retrieved or updated.
+* **api** - contains information about the receiving API. This is a composite object with two properties:
+  * **apiName** - the name of the API, either `Nessie` or `Iceberg`
+  * **apiVersion** - the version of the API: 1 or 2 for `Nessie`, currently 1 for `Iceberg`
+* **actions** - a list of actions (strings), available for some Iceberg endpoints.
+
+#### Actions
+
+The list of `actions` (strings) is available for some Iceberg endpoints that perform changes against
+an entity (table, view or namespace). For the Nessie REST API the list of actions is empty.
+
+**Catalog operations**
+
+Available for all updating Iceberg endpoints,
+for the `Check` types `CREATE_ENTITY`, `UPDATE_ENTITY` and `DELETE_ENTITY`.
+
+* `CATALOG_CREATE_ENTITY` - create a table/view/namespace
+* `CATALOG_UPDATE_ENTITY` - update a table/view/namespace
+* `CATALOG_DROP_ENTITY` - drop a table/view/namespace
+* `CATALOG_RENAME_ENTITY_FROM` - rename a table (from)
+* `CATALOG_RENAME_ENTITY_TO` - rename a table (to)
+* `CATALOG_REGISTER_ENTITY` - register a table
+* `CATALOG_UPDATE_MULTIPLE` - update multiple tables
+
+**Iceberg metadata updates**
+
+Available for Iceberg endpoints that update entities; the actions represent the kinds of metadata updates,
+for the `Check` types `CREATE_ENTITY` and `UPDATE_ENTITY`.
+
+* `META_ADD_VIEW_VERSION`
+* `META_SET_CURRENT_VIEW_VERSION`
+* `META_SET_STATISTICS`
+* `META_REMOVE_STATISTICS`
+* `META_SET_PARTITION_STATISTICS`
+* `META_REMOVE_PARTITION_STATISTICS`
+* `META_ASSIGN_UUID`
+* `META_ADD_SCHEMA`
+* `META_SET_CURRENT_SCHEMA`
+* `META_ADD_PARTITION_SPEC`
+* `META_SET_DEFAULT_PARTITION_SPEC`
+* `META_ADD_SNAPSHOT`
+* `META_ADD_SORT_ORDER`
+* `META_SET_DEFAULT_SORT_ORDER`
+* `META_SET_LOCATION`
+* `META_SET_PROPERTIES`
+* `META_REMOVE_PROPERTIES`
+* `META_REMOVE_LOCATION_PROPERTY`
+* `META_SET_SNAPSHOT_REF`
+* `META_REMOVE_SNAPSHOT_REF`
+* `META_UPGRADE_FORMAT_VERSION`
+
+**Iceberg snapshot summary extracts**
+
+Available for Iceberg updates that add a snapshot, for the `Check` types `CREATE_ENTITY` and `UPDATE_ENTITY`.
+
+* `SNAP_ADD_DATA_FILES`
+* `SNAP_DELETE_DATA_FILES`
+* `SNAP_ADD_DELETE_FILES`
+* `SNAP_ADD_EQUALITY_DELETE_FILES`
+* `SNAP_ADD_POSITION_DELETE_FILES`
+* `SNAP_REMOVE_DELETE_FILES`
+* `SNAP_REMOVE_EQUALITY_DELETE_FILES`
+* `SNAP_REMOVE_POSITION_DELETE_FILES`
+* `SNAP_ADDED_RECORDS`
+* `SNAP_DELETED_RECORDS`
+* `SNAP_ADDED_POSITION_DELETES`
+* `SNAP_DELETED_POSITION_DELETES`
+* `SNAP_ADDED_EQUALITY_DELETES`
+* `SNAP_DELETED_EQUALITY_DELETES`
+* `SNAP_REPLACE_PARTITIONS`
+* `SNAP_OP_APPEND`
+* `SNAP_OP_REPLACE`
+* `SNAP_OP_OVERWRITE`
+* `SNAP_OP_DELETE`
 
 #### Checks for Reference operations
 
@@ -100,12 +173,14 @@ Applicable `op` types:
 * `DELETE_REFERENCE`
 * `READ_ENTRIES`
 * `LIST_COMMIT_LOG`
+* `COMMIT_CHANGE_AGAINST_REFERENCE`
 
 Available variables:
 
 * `role`
 * `roles`
 * `ref`
+* `api`
 
 #### Checks for Content operations
 
@@ -124,6 +199,8 @@ Available variables:
 * `ref`
 * `path`
 * `contentType`
+* `api`
+* `actions` (for `CREATE_ENTITY`, `UPDATE_ENTITY`, `DELETE_ENTITY` against Iceberg REST)
 
 #### Checks for Repository Config operations
 
@@ -137,6 +214,7 @@ Available variables:
 * `role`
 * `roles`
 * `type`
+* `api`
 
 #### Relevant CEL features
 
diff --git a/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java b/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java
index 53b31f56eb1..ab697558c25 100644
--- a/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java
+++ b/versioned/combined-cs/src/main/java/org/projectnessie/nessie/combined/CombinedClientBuilder.java
@@ -16,12 +16,14 @@
 package org.projectnessie.nessie.combined;
 
 import static org.projectnessie.nessie.combined.EmptyHttpHeaders.emptyHttpHeaders;
+import static org.projectnessie.services.authz.ApiContext.apiContext;
 
 import org.projectnessie.client.NessieClientBuilder;
 import org.projectnessie.client.api.NessieApi;
 import org.projectnessie.client.api.NessieApiV2;
 import org.projectnessie.services.authz.AbstractBatchAccessChecker;
 import org.projectnessie.services.authz.AccessContext;
+import org.projectnessie.services.authz.ApiContext;
 import org.projectnessie.services.authz.Authorizer;
 import org.projectnessie.services.config.ServerConfig;
 import org.projectnessie.services.rest.RestV2ConfigResource;
@@ -38,6 +40,7 @@ public class CombinedClientBuilder extends NessieClientBuilder.AbstractNessieCli
   private Persist persist;
   private RestV2ConfigResource configResource;
   private RestV2TreeResource treeResource;
+  private ApiContext apiContext = apiContext("Nessie", 2);
 
   public CombinedClientBuilder() {}
 
@@ -66,6 +69,11 @@ public CombinedClientBuilder withPersist(Persist persist) {
     return this;
   }
 
+  public CombinedClientBuilder withApiContext(ApiContext apiContext) {
+    this.apiContext = apiContext;
+    return this;
+  }
+
   @Override
   public <API extends NessieApi> API build(Class<API> apiContract) {
     RestV2ConfigResource configResource = this.configResource;
@@ -97,7 +105,7 @@ public boolean sendStacktraceToClient() {
         };
 
     VersionStore versionStore = new VersionStoreImpl(persist);
-    Authorizer authorizer = c -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER;
+    Authorizer authorizer = (c, apiContext) -> AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER;
 
     AccessContext accessContext = () -> null;
 
diff --git a/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java b/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java
new file mode 100644
index 00000000000..6a617acf33c
--- /dev/null
+++ b/versioned/spi/src/main/java/org/projectnessie/versioned/RequestMeta.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2024 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.projectnessie.versioned;
+
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import org.immutables.value.Value;
+import org.projectnessie.model.ContentKey;
+
+/** Additional information related to the incoming API request. */
+@Value.Immutable
+@Value.Style(allParameters = true)
+@JsonSerialize(as = ImmutableRequestMeta.class)
+@JsonDeserialize(as = ImmutableRequestMeta.class)
+public interface RequestMeta {
+  /** Indicates whether access checks shall be performed for a write/update request. */
+  boolean forWrite();
+
+  @Value.Default
+  default Map<ContentKey, Set<String>> keyActions() {
+    return Map.of();
+  }
+
+  default Set<String> keyActions(ContentKey key) {
+    return keyActions().getOrDefault(key, Set.of());
+  }
+
+  default RequestMeta addKeyAction(ContentKey key, String name) {
+    Map<ContentKey, Set<String>> keyActions = new HashMap<>(keyActions());
+    Set<String> actions = new HashSet<>(keyActions(key));
+    actions.add(name);
+    keyActions.put(key, actions);
+    return ImmutableRequestMeta.builder().from(this).keyActions(keyActions).build();
+  }
+
+  default RequestMeta addKeyActions(ContentKey key, Set<String> names) {
+    Map<ContentKey, Set<String>> keyActions = new HashMap<>(keyActions());
+    Set<String> actions = new HashSet<>(keyActions(key));
+    actions.addAll(names);
+    keyActions.put(key, actions);
+    return ImmutableRequestMeta.builder().from(this).keyActions(keyActions).build();
+  }
+
+  static Builder builder() {
+    return ImmutableRequestMeta.builder();
+  }
+
+  static Builder apiWrite() {
+    return ImmutableRequestMeta.builder().forWrite(true);
+  }
+
+  static Builder apiRead() {
+    return ImmutableRequestMeta.builder().forWrite(false);
+  }
+
+  RequestMeta API_WRITE = apiWrite().build();
+  RequestMeta API_READ = apiRead().build();
+
+  interface Builder {
+    @CanIgnoreReturnValue
+    Builder from(RequestMeta instance);
+
+    @CanIgnoreReturnValue
+    Builder forWrite(boolean forWrite);
+
+    RequestMeta build();
+  }
+}
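
The sketch below is illustrative only and not part of the change set above. It shows one way a
custom `Authorizer` could use the new two-argument form (access context plus `ApiContext`) together
with the checks collected for a request, mirroring a CEL rule such as
`op == 'DELETE_ENTITY' && api.apiName == 'Iceberg'`. The accessors `ApiContext.apiName()`,
`Check.type()` and the constant `Check.CheckType.DELETE_ENTITY` are assumptions inferred from this
patch and its documentation, not verbatim API.

    // Illustrative sketch: reject entity deletions arriving via Iceberg REST,
    // while leaving Nessie REST requests unrestricted.
    // Assumed accessors: ApiContext#apiName(), Check#type(), Check.CheckType.DELETE_ENTITY.
    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.projectnessie.services.authz.AbstractBatchAccessChecker;
    import org.projectnessie.services.authz.Authorizer;
    import org.projectnessie.services.authz.Check;

    public class DenyIcebergDrops {

      public static final Authorizer AUTHORIZER =
          (accessContext, apiContext) ->
              new AbstractBatchAccessChecker(apiContext) {
                @Override
                public Map<Check, String> check() {
                  Map<Check, String> failed = new LinkedHashMap<>();
                  if (!"Iceberg".equals(apiContext.apiName())) {
                    return failed; // only restrict requests received via Iceberg REST
                  }
                  for (Check check : getChecks()) {
                    if (check.type() == Check.CheckType.DELETE_ENTITY) {
                      failed.put(check, "Dropping entities via Iceberg REST is not allowed");
                    }
                  }
                  return failed;
                }
              };
    }

A request-scoped `RequestMeta` carrying per-key actions can be assembled with the builders from
RequestMeta.java above; the action string is taken from the catalog operations documented in
authorization.md, and the content key is chosen purely for illustration:

    RequestMeta meta =
        RequestMeta.apiWrite()
            .build()
            .addKeyAction(ContentKey.of("db", "table1"), "CATALOG_UPDATE_ENTITY");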