[b/349543276] Add Ranger to metaconnector #550

Draft · wants to merge 1 commit into main
ClouderaConnector.java
@@ -20,10 +20,13 @@
import com.google.common.collect.ImmutableList;
import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentAssessment;
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
import com.google.edwmigration.dumper.application.dumper.connector.ConnectorProperty;
import com.google.edwmigration.dumper.application.dumper.connector.hdfs.HdfsExtractionConnector;
import com.google.edwmigration.dumper.application.dumper.connector.hive.HiveMetadataConnector;
import com.google.edwmigration.dumper.application.dumper.connector.meta.AbstractMetaConnector;
import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerConnector;
import com.google.edwmigration.dumper.plugin.ext.jdk.annotation.Description;
import javax.annotation.Nonnull;

@AutoService(Connector.class)
@Description("Dumps metadata from the Cloudera (Hadoop on-prem) cluster.")
@@ -35,8 +38,38 @@ public ClouderaConnector() {
"cloudera",
"cloudera.zip",
ImmutableList.of(
-ClouderaMetadataConnector.CONNECTOR_NAME,
-HiveMetadataConnector.CONNECTOR_NAME,
-HdfsExtractionConnector.CONNECTOR_NAME));
+ClouderaMetadataConnector.NAME,
+HiveMetadataConnector.NAME,
+HdfsExtractionConnector.NAME,
+RangerConnector.NAME));
}

@Nonnull
@Override
public Class<? extends Enum<? extends ConnectorProperty>> getConnectorProperties() {
return ClouderaConnectorProperty.class;
}

public enum ClouderaConnectorProperty implements ConnectorProperty {
USER("ranger.user", "Ranger API username"),
PASSWORD("ranger.password", "Ranger API password");

private final String name;
private final String description;

ClouderaConnectorProperty(String name, String description) {
this.name = name;
this.description = description;
}

@Nonnull
public String getName() {
return name;
}

@Nonnull
public String getDescription() {
return description;
}
}
}
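
The new ClouderaConnectorProperty enum surfaces the Ranger credentials as connector properties keyed ranger.user and ranger.password. A minimal sketch that relies only on the enum above, just to show the contract it exposes (how the dumper CLI binds these definitions is outside this diff):

import com.google.edwmigration.dumper.application.dumper.connector.cloudera.ClouderaConnector.ClouderaConnectorProperty;

public class ClouderaPropertyListing {
  public static void main(String[] args) {
    // Lists the property keys a user can define when running the cloudera metaconnector.
    for (ClouderaConnectorProperty property : ClouderaConnectorProperty.values()) {
      System.out.println(property.getName() + " - " + property.getDescription());
    }
  }
}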
ClouderaMetadataConnector.java
@@ -77,7 +77,7 @@ public class ClouderaMetadataConnector implements MetadataConnector, ChildConnector
"spark-submit-version",
"sqoop-version");

public static final String CONNECTOR_NAME = "cloudera-metadata";
public static final String NAME = "cloudera-metadata";

private static final ImmutableList<String> SERVICE_NAMES =
ImmutableList.of(
@@ -97,7 +97,7 @@ public class ClouderaMetadataConnector implements MetadataConnector, ChildConnector
@Nonnull
@Override
public String getName() {
-return CONNECTOR_NAME;
+return NAME;
}

@Override
@@ -37,8 +37,8 @@ protected Void doRun(TaskRunContext context, @Nonnull ByteSink sink, @Nonnull Handle handle)
throws Exception {
MetaHandle metaHandle = (MetaHandle) handle;
metaHandle.initializeConnector(
-ClouderaMetadataConnector.CONNECTOR_NAME,
-new ConnectorArguments("--connector", ClouderaMetadataConnector.CONNECTOR_NAME));
+ClouderaMetadataConnector.NAME,
+new ConnectorArguments("--connector", ClouderaMetadataConnector.NAME));
return null;
}

@@ -38,8 +38,8 @@ protected Void doRun(TaskRunContext context, @Nonnull ByteSink sink, @Nonnull Handle handle)
throws Exception {
MetaHandle metaHandle = (MetaHandle) handle;
metaHandle.initializeConnector(
-HdfsExtractionConnector.CONNECTOR_NAME,
-new ConnectorArguments("--connector", HdfsExtractionConnector.CONNECTOR_NAME));
+HdfsExtractionConnector.NAME,
+new ConnectorArguments("--connector", HdfsExtractionConnector.NAME));
return null;
}

@@ -38,9 +38,8 @@ protected Void doRun(TaskRunContext context, @Nonnull ByteSink sink, @Nonnull Handle handle)
throws Exception {
MetaHandle metaHandle = (MetaHandle) handle;
metaHandle.initializeConnector(
-HiveMetadataConnector.CONNECTOR_NAME,
-new ConnectorArguments(
-"--connector", HiveMetadataConnector.CONNECTOR_NAME, "--assessment"));
+HiveMetadataConnector.NAME,
+new ConnectorArguments("--connector", HiveMetadataConnector.NAME, "--assessment"));
return null;
}

RangerInitializerTask.java (new file)
@@ -0,0 +1,70 @@
/*
* Copyright 2022-2024 Google LLC
* Copyright 2013-2021 CompilerWorks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.edwmigration.dumper.application.dumper.connector.cloudera;

import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteSink;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.connector.cloudera.ClouderaConnector.ClouderaConnectorProperty;
import com.google.edwmigration.dumper.application.dumper.connector.meta.MetaHandle;
import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerConnector;
import com.google.edwmigration.dumper.application.dumper.handle.Handle;
import com.google.edwmigration.dumper.application.dumper.task.AbstractTask;
import com.google.edwmigration.dumper.application.dumper.task.TaskRunContext;
import java.io.IOException;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

public class RangerInitializerTask extends AbstractTask<Void> {

public RangerInitializerTask() {
super("ranger-initializer.txt", /* createTarget= */ false);
}

@CheckForNull
@Override
protected Void doRun(TaskRunContext context, @Nonnull ByteSink sink, @Nonnull Handle handle)
throws Exception {
MetaHandle metaHandle = (MetaHandle) handle;
ConnectorArguments childConnectorArguments =
tunnelPropertiesToChildConnector(context.getArguments());
metaHandle.initializeConnector(RangerConnector.NAME, childConnectorArguments);
return null;
}

private ConnectorArguments tunnelPropertiesToChildConnector(
ConnectorArguments metaconnectorArguments) throws IOException {
ImmutableList.Builder<String> argumentsBuilder = ImmutableList.builder();
argumentsBuilder.add("--connector").add(RangerConnector.NAME);
@Nullable String user = metaconnectorArguments.getDefinition(ClouderaConnectorProperty.USER);
if (user != null) {
argumentsBuilder.add("--user").add(user);
}
@Nullable
String password = metaconnectorArguments.getDefinition(ClouderaConnectorProperty.PASSWORD);
if (password != null) {
argumentsBuilder.add("--password").add(password);
}
return new ConnectorArguments(argumentsBuilder.build().toArray(new String[0]));
}

@Override
public String toString() {
return "Connecting to Ranger";
}
}
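
The tunneling helper above re-expresses the metaconnector's ranger.user / ranger.password definitions as the standalone Ranger connector's --user and --password flags. A minimal sketch of the equivalent child arguments it builds when both properties are defined (the credential values below are placeholders, not defaults from the code):

import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerConnector;

public class RangerTunnelingSketch {
  public static void main(String[] args) throws Exception {
    // Equivalent of the output of tunnelPropertiesToChildConnector() when both
    // ranger.user and ranger.password were defined on the cloudera metaconnector.
    ConnectorArguments childArguments =
        new ConnectorArguments(
            "--connector", RangerConnector.NAME, // i.e. "ranger"
            "--user", "ranger-admin", // placeholder for the ranger.user definition
            "--password", "changeit"); // placeholder for the ranger.password definition
    // The task then hands these to MetaHandle.initializeConnector(RangerConnector.NAME, childArguments).
  }
}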
HdfsExtractionConnector.java
@@ -54,10 +54,10 @@
public class HdfsExtractionConnector extends AbstractConnector
implements HdfsExtractionDumpFormat, ChildConnector {

public static final String CONNECTOR_NAME = "hdfs";
public static final String NAME = "hdfs";

public HdfsExtractionConnector() {
-super(CONNECTOR_NAME);
+super(NAME);
}

@Nonnull
HiveMetadataConnector.java
@@ -681,10 +681,10 @@ protected String toCallDescription() {
}
}

public static final String CONNECTOR_NAME = "hiveql";
public static final String NAME = "hiveql";

public HiveMetadataConnector() {
-super(CONNECTOR_NAME);
+super(NAME);
}

@Override
RangerConnector.java
@@ -16,13 +16,18 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.ranger;

import static com.google.edwmigration.dumper.application.dumper.ConnectorArguments.OPT_RANGER_PORT_DEFAULT;
import static java.lang.Integer.parseInt;

import com.google.auto.service.AutoService;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteSink;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.annotations.RespectsInput;
import com.google.edwmigration.dumper.application.dumper.connector.AbstractConnector;
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
import com.google.edwmigration.dumper.application.dumper.connector.cloudera.RangerInitializerTask;
import com.google.edwmigration.dumper.application.dumper.connector.meta.ChildConnector;
import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerClient.RangerException;
import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerPageIterator.Page;
import com.google.edwmigration.dumper.application.dumper.handle.AbstractHandle;
@@ -50,6 +55,7 @@
import java.time.Clock;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,16 +69,18 @@
order = 101,
arg = ConnectorArguments.OPT_PORT,
description = "The port of the Ranger server.",
-defaultValue = ConnectorArguments.OPT_RANGER_PORT_DEFAULT)
+defaultValue = OPT_RANGER_PORT_DEFAULT)
@AutoService({Connector.class})
@Description("Dumps services and policies from Apache Ranger.")
-public class RangerConnector extends AbstractConnector {
+public class RangerConnector extends AbstractConnector implements ChildConnector {

@SuppressWarnings("UnusedVariable")
private static final Logger LOG = LoggerFactory.getLogger(RangerConnector.class);

public static final String NAME = "ranger";

public RangerConnector() {
super("ranger");
super(NAME);
}

@Nonnull
@@ -95,13 +103,24 @@ public void addTasksTo(
@Override
public Handle open(@Nonnull ConnectorArguments arguments) throws Exception {
// TODO: handle SSL or Kerberos.
-URI apiUrl = URI.create("http://" + arguments.getHostOrDefault() + ":" + arguments.getPort());
+URI apiUrl =
+    URI.create(
+        "http://"
+            + arguments.getHostOrDefault()
+            + ":"
+            + arguments.getPort(parseInt(OPT_RANGER_PORT_DEFAULT)));
String password = arguments.getPasswordOrPrompt();
return new RangerClientHandle(
new RangerClient(apiUrl, arguments.getUser(), password),
arguments.getRangerPageSizeDefault());
}

@Nonnull
@Override
public Optional<Task<?>> createInitializerTask() {
return Optional.of(new RangerInitializerTask());
}

static class DumpUsersTask extends AbstractRangerTask<User> {

DumpUsersTask() {
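
Taken together, RangerConnector now participates in the metaconnector flow through the ChildConnector hook added above: createInitializerTask() supplies a RangerInitializerTask, which opens the child connector with the tunneled credentials. A minimal sketch of how a meta connector might consume the hook, assuming a hypothetical task list at the call site and that Task sits alongside the AbstractTask import shown earlier (the real wiring is not part of this diff):

import com.google.edwmigration.dumper.application.dumper.connector.ranger.RangerConnector;
import com.google.edwmigration.dumper.application.dumper.task.Task;
import java.util.ArrayList;
import java.util.List;

public class InitializerWiringSketch {
  public static void main(String[] args) {
    List<Task<?>> tasks = new ArrayList<>(); // hypothetical list owned by the meta connector
    RangerConnector ranger = new RangerConnector();
    // The new ChildConnector hook returns Optional.of(new RangerInitializerTask()),
    // so the meta layer can schedule it ahead of the Ranger dump tasks.
    ranger.createInitializerTask().ifPresent(tasks::add);
    System.out.println("Initializer tasks scheduled: " + tasks.size());
  }
}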