diff --git a/agents-audit/dest-hdfs/pom.xml b/agents-audit/dest-hdfs/pom.xml
index 289049fc92..07e72f1f6b 100644
--- a/agents-audit/dest-hdfs/pom.xml
+++ b/agents-audit/dest-hdfs/pom.xml
@@ -92,6 +92,18 @@
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <version>${junit.jupiter.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <version>${mockito.version}</version>
+ <scope>test</scope>
+ </dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
diff --git a/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/destination/HDFSAuditDestinationTest.java b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/destination/HDFSAuditDestinationTest.java
new file mode 100644
index 0000000000..60e41428ae
--- /dev/null
+++ b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/destination/HDFSAuditDestinationTest.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.destination;
+
+import org.apache.ranger.audit.utils.RangerAuditWriter;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Properties;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for HDFSAuditDestination
+ * */
+class HDFSAuditDestinationTest {
+ private HDFSAuditDestination destination;
+ private RangerAuditWriter mockWriter;
+
+ @BeforeEach
+ void setUp() throws Exception {
+ mockWriter = mock(RangerAuditWriter.class);
+
+ // Subclass to override getWriter()
+ destination = new HDFSAuditDestination() {
+ @Override
+ public RangerAuditWriter getWriter() {
+ return mockWriter;
+ }
+ };
+ }
+
+ @Test
+ void testInitSetsInitDone() {
+ Properties props = new Properties();
+ destination.init(props, "prefix");
+ assertTrue(getPrivateInitDone(destination));
+ }
+
+ @Test
+ void testLogJSONSuccess() throws Exception {
+ destination.init(new Properties(), "prefix");
+ Collection events = Arrays.asList("{\"event\":1}", "{\"event\":2}");
+ when(mockWriter.log(events)).thenReturn(true);
+
+ boolean result = destination.logJSON(events);
+
+ assertTrue(result);
+ verify(mockWriter, atLeastOnce()).log(events);
+ }
+
+ @Test
+ void testLogJSONWhenNotInit() throws Exception {
+ Collection events = Arrays.asList("{\"event\":1}");
+ boolean result = destination.logJSON(events);
+ assertFalse(result);
+ }
+
+ @Test
+ void testLogFileSuccess() throws Exception {
+ destination.init(new Properties(), "prefix");
+ File file = mock(File.class);
+ when(mockWriter.logFile(file)).thenReturn(true);
+
+ boolean result = destination.logFile(file);
+
+ assertTrue(result);
+ verify(mockWriter).logFile(file);
+ }
+
+ @Test
+ void testLogFileWhenNotInit() throws Exception {
+ File file = mock(File.class);
+ boolean result = destination.logFile(file);
+ assertFalse(result);
+ }
+
+ @Test
+ void testStopCallsWriterStop() throws Exception {
+ destination.init(new Properties(), "prefix");
+ destination.stop();
+ verify(mockWriter).stop();
+ }
+
+ // Helper to access private field
+ private boolean getPrivateInitDone(HDFSAuditDestination dest) {
+ try {
+ java.lang.reflect.Field f = HDFSAuditDestination.class.getDeclaredField("initDone");
+ f.setAccessible(true);
+ return f.getBoolean(dest);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/LocalFileLogBufferTest.java b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/LocalFileLogBufferTest.java
new file mode 100644
index 0000000000..0a73d55c27
--- /dev/null
+++ b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/LocalFileLogBufferTest.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.provider;
+
+import org.apache.ranger.audit.model.AuditEventBase;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.file.Path;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.timeout;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for LocalFileLogBuffer
+ * */
+class LocalFileLogBufferTest {
+ private LocalFileLogBuffer<AuditEventBase> logBuffer;
+
+ @Mock
+ private DebugTracer mockTracer;
+
+ @Mock
+ private LogDestination<AuditEventBase> mockDestination;
+
+ @Mock
+ private AuditEventBase mockAuditEvent;
+
+ @TempDir
+ Path tempDir;
+
+ @BeforeEach
+ void setUp() {
+ MockitoAnnotations.initMocks(this);
+ logBuffer = new LocalFileLogBuffer<>(mockTracer);
+
+ // Configure the log buffer
+ logBuffer.setDirectory(tempDir.toString());
+ logBuffer.setFile("audit_test_%app-type%_%time:yyyyMMdd-HHmm%.log");
+ logBuffer.setEncoding("UTF-8");
+ logBuffer.setFlushIntervalSeconds(1); // Set to 1 second for faster testing
+ logBuffer.setRolloverIntervalSeconds(5); // Set to 5 seconds for faster testing
+ logBuffer.setArchiveDirectory(tempDir.resolve("archive").toString());
+ logBuffer.setArchiveFileCount(3);
+ logBuffer.setIsAppend(true);
+ logBuffer.setFileBufferSizeBytes(1024);
+ }
+
+ @Test
+ void testInitialState() {
+ assertEquals(tempDir.toString(), logBuffer.getDirectory());
+ assertEquals("audit_test_%app-type%_%time:yyyyMMdd-HHmm%.log", logBuffer.getFile());
+ assertEquals("UTF-8", logBuffer.getEncoding());
+ assertEquals(1, logBuffer.getFlushIntervalSeconds());
+ assertEquals(5, logBuffer.getRolloverIntervalSeconds());
+ assertEquals(tempDir.resolve("archive").toString(), logBuffer.getArchiveDirectory());
+ assertEquals(3, logBuffer.getArchiveFileCount());
+ assertTrue(logBuffer.getIsAppend());
+ assertEquals(1024, logBuffer.getFileBufferSizeBytes());
+ }
+
+ @Test
+ void testFileNotAvailable() {
+ // Set an invalid directory to test file not available scenario
+ logBuffer.setDirectory("/invalid/directory/path");
+
+ // Try to add a log
+ when(mockAuditEvent.toString()).thenReturn("Test audit event");
+ boolean added = logBuffer.add(mockAuditEvent);
+
+ // Should fail because the directory doesn't exist
+ assertFalse(added);
+ assertFalse(logBuffer.isAvailable());
+ }
+
+ @Test
+ void testDestinationDispatcherThread() throws Exception {
+ // Configure the mock destination
+ when(mockDestination.sendStringified(anyString())).thenReturn(true);
+
+ // Add a log to create a file
+ logBuffer.start(mockDestination);
+ when(mockAuditEvent.toString()).thenReturn("Test dispatch event");
+ boolean added = logBuffer.add(mockAuditEvent);
+ assertTrue(added);
+
+ // Force close the file to add it to completed files
+ Field writerField = LocalFileLogBuffer.class.getDeclaredField("mWriter");
+ writerField.setAccessible(true);
+
+ // Remember the filename before closing
+ Field bufferFilenameField = LocalFileLogBuffer.class.getDeclaredField("mBufferFilename");
+ bufferFilenameField.setAccessible(true);
+ String filename = (String) bufferFilenameField.get(logBuffer);
+
+ // Close the file to add it to the completed files list
+ logBuffer.stop();
+
+ // Create a new dispatcher thread with the same file buffer and destination
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ // Add our test file to the completed files
+ dispatcherThread.addLogfile(filename);
+
+ // Start the thread and let it process our file
+ dispatcherThread.start();
+ Thread.sleep(500);
+
+ // Verify the destination was called to send the log
+ verify(mockDestination, timeout(1000).atLeastOnce()).sendStringified("Test dispatch event");
+
+ // Stop the thread
+ dispatcherThread.stopThread();
+ dispatcherThread.join(1000);
+ assertFalse(dispatcherThread.isAlive());
+ }
+
+ @Test
+ void testToString() {
+ String toString = logBuffer.toString();
+
+ assertTrue(toString.contains("LocalFileLogBuffer"));
+ assertTrue(toString.contains(tempDir.toString()));
+ assertTrue(toString.contains("audit_test_%app-type%_%time:yyyyMMdd-HHmm%.log"));
+ assertTrue(toString.contains("RolloverIntervaSeconds=5"));
+ }
+
+ @Test
+ void testAddLogfileAndIsIdle() {
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ // Initially idle
+ assertTrue(dispatcherThread.isIdle());
+
+ // Add a logfile
+ dispatcherThread.addLogfile("dummy.log");
+ assertFalse(dispatcherThread.isIdle());
+ }
+
+ @Test
+ void testToStringMethod() {
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ String str = dispatcherThread.toString();
+ assertTrue(str.contains("DestinationDispatcherThread"));
+ assertTrue(str.contains("ThreadName="));
+ }
+
+ @Test
+ void testOpenCurrentFile_FileNotFound() throws Exception {
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ Field currentLogfileField = dispatcherThread.getClass().getDeclaredField("mCurrentLogfile");
+ currentLogfileField.setAccessible(true);
+ currentLogfileField.set(dispatcherThread, "nonexistent-file.log");
+
+ Method openCurrentFileMethod = dispatcherThread.getClass().getDeclaredMethod("openCurrentFile");
+ openCurrentFileMethod.setAccessible(true);
+ Object result = openCurrentFileMethod.invoke(dispatcherThread);
+
+ assertNull(result);
+ }
+
+ @Test
+ void testCloseCurrentFile_NullReader() throws Exception {
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ Method closeCurrentFileMethod = dispatcherThread.getClass().getDeclaredMethod("closeCurrentFile", BufferedReader.class);
+ closeCurrentFileMethod.setAccessible(true);
+
+ // Should not throw
+ closeCurrentFileMethod.invoke(dispatcherThread, new Object[] {null});
+ }
+
+ @Test
+ void testArchiveCurrentFile_FileDoesNotExist() throws Exception {
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ Field currentLogfileField = dispatcherThread.getClass().getDeclaredField("mCurrentLogfile");
+ currentLogfileField.setAccessible(true);
+ currentLogfileField.set(dispatcherThread, tempDir.resolve("doesnotexist.log").toString());
+
+ Method archiveCurrentFileMethod = dispatcherThread.getClass().getDeclaredMethod("archiveCurrentFile");
+ archiveCurrentFileMethod.setAccessible(true);
+
+ // Should not throw
+ archiveCurrentFileMethod.invoke(dispatcherThread);
+ }
+
+ @Test
+ void testSendCurrentFile_EmptyFile() throws Exception {
+ File logFile = tempDir.resolve("empty.log").toFile();
+ logFile.createNewFile();
+
+ LocalFileLogBuffer.DestinationDispatcherThread<AuditEventBase> dispatcherThread =
+ new LocalFileLogBuffer.DestinationDispatcherThread<>(logBuffer, mockDestination, mockTracer);
+
+ Field currentLogfileField = dispatcherThread.getClass().getDeclaredField("mCurrentLogfile");
+ currentLogfileField.setAccessible(true);
+ currentLogfileField.set(dispatcherThread, logFile.getAbsolutePath());
+
+ Method sendCurrentFileMethod = dispatcherThread.getClass().getDeclaredMethod("sendCurrentFile");
+ sendCurrentFileMethod.setAccessible(true);
+
+ assertTrue((Boolean) sendCurrentFileMethod.invoke(dispatcherThread));
+ }
+}
diff --git a/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsAuditProviderTest.java b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsAuditProviderTest.java
new file mode 100644
index 0000000000..6176537379
--- /dev/null
+++ b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsAuditProviderTest.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.provider.hdfs;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.util.Properties;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for HdfsAuditProvider
+ * */
+class HdfsAuditProviderTest {
+ private HdfsAuditProvider provider;
+
+ @BeforeEach
+ void setUp() {
+ provider = new HdfsAuditProvider();
+ }
+
+ @Test
+ void testInitWithMinimalProperties() {
+ Properties props = new Properties();
+ // Add minimal required properties
+ props.setProperty("xasecure.audit.hdfs.config.destination.directory", "/tmp/hdfs");
+ props.setProperty("xasecure.audit.hdfs.config.destination.file", "audit.log");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.directory", "/tmp/buffer");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.file", "buffer.log");
+
+ assertDoesNotThrow(() -> provider.init(props));
+ }
+
+ @Test
+ void testInitWithAllProperties() {
+ Properties props = new Properties();
+ props.setProperty("xasecure.audit.hdfs.config.destination.directory", "/tmp/hdfs");
+ props.setProperty("xasecure.audit.hdfs.config.destination.file", "audit.log");
+ props.setProperty("xasecure.audit.hdfs.config.destination.flush.interval.seconds", "10");
+ props.setProperty("xasecure.audit.hdfs.config.destination.rollover.interval.seconds", "20");
+ props.setProperty("xasecure.audit.hdfs.config.destination.open.retry.interval.seconds", "30");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.directory", "/tmp/buffer");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.file", "buffer.log");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.flush.interval.seconds", "40");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.file.buffer.size.bytes", "4096");
+ props.setProperty("xasecure.audit.hdfs.config.local.buffer.rollover.interval.seconds", "50");
+ props.setProperty("xasecure.audit.hdfs.config.local.archive.directory", "/tmp/archive");
+ props.setProperty("xasecure.audit.hdfs.config.local.archive.max.file.count", "5");
+ props.setProperty("xasecure.audit.hdfs.config.encoding", "UTF-8");
+
+ assertDoesNotThrow(() -> provider.init(props));
+ }
+
+ @Test
+ void testInitWithNullProperties() {
+ assertThrows(NullPointerException.class, () -> provider.init(null));
+ }
+}
diff --git a/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsLogDestinationTest.java b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsLogDestinationTest.java
new file mode 100644
index 0000000000..5432e1d702
--- /dev/null
+++ b/agents-audit/dest-hdfs/src/test/java/org/apache/ranger/audit/provider/hdfs/HdfsLogDestinationTest.java
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.provider.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.ranger.audit.model.AuditEventBase;
+import org.apache.ranger.audit.provider.DebugTracer;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for HdfsLogDestination
+ * */
+class HdfsLogDestinationTest {
+ private HdfsLogDestination<AuditEventBase> destination;
+ private TestableHdfsLogDestination testableDestination;
+
+ @Mock
+ private FSDataOutputStream mockStream;
+
+ @Mock
+ private OutputStreamWriter mockWriter;
+
+ @Mock
+ private AuditEventBase mockEvent;
+
+ static class DummyTracer implements DebugTracer {
+ @Override public void debug(String msg) {}
+
+ @Override public void debug(String msg, Throwable excp) {}
+
+ @Override public void info(String msg) {}
+
+ @Override public void info(String msg, Throwable excp) {}
+
+ @Override public void warn(String msg) {}
+
+ @Override public void warn(String msg, Throwable excp) {}
+
+ @Override public void error(String msg) {}
+
+ @Override public void error(String msg, Throwable excp) {}
+ }
+
+ // Testable subclass that exposes internals for testing
+ static class TestableHdfsLogDestination extends HdfsLogDestination<AuditEventBase> {
+ public TestableHdfsLogDestination(DebugTracer tracer) {
+ super(tracer);
+ }
+
+ // Use reflection to access private fields
+ public void setWriter(OutputStreamWriter writer) {
+ try {
+ Field field = HdfsLogDestination.class.getDeclaredField("mWriter");
+ field.setAccessible(true);
+ field.set(this, writer);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to set writer", e);
+ }
+ }
+
+ public void setFsDataOutStream(FSDataOutputStream stream) {
+ try {
+ Field field = HdfsLogDestination.class.getDeclaredField("mFsDataOutStream");
+ field.setAccessible(true);
+ field.set(this, stream);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to set fsDataOutStream", e);
+ }
+ }
+
+ @Override
+ Configuration createConfiguration() {
+ // Return mocked configuration to prevent actual HDFS connection
+ return mock(Configuration.class);
+ }
+ }
+
+ @BeforeEach
+ void setUp() {
+ MockitoAnnotations.initMocks(this);
+ destination = new HdfsLogDestination<>(new DummyTracer());
+ testableDestination = new TestableHdfsLogDestination(new DummyTracer());
+
+ // Setup default behavior for mock event
+ when(mockEvent.toString()).thenReturn("{\"eventId\":\"test-id\"}");
+ }
+
+ @Test
+ void testSettersAndGetters() {
+ destination.setDirectory("/tmp/hdfs");
+ destination.setFile("audit.log");
+ destination.setFlushIntervalSeconds(10);
+ destination.setEncoding("UTF-8");
+ destination.setRolloverIntervalSeconds(20);
+ destination.setOpenRetryIntervalSeconds(30);
+ destination.setName("testName");
+
+ assertEquals("/tmp/hdfs", destination.getDirectory());
+ assertEquals("audit.log", destination.getFile());
+ assertEquals(10, destination.getFlushIntervalSeconds());
+ assertEquals("UTF-8", destination.getEncoding());
+ assertEquals(20, destination.getRolloverIntervalSeconds());
+ assertEquals(30, destination.getOpenRetryIntervalSeconds());
+ assertEquals("testName", destination.getName());
+ }
+
+ @Test
+ void testToString() {
+ destination.setDirectory("/tmp/hdfs");
+ destination.setFile("audit.log");
+ destination.setRolloverIntervalSeconds(42);
+ String str = destination.toString();
+ assertTrue(str.contains("Directory=/tmp/hdfs"));
+ assertTrue(str.contains("File=audit.log"));
+ assertTrue(str.contains("RolloverIntervalSeconds=42"));
+ }
+
+ @Test
+ void testSetConfigProps() {
+ Map<String, String> props = new HashMap<>();
+ props.put("fs.defaultFS", "hdfs://localhost:9000");
+ destination.setConfigProps(props);
+ // No exception expected
+ }
+
+ @Test
+ void testIsAvailableInitiallyFalse() {
+ assertFalse(destination.isAvailable());
+ }
+
+ @Test
+ void testStartAndStop() {
+ // No exceptions should be thrown
+ testableDestination.start();
+ testableDestination.stop();
+ }
+
+ @Test
+ void testIsAvailableWithWriter() {
+ testableDestination.setWriter(mockWriter);
+ assertTrue(testableDestination.isAvailable());
+ }
+
+ @Test
+ void testSendStringifiedWithClosedWriter() throws Exception {
+ // Setup
+ testableDestination.setWriter(mockWriter);
+ doThrow(new IOException("Writer closed")).when(mockWriter).write(anyString());
+
+ // Test
+ boolean result = testableDestination.sendStringified("test message");
+
+ // Verify
+ assertFalse(result);
+ }
+
+ @Test
+ void testFlushWithWriter() throws Exception {
+ // Setup
+ testableDestination.setWriter(mockWriter);
+ testableDestination.setFsDataOutStream(mockStream);
+
+ // Test
+ boolean result = testableDestination.flush();
+
+ // Verify
+ assertTrue(result);
+ verify(mockWriter).flush();
+ verify(mockStream).hflush();
+ }
+
+ @Test
+ void testFlushWithoutWriter() {
+ // Test
+ boolean result = testableDestination.flush();
+
+ // Verify
+ assertFalse(result);
+ }
+
+ @Test
+ void testFlushWithWriterException() throws Exception {
+ // Setup
+ testableDestination.setWriter(mockWriter);
+ doThrow(new IOException("Writer error")).when(mockWriter).flush();
+
+ // Test
+ boolean result = testableDestination.flush();
+
+ // Verify
+ assertFalse(result);
+ }
+}
diff --git a/agents-audit/orc-util/pom.xml b/agents-audit/orc-util/pom.xml
index b3d2cb8a71..a20b0b06a1 100644
--- a/agents-audit/orc-util/pom.xml
+++ b/agents-audit/orc-util/pom.xml
@@ -77,6 +77,18 @@
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <version>${junit.jupiter.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <version>${mockito.version}</version>
+ <scope>test</scope>
+ </dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
diff --git a/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/ORCFileUtilTest.java b/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/ORCFileUtilTest.java
new file mode 100644
index 0000000000..c76e80df94
--- /dev/null
+++ b/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/ORCFileUtilTest.java
@@ -0,0 +1,324 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.utils;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.orc.CompressionKind;
+import org.apache.orc.Writer;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
+import org.junit.jupiter.api.Test;
+
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for ORCFileUtil
+ * */
+class ORCFileUtilTest {
+ @Test
+ void testGetInstanceReturnsSingleton() {
+ ORCFileUtil util1 = ORCFileUtil.getInstance();
+ ORCFileUtil util2 = ORCFileUtil.getInstance();
+ assertNotNull(util1);
+ assertSame(util1, util2);
+ }
+
+ @Test
+ void testGetORCCompression() {
+ ORCFileUtil util = new ORCFileUtil();
+ assertEquals(CompressionKind.SNAPPY, util.getORCCompression("snappy"));
+ assertEquals(CompressionKind.LZO, util.getORCCompression("lzo"));
+ assertEquals(CompressionKind.ZLIB, util.getORCCompression("zlib"));
+ assertEquals(CompressionKind.NONE, util.getORCCompression("none"));
+ // Default fallback
+ assertEquals(CompressionKind.SNAPPY, util.getORCCompression("unknown"));
+ assertEquals(CompressionKind.SNAPPY, util.getORCCompression(null));
+ }
+
+ @Test
+ void testGetShortFieldType() {
+ ORCFileUtil util = new ORCFileUtil();
+ assertEquals("string", util.getShortFieldType("java.lang.String"));
+ assertEquals("int", util.getShortFieldType("int"));
+ assertEquals("string", util.getShortFieldType("short"));
+ assertEquals("string", util.getShortFieldType("java.util.Date"));
+ assertEquals("bigint", util.getShortFieldType("long"));
+ assertNull(util.getShortFieldType("float"));
+ }
+
+ @Test
+ void testCastLongObject() {
+ ORCFileUtil util = new ORCFileUtil();
+ assertEquals(123L, util.castLongObject(123L));
+ assertEquals(456L, util.castLongObject(456));
+ assertEquals(789L, util.castLongObject("789"));
+ assertEquals(0L, util.castLongObject("notANumber"));
+ assertEquals(0L, util.castLongObject(null));
+ }
+
+ @Test
+ void testCastStringObject() {
+ ORCFileUtil util = new ORCFileUtil();
+ assertEquals("test", util.castStringObject("test"));
+ Date now = new Date();
+ assertNotNull(util.castStringObject(now));
+ assertNull(util.castStringObject(123));
+ }
+
+ @Test
+ void testGetBytesValues() {
+ ORCFileUtil util = new ORCFileUtil();
+ byte[] result = util.getBytesValues("abc");
+ assertArrayEquals("abc".getBytes(), result);
+ assertArrayEquals("".getBytes(), util.getBytesValues(null));
+ }
+
+ @Test
+ void testGetDateString() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ Date date = new Date(1700000000000L);
+ Method m = ORCFileUtil.class.getDeclaredMethod("getDateString", Date.class);
+ m.setAccessible(true);
+ String result = (String) m.invoke(util, date);
+ assertTrue(result.matches("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}"));
+ }
+
+ @Test
+ void testGetAuditSchemaReturnsStruct() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ Method m = ORCFileUtil.class.getDeclaredMethod("getAuditSchema");
+ m.setAccessible(true);
+ String schema = (String) m.invoke(util);
+ assertTrue(schema.startsWith("struct<"));
+ assertTrue(schema.endsWith(">"));
+ }
+
+ @Test
+ void testGetSchemaFieldTypeMap() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ // set auditSchema field
+ String fakeSchema = "struct";
+ java.lang.reflect.Field f = ORCFileUtil.class.getDeclaredField("auditSchema");
+ f.setAccessible(true);
+ f.set(util, fakeSchema);
+
+ Method m = ORCFileUtil.class.getDeclaredMethod("getSchemaFieldTypeMap");
+ m.setAccessible(true);
+ Map<String, String> map = (Map<String, String>) m.invoke(util);
+ assertEquals("string", map.get("foo"));
+ assertEquals("int", map.get("bar"));
+ }
+
+ @Test
+ void testGetColumnVectorTypeCoversAllCases() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ Method m = ORCFileUtil.class.getDeclaredMethod("getColumnVectorType", String.class);
+ m.setAccessible(true);
+
+ assertTrue(m.invoke(util, "int") instanceof LongColumnVector);
+ assertTrue(m.invoke(util, "bigint") instanceof LongColumnVector);
+ assertTrue(m.invoke(util, "date") instanceof LongColumnVector);
+ assertTrue(m.invoke(util, "boolean") instanceof LongColumnVector);
+ assertTrue(m.invoke(util, "string") instanceof BytesColumnVector);
+ assertTrue(m.invoke(util, "varchar") instanceof BytesColumnVector);
+ assertTrue(m.invoke(util, "char") instanceof BytesColumnVector);
+ assertTrue(m.invoke(util, "binary") instanceof BytesColumnVector);
+ assertTrue(m.invoke(util, "decimal") instanceof DecimalColumnVector);
+ assertTrue(m.invoke(util, "double") instanceof DoubleColumnVector);
+ assertTrue(m.invoke(util, "float") instanceof DoubleColumnVector);
+ assertNull(m.invoke(util, "unknownType"));
+ }
+
+ @Test
+ void testCastLongObjectHandlesNonNumber() {
+ ORCFileUtil util = new ORCFileUtil();
+ // Should log error and return 0L
+ assertEquals(0L, util.castLongObject(new Object()));
+ }
+
+ @Test
+ void testCastStringObjectHandlesNonStringNonDate() {
+ ORCFileUtil util = new ORCFileUtil();
+ // Should log error and return null
+ assertNull(util.castStringObject(123));
+ }
+
+ @Test
+ void testInitSetsConfiguration() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+
+ // Test configuration values
+ int bufferSize = 4096;
+ long stripeSize = 67108864L;
+ String compression = "zlib";
+
+ util.init(bufferSize, stripeSize, compression);
+
+ // Verify fields are set correctly
+ assertEquals(bufferSize, util.orcBufferSize);
+ assertEquals(stripeSize, util.orcStripeSize);
+ assertEquals(CompressionKind.ZLIB, util.compressionKind);
+ assertNotNull(util.schema);
+ assertNotNull(util.batch);
+ assertNotNull(util.auditSchema);
+ }
+
+ @Test
+ void testBuildVectorRowBatch() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+
+ // Initialize to create schema
+ util.init(1000, 10000L, "none");
+
+ // Get the vectorizedRowBatchMap via reflection
+ java.lang.reflect.Field field = ORCFileUtil.class.getDeclaredField("vectorizedRowBatchMap");
+ field.setAccessible(true);
+ Map<String, ColumnVector> map = (Map<String, ColumnVector>) field.get(util);
+
+ // Should have entries for each field in the schema
+ assertFalse(map.isEmpty());
+
+ // Check for expected field types
+ field = ORCFileUtil.class.getDeclaredField("schemaFields");
+ field.setAccessible(true);
+ ArrayList<String> schemaFields = (ArrayList<String>) field.get(util);
+
+ // Ensure all schema fields have corresponding column vectors
+ for (String fieldName : schemaFields) {
+ assertTrue(map.containsKey(fieldName));
+ assertNotNull(map.get(fieldName));
+ }
+ }
+
+ @Test
+ void testGetFieldValueReturnsCorrectInfo() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+
+ // Create a sample event
+ AuthzAuditEvent event = new AuthzAuditEvent();
+ event.setEventId("test123");
+ event.setClientIP("192.168.1.1");
+
+ // Test getting eventId field
+ ORCFileUtil.SchemaInfo info = util.getFieldValue(event, "eventId");
+ assertEquals("eventId", info.getField());
+ assertEquals("java.lang.String", info.getType());
+ assertEquals("test123", info.getValue());
+
+ // Test getting clientIP field
+ info = util.getFieldValue(event, "clientIP");
+ assertEquals("clientIP", info.getField());
+ assertEquals("java.lang.String", info.getType());
+ assertEquals("192.168.1.1", info.getValue());
+ }
+
+ @Test
+ void testSchemaInfoGettersAndSetters() {
+ ORCFileUtil.SchemaInfo info = new ORCFileUtil.SchemaInfo();
+
+ // Test field property
+ info.setField("testField");
+ assertEquals("testField", info.getField());
+
+ // Test type property
+ info.setType("string");
+ assertEquals("string", info.getType());
+
+ // Test value property
+ Object testValue = "testValue";
+ info.setValue(testValue);
+ assertEquals(testValue, info.getValue());
+ }
+
+ @Test
+ void testLogHandlesEmptyEvents() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ util.init(10, 100L, "none");
+
+ // Create mock writer
+ Writer mockWriter = mock(Writer.class);
+
+ // Test with empty collection
+ Collection<AuthzAuditEvent> emptyEvents = new ArrayList<>();
+
+ // Should not throw exception
+ util.log(mockWriter, emptyEvents);
+
+ // Verify writer was not called with any row batch
+ verify(mockWriter, never()).addRowBatch(any());
+ }
+
+ @Test
+ void testLogHandlesBatchSizeExactlyMatchingBufferSize() throws Exception {
+ ORCFileUtil util = new ORCFileUtil();
+ int bufferSize = 3; // Small buffer size for testing
+ util.init(bufferSize, 100L, "none");
+
+ // Create mock writer
+ Writer mockWriter = mock(Writer.class);
+
+ // Create exactly bufferSize events
+ Collection<AuthzAuditEvent> events = new ArrayList<>();
+ for (int i = 0; i < bufferSize; i++) {
+ AuthzAuditEvent event = new AuthzAuditEvent();
+ event.setEventId("test" + i);
+ events.add(event);
+ }
+
+ // Log the events
+ util.log(mockWriter, events);
+
+ // Should call addRowBatch exactly once
+ verify(mockWriter, times(1)).addRowBatch(any());
+ }
+
+ @Test
+ void testGetColumnVectorTypeThrowsForUnsupportedTypes() {
+ ORCFileUtil util = new ORCFileUtil();
+
+ // Test unsupported complex types
+ assertThrows(Exception.class, () -> util.getColumnVectorType("array"));
+ assertThrows(Exception.class, () -> util.getColumnVectorType("map"));
+ assertThrows(Exception.class, () -> util.getColumnVectorType("struct"));
+ assertThrows(Exception.class, () -> util.getColumnVectorType("uniontype"));
+ }
+}
diff --git a/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/RangerORCAuditWriterTest.java b/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/RangerORCAuditWriterTest.java
new file mode 100644
index 0000000000..cea544ae79
--- /dev/null
+++ b/agents-audit/orc-util/src/test/java/org/apache/ranger/audit/utils/RangerORCAuditWriterTest.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.audit.utils;
+
+import org.apache.orc.Writer;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
+import org.junit.jupiter.api.Test;
+
+import java.io.File;
+import java.util.Collections;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+
+/**
+ * @generated by copilot
+ * @description Unit Test cases for RangerORCAuditWriter
+ * */
+class RangerORCAuditWriterTest {
+ @Test
+ void testOrcFileExtensionConstant() {
+ assertEquals(".orc", RangerORCAuditWriter.ORC_FILE_EXTENSION);
+ }
+
+ @Test
+ void testFileTypeDefaultValue() {
+ RangerORCAuditWriter writer = new RangerORCAuditWriter();
+ assertEquals("orc", writer.fileType);
+ }
+
+ @Test
+ void testLogAsORCDelegatesToLogAuditAsORC() throws Exception {
+ RangerORCAuditWriter writer = spy(new RangerORCAuditWriter());
+ doReturn(Collections.singletonList(new AuthzAuditEvent())).when(writer).getAuthzAuditEvents(any());
+ doReturn(true).when(writer).logAuditAsORC(any());
+ assertTrue(writer.logAsORC(Collections.singletonList("event")));
+ verify(writer).logAuditAsORC(any());
+ }
+
+ @Test
+ void testLogFileAlwaysReturnsFalse() throws Exception {
+ RangerORCAuditWriter writer = new RangerORCAuditWriter();
+ assertFalse(writer.logFile(new File("test")));
+ }
+
+ @Test
+ void testStopClosesOrcLogWriter() throws Exception {
+ RangerORCAuditWriter writer = new RangerORCAuditWriter();
+ Writer mockWriter = mock(Writer.class);
+ ORCFileUtil mockOrcUtil = mock(ORCFileUtil.class);
+ writer.orcLogWriter = mockWriter;
+ writer.orcFileUtil = mockOrcUtil;
+ writer.stop();
+ verify(mockOrcUtil).close(mockWriter);
+ assertNull(writer.orcLogWriter);
+ }
+
+ @Test
+ void testStopHandlesExceptionAndNullifiesWriter() throws Exception {
+ RangerORCAuditWriter writer = new RangerORCAuditWriter();
+ Writer mockWriter = mock(Writer.class);
+ ORCFileUtil mockOrcUtil = mock(ORCFileUtil.class);
+ doThrow(new RuntimeException("close error")).when(mockOrcUtil).close(mockWriter);
+ writer.orcLogWriter = mockWriter;
+ writer.orcFileUtil = mockOrcUtil;
+ writer.stop();
+ assertNull(writer.orcLogWriter);
+ }
+
+ @Test
+ void testFlushAndStartAreNoOps() {
+ RangerORCAuditWriter writer = new RangerORCAuditWriter();
+ writer.flush();
+ writer.start();
+ // No exception means pass
+ }
+}