
Commit

Merge branch 'BlockAndStopWordTest' of https://github.com/Nitish1814/zingg-Nitish into testRefactor

# Conflicts:
#	common/client/src/main/java/zingg/common/client/util/PojoToArrayConverter.java
#	common/client/src/main/java/zingg/common/client/util/StructTypeFromPojoClass.java
#	common/client/src/main/java/zingg/common/client/util/WithSession.java
#	common/core/src/test/java/zingg/common/core/block/TestBlockBase.java
#	common/core/src/test/java/zingg/common/core/preprocess/TestStopWordsBase.java
#	spark/client/src/main/java/zingg/spark/client/util/SparkDFObjectUtil.java
#	spark/core/src/test/java/zingg/common/core/block/TestSparkBlock.java
#	spark/core/src/test/java/zingg/common/core/preprocess/TestSparkStopWords.java
#	spark/core/src/test/java/zingg/common/core/util/SparkStopWordRemoverUtility.java
Nitish1814 committed Jul 13, 2024
2 parents 41e3b3f + c0f3ee2 commit 268595c
Showing 11 changed files with 86 additions and 35 deletions.
IWithSession.java (new file, package zingg.common.client.util)
@@ -0,0 +1,9 @@
package zingg.common.client.util;

public interface IWithSession<S> {

public void setSession(S s);

public S getSession();

}
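The reworked WithSession class further down in this diff is the generic implementation of this interface; together they let test utilities such as SparkDFObjectUtil hold a backend session (here a SparkSession) behind a plain type parameter instead of a Spark-specific holder.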
common/client/src/main/java/zingg/common/client/util/PojoToArrayConverter.java
@@ -5,15 +5,34 @@
public class PojoToArrayConverter {

public static Object[] getObjectArray(Object object) throws IllegalAccessException {
Field[] fields = object.getClass().getDeclaredFields();
int fieldCount = fields.length;
Field[] fieldsInChildClass = object.getClass().getDeclaredFields();
Field[] fieldsInParentClass = null;

int fieldCountInChildClass = fieldsInChildClass.length;
int fieldCount = fieldCountInChildClass;

if (object.getClass().getSuperclass() != null) {
fieldCount += object.getClass().getSuperclass().getDeclaredFields().length;
fieldsInParentClass = object.getClass().getSuperclass().getDeclaredFields();
}

//fieldCount = fieldCountChild + fieldCountParent
Object[] objArr = new Object[fieldCount];

for (int i = 0; i < objArr.length; i++) {
Field field = fields[i];
int idx = 0;

//iterate through child class fields
for (; idx < fieldCountInChildClass; idx++) {
Field field = fieldsInChildClass[idx];
field.setAccessible(true);
objArr[idx] = field.get(object);
}

objArr[i] = field.get(object);
//iterate through super class fields
for (; idx < fieldCount; idx++) {
Field field = fieldsInParentClass[idx - fieldCountInChildClass];
field.setAccessible(true);
objArr[idx] = field.get(object);
}

return objArr;
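With this change getObjectArray() flattens the declared fields of the object's own class first and then the declared fields of its direct superclass into one array. A minimal sketch of the expected ordering, using hypothetical PersonBase/CustomerPojo classes that are not part of this commit:

import zingg.common.client.util.PojoToArrayConverter;

// Hypothetical POJOs, used only to illustrate the new field ordering; not part of this commit.
class PersonBase {
    String name;
}

class CustomerPojo extends PersonBase {
    int customerId;
}

class ConverterOrderingSketch {
    public static void main(String[] args) throws IllegalAccessException {
        CustomerPojo pojo = new CustomerPojo();
        pojo.customerId = 42;
        pojo.name = "Ada";

        // Child-class fields come first, then the direct superclass fields.
        // (getDeclaredFields() returns declaration order on common JVMs, though the
        // spec does not strictly guarantee it.)
        Object[] values = PojoToArrayConverter.getObjectArray(pojo);
        System.out.println(values[0]); // 42  -> CustomerPojo.customerId
        System.out.println(values[1]); // Ada -> PersonBase.name
    }
}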
common/client/src/main/java/zingg/common/client/util/StructTypeFromPojoClass.java
@@ -10,16 +10,20 @@ public abstract class StructTypeFromPojoClass<ST, SF, T> {

public List<SF> getFields(Class<?> objClass) {
List<SF> structFields = new ArrayList<SF>();
Field[] fields = objClass.getDeclaredFields();

//add child class fields in struct
for (Field f : fields) {
structFields.add(getStructField(f));
}

//add parent class fields in struct
if (objClass.getSuperclass() != null) {
Field[] fieldsSuper = objClass.getSuperclass().getDeclaredFields();
for (Field f : fieldsSuper) {
structFields.add(getStructField(f));
}
}
Field[] fields = objClass.getDeclaredFields();
for (Field f : fields) {
structFields.add(getStructField(f));
}
return structFields;
}

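Note that getFields() now emits the child-class fields before the superclass fields, the same order in which PojoToArrayConverter.getObjectArray() fills the value array, so a schema built from a POJO class lines up index-for-index with the converted values (see the sketch above).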
common/client/src/main/java/zingg/common/client/util/WithSession.java
@@ -1,9 +1,15 @@
package zingg.common.client.util;

public interface WithSession<S> {

public void setSession(S s);

public S getSession();

public class WithSession<S> implements IWithSession<S> {

S session;
@Override
public void setSession(S session) {
this.session = session;
}

@Override
public S getSession() {
return session;
}
}
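A condensed sketch of how the tests further down in this diff wire the new holder together (the SparkSession construction is elided; SparkDFObjectUtil appears later in this commit):

import org.apache.spark.sql.SparkSession;

import zingg.common.client.util.IWithSession;
import zingg.common.client.util.WithSession;
import zingg.spark.client.util.SparkDFObjectUtil;

// Condensed from the TestSparkBlock / TestSparkStopWords setup shown below.
class WithSessionWiringSketch {
    static SparkDFObjectUtil wire(SparkSession spark) {
        IWithSession<SparkSession> iWithSession = new WithSession<>();
        iWithSession.setSession(spark);                 // store the session once...
        return new SparkDFObjectUtil(iWithSession);     // ...and share it with the DataFrame utilities
    }
}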
common/core/src/test/java/zingg/common/core/block/TestBlockBase.java
@@ -4,6 +4,7 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.junit.jupiter.api.Test;

@@ -51,7 +52,6 @@ public void testTree() throws Throwable {
}

private IArguments getArguments() throws ZinggClientException {
String configFilePath = getClass().getResource("../../testFebrl/config.json").getFile();

IArguments args = argumentsUtil.createArgumentsFromJSON(configFilePath, "trainMatch");

common/core/src/test/java/zingg/common/core/preprocess/TestStopWordsBase.java
@@ -19,7 +19,7 @@
import zingg.common.core.model.Statement;
import zingg.common.core.model.PostStopWordProcess;
import zingg.common.core.model.PriorStopWordProcess;
import zingg.common.core.util.StopWordRemoverUtility;
import zingg.common.core.util.IStopWordRemoverUtility;

public abstract class TestStopWordsBase<S, D, R, C, T> {

@@ -29,10 +29,10 @@ public abstract class TestStopWordsBase<S, D, R, C, T> {
private final Context<S, D, R, C, T> context;


public TestStopWordsBase(DFObjectUtil<S, D, R, C> dfObjectUtil, StopWordRemoverUtility<S, D, R, C, T> stopWordRemoverUtility,
public TestStopWordsBase(DFObjectUtil<S, D, R, C> dfObjectUtil, IStopWordRemoverUtility<S, D, R, C, T> IStopWordRemoverUtility,
Context<S, D, R, C, T> context) throws ZinggClientException {
this.dfObjectUtil = dfObjectUtil;
this.stopWordsRemovers = stopWordRemoverUtility.getStopWordRemovers(context, new Arguments());
this.stopWordsRemovers = IStopWordRemoverUtility.getStopWordRemovers(context, new Arguments());
this.context = context;
}

IStopWordRemoverUtility.java (new file, package zingg.common.core.util)
@@ -0,0 +1,13 @@
package zingg.common.core.util;

import zingg.common.client.IArguments;
import zingg.common.client.ZinggClientException;
import zingg.common.core.context.Context;
import zingg.common.core.preprocess.StopWordsRemover;

import java.util.List;

public interface IStopWordRemoverUtility<S, D, R, C, T> {

List<StopWordsRemover<S, D, R, C, T>> getStopWordRemovers(Context<S, D, R, C, T> context, IArguments arguments) throws ZinggClientException;
}
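TestStopWordsBase (above) now depends on this interface, and SparkStopWordRemoverUtility at the end of this diff is its Spark implementation.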
spark/client/src/main/java/zingg/spark/client/util/SparkDFObjectUtil.java
@@ -11,14 +11,14 @@

import zingg.common.client.ZFrame;
import zingg.common.client.util.DFObjectUtil;
import zingg.common.client.util.WithSession;
import zingg.common.client.util.IWithSession;
import zingg.spark.client.SparkFrame;

public class SparkDFObjectUtil extends DFObjectUtil<SparkSession, Dataset<Row>, Row, Column> {

private final WithSession<SparkSession> withSparkSession;
private final IWithSession<SparkSession> withSparkSession;

public SparkDFObjectUtil(WithSession<SparkSession> withSparkSession) {
public SparkDFObjectUtil(IWithSession<SparkSession> withSparkSession) {
this.withSparkSession = withSparkSession;
}

spark/core/src/test/java/zingg/common/core/block/TestSparkBlock.java
@@ -11,9 +11,9 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import zingg.common.client.IArguments;
import zingg.common.client.util.IWithSession;
import zingg.common.client.util.WithSession;
import zingg.spark.client.util.SparkDFObjectUtil;
import zingg.spark.client.util.WithSparkSession;
import zingg.spark.core.context.ZinggSparkContext;
import zingg.spark.core.util.SparkBlockingTreeUtil;
import zingg.spark.core.util.SparkHashUtil;
@@ -25,10 +25,10 @@ public class TestSparkBlock extends TestBlockBase<SparkSession, Dataset<Row>, Ro
public static JavaSparkContext ctx;
public static ZinggSparkContext zsCTX;
public static SparkSession spark;
public static WithSession<SparkSession> withSession;
public static IWithSession<SparkSession> iWithSession;

public TestSparkBlock() {
super(new SparkDFObjectUtil(withSession), new SparkHashUtil(spark), new SparkBlockingTreeUtil(spark, zsCTX.getPipeUtil()));
super(new SparkDFObjectUtil(iWithSession), new SparkHashUtil(spark), new SparkBlockingTreeUtil(spark, zsCTX.getPipeUtil()));
}

@BeforeAll
@@ -44,8 +44,8 @@ protected static void setUpSpark() {
.appName("Zingg" + "Junit")
.getOrCreate();
ctx = new JavaSparkContext(spark.sparkContext());
withSession = new WithSparkSession();
withSession.setSession(spark);
iWithSession = new WithSession<>();
iWithSession.setSession(spark);
zsCTX = new ZinggSparkContext();
zsCTX.init(spark);
} catch (Throwable e) {
spark/core/src/test/java/zingg/common/core/preprocess/TestSparkStopWords.java
@@ -11,10 +11,10 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import zingg.common.client.ZinggClientException;
import zingg.common.client.util.IWithSession;
import zingg.common.client.util.WithSession;
import zingg.common.core.util.SparkStopWordRemoverUtility;
import zingg.spark.client.util.SparkDFObjectUtil;
import zingg.spark.client.util.WithSparkSession;
import zingg.spark.core.context.ZinggSparkContext;

public class TestSparkStopWords extends TestStopWordsBase<SparkSession, Dataset<Row>, Row, Column, DataType> {
@@ -23,15 +23,15 @@ public class TestSparkStopWords extends TestStopWordsBase<SparkSession, Dataset<
public static JavaSparkContext ctx;
public static SparkSession spark;
public static ZinggSparkContext zsCTX;
public static WithSession<SparkSession> withSession;
public static IWithSession<SparkSession> iWithSession;

@BeforeAll
public static void setup() {
setUpSpark();
}

public TestSparkStopWords() throws ZinggClientException {
super(new SparkDFObjectUtil(withSession), new SparkStopWordRemoverUtility(), zsCTX);
super(new SparkDFObjectUtil(iWithSession), new SparkStopWordRemoverUtility(), zsCTX);
}

protected static void setUpSpark() {
@@ -42,8 +42,8 @@ protected static void setUpSpark() {
.appName("Zingg" + "Junit")
.getOrCreate();
ctx = new JavaSparkContext(spark.sparkContext());
withSession = new WithSparkSession();
withSession.setSession(spark);
iWithSession = new WithSession<>();
iWithSession.setSession(spark);
zsCTX = new ZinggSparkContext();
zsCTX.init(spark);
} catch (Throwable e) {
spark/core/src/test/java/zingg/common/core/util/SparkStopWordRemoverUtility.java
@@ -18,7 +18,7 @@
import java.util.List;
import java.util.Objects;

public class SparkStopWordRemoverUtility implements StopWordRemoverUtility<SparkSession, Dataset<Row>, Row, Column, DataType> {
public class SparkStopWordRemoverUtility implements IStopWordRemoverUtility<SparkSession, Dataset<Row>, Row, Column, DataType> {

@Override
public List<StopWordsRemover<SparkSession, Dataset<Row>, Row, Column, DataType>> getStopWordRemovers(Context<SparkSession, Dataset<Row>, Row, Column, DataType> context, IArguments arguments) throws ZinggClientException {
@@ -40,7 +40,7 @@ public List<StopWordsRemover<SparkSession, Dataset<Row>, Row, Column, DataType>>

//add second stopWordRemover
String stopWordsFileName1 = Objects.requireNonNull(
StopWordRemoverUtility.class.getResource("../../../../preProcess/stopWords.csv")).getFile();
IStopWordRemoverUtility.class.getResource("../../../../preProcess/stopWords.csv")).getFile();
FieldDefinition fieldDefinition1 = new FieldDefinition();
fieldDefinition1.setStopWords(stopWordsFileName1);
fieldDefinition1.setFieldName("field1");
@@ -50,7 +50,7 @@ public List<StopWordsRemover<SparkSession, Dataset<Row>, Row, Column, DataType>>

//add third stopWordRemover
String stopWordsFileName2 = Objects.requireNonNull(
StopWordRemoverUtility.class.getResource("../../../../preProcess/stopWordsWithoutHeader.csv")).getFile();
IStopWordRemoverUtility.class.getResource("../../../../preProcess/stopWordsWithoutHeader.csv")).getFile();
FieldDefinition fieldDefinition2 = new FieldDefinition();
fieldDefinition2.setStopWords(stopWordsFileName2);
fieldDefinition2.setFieldName("field1");
