Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

JUnit not working #529

Merged
merged 11 commits into from
Mar 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
133 changes: 0 additions & 133 deletions common/core/src/test/java/zingg/TestZingg.java

This file was deleted.

45 changes: 0 additions & 45 deletions common/core/src/test/java/zingg/block/TestBlock.java

This file was deleted.

20 changes: 3 additions & 17 deletions common/core/src/test/resources/testFebrl/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -84,23 +84,9 @@
"delimiter": ",",
"header":false
},
"schema":
"{\"type\" : \"struct\",
\"fields\" : [
{\"name\":\"id\", \"type\":\"string\", \"nullable\":false},
{\"name\":\"fname\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"lname\",\"type\":\"string\",\"nullable\":true} ,
{\"name\":\"stNo\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"add1\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"add2\",\"type\":\"string\",\"nullable\":true} ,
{\"name\":\"city\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"areacode\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"state\", \"type\":\"string\", \"nullable\":true},
{\"name\":\"dob\",\"type\":\"string\",\"nullable\":true} ,
{\"name\":\"ssn\",\"type\":\"string\",\"nullable\":true}
]
}"
}],
"schema": "id string, fname string, lname string, stNo string, add1 string, add2 string, city string, state string, areacode string, dob string, ssn string"
}
],
"labelDataSampleSize" : 0.5,
"numPartitions":4,
"modelId": 100,
Expand Down
5 changes: 5 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,11 @@
<showWarnings>true</showWarnings>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.2</version>
</plugin>
</plugins>

</build>
Expand Down
6 changes: 5 additions & 1 deletion spark/client/src/test/java/zingg/client/TestSparkFrame.java
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package zingg.client;

import static org.apache.spark.sql.functions.col;
import org.apache.spark.sql.functions;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
Expand All @@ -12,9 +11,11 @@
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.junit.jupiter.api.Test;

import scala.collection.JavaConverters;
import zingg.common.client.ZFrame;
import zingg.spark.client.SparkFrame;

public class TestSparkFrame extends TestSparkFrameBase {
Expand Down Expand Up @@ -154,6 +155,9 @@ public void testHead() {

@Test
public void testIsEmpty() {
if (spark==null) {
setUpSpark();
}
Dataset<Row> df = spark.emptyDataFrame();
SparkFrame sf = new SparkFrame(df);
assertTrue(sf.isEmpty(), "DataFrame is not empty");
Expand Down
19 changes: 18 additions & 1 deletion spark/client/src/test/java/zingg/client/TestSparkFrameBase.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;

import zingg.common.client.Arguments;
import zingg.common.client.ZFrame;
import zingg.spark.client.SparkFrame;

public class TestSparkFrameBase {
Expand All @@ -38,6 +40,10 @@ public class TestSparkFrameBase {

@BeforeAll
public static void setup() {
setUpSpark();
}

protected static void setUpSpark() {
try {
spark = SparkSession
.builder()
Expand Down Expand Up @@ -67,6 +73,11 @@ public static void teardown() {
}

public Dataset<Row> createSampleDataset() {

if (spark==null) {
setUpSpark();
}

StructType schemaOfSample = new StructType(new StructField[] {
new StructField("recid", DataTypes.StringType, false, Metadata.empty()),
new StructField("givenname", DataTypes.StringType, false, Metadata.empty()),
Expand All @@ -91,6 +102,10 @@ public Dataset<Row> createSampleDataset() {
}

public Dataset<Row> createSampleDatasetHavingMixedDataTypes() {
if (spark==null) {
setUpSpark();
}

StructType schemaOfSample = new StructType(new StructField[] {
new StructField(STR_RECID, DataTypes.IntegerType, false, Metadata.empty()),
new StructField(STR_GIVENNAME, DataTypes.StringType, false, Metadata.empty()),
Expand All @@ -109,10 +124,12 @@ public Dataset<Row> createSampleDatasetHavingMixedDataTypes() {
return sample;
}


protected void assertTrueCheckingExceptOutput(ZFrame<Dataset<Row>, Row, Column> sf1, ZFrame<Dataset<Row>, Row, Column> sf2, String message) {
assertTrue(sf1.except(sf2).isEmpty(), message);
}



protected void assertTrueCheckingExceptOutput(ZFrame<Dataset<Row>, Row, Column> sf1, Dataset<Row> df2, String message) {
SparkFrame sf2 = new SparkFrame(df2);
assertTrue(sf1.except(sf2).isEmpty(), message);
Expand Down
43 changes: 24 additions & 19 deletions spark/core/src/test/java/zingg/TestFebrlDataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,51 +8,56 @@
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import zingg.client.Arguments;
import zingg.client.ZinggClientException;
import zingg.client.pipe.FilePipe;
import zingg.client.pipe.InMemoryPipe;
import zingg.client.pipe.Pipe;
import zingg.client.util.ColName;
import zingg.common.client.Arguments;
import zingg.common.client.ZinggClientException;
import zingg.common.client.pipe.FilePipe;
import zingg.common.client.pipe.Pipe;
import zingg.common.client.util.ColName;
import zingg.common.core.executor.TrainMatcher;
import zingg.spark.client.pipe.SparkPipe;
import zingg.spark.core.executor.SparkTrainMatcher;
import zingg.spark.core.executor.ZinggSparkTester;
/**end to end integration test*/
public class TestFebrlDataset extends ZinggSparkTester{
public static final Log LOG = LogFactory.getLog(TestFebrlDataset.class);


InMemoryPipe outputPipe;
SparkPipe outputPipe;

@BeforeEach
public void setUp() throws Exception, ZinggClientException{
args = Arguments.createArgumentsFromJSON(getClass().getResource("/testFebrl/config.json").getFile());
args.setZinggDir(getClass().getResource("/testFebrl/models").getPath());
String configFilePath = getClass().getResource("../testFebrl/config.json").getFile();
System.out.println("configFilePath "+configFilePath);
args = Arguments.createArgumentsFromJSON(configFilePath);
String modelPath = getClass().getResource("../testFebrl/models").getPath();
System.out.println("modelPath "+modelPath);
args.setZinggDir(modelPath);
Pipe dataPipe = args.getData()[0];
dataPipe.setProp(FilePipe.LOCATION, getClass().getResource("/testFebrl/test.csv").getPath());
String csvPath = getClass().getResource("../testFebrl/test.csv").getPath();
System.out.println("csvPath "+csvPath);
dataPipe.setProp(FilePipe.LOCATION, csvPath);
args.setData(new Pipe[]{dataPipe});
outputPipe = new InMemoryPipe();
outputPipe = new SparkPipe();
outputPipe.setFormat(Pipe.FORMAT_INMEMORY);
args.setOutput(new Pipe[]{outputPipe});
}


@Test
public void testModelAccuracy(){
TrainMatcher tm = new TrainMatcher();
TrainMatcher tm = new SparkTrainMatcher();
try {
tm.init(args, "");
tm.setSpark(spark);
tm.setCtx(ctx);
// tm.setSpark(spark);
// tm.setCtx(ctx);
tm.setArgs(args);
tm.execute();


Dataset<Row> df = outputPipe.getRecords();
Dataset<Row> df = outputPipe.getDataset().df();
assertEquals(65,df.count());


Expand Down
Loading