Skip to content
This repository has been archived by the owner on Apr 14, 2023. It is now read-only.

Commit

Permalink
WorkflowExample comment clarification
Browse files Browse the repository at this point in the history
  • Loading branch information
tkakantousis committed Jan 22, 2018
1 parent 449d261 commit 37625bd
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions spark/src/main/java/io/hops/examples/spark/WorkflowExample.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
import org.apache.spark.sql.SparkSession;

/**
* Example demo showing how to build Spark job workflows in Hopsworks. In particular, it expected that two Spark
* jobs are already created in the project
* Example demo showing how to build Spark job workflows in Hopsworks. In particular, it expects that two Spark
* jobs are already created in the project. It then starts the first job, waits for it to finish and then starts the
* second one.
* <p>
*/
public class WorkflowExample {
Expand All @@ -24,16 +25,16 @@ public static void main(String[] args) throws CredentialsNotFoundException, Inte
.appName(HopsUtil.getJobName())
.getOrCreate();

//Start job with ID: 6145 that prepares data for job with id 6146
//Start job with given ID
Response resp = WorkflowManager.startJobs(Integer.parseInt(args[0]));
if (resp.getStatus() != Response.Status.OK.getStatusCode()) {
LOG.log(Level.SEVERE, "Job could not be started");
System.exit(1);
}
//Wait for job with ID: 6145 to complete
//Wait for previous job to complete
WorkflowManager.waitForJobs(Integer.parseInt(args[0]));

//Start job with ID: 6146
//Start second job
WorkflowManager.startJobs(Integer.parseInt(args[1]));

//Stop spark session
Expand Down

0 comments on commit 37625bd

Please sign in to comment.