Skip to content

Commit

Permalink
[HWORKS-1082] Always initialize udfso for checking Spark job dependencies (#1738) (#1509)
Browse files Browse the repository at this point in the history
  • Loading branch information
robzor92 authored Mar 18, 2024
1 parent 57e0048 commit 96bb5e5
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 1 deletion.
9 changes: 9 additions & 0 deletions hopsworks-IT/src/test/ruby/spec/jupyter_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,15 @@
update_jupyter(@project, settings)
end

it "should work to start jupyter server with spark files attached" do
  # Fetch the project's current Jupyter configuration, attach an extra Spark
  # distributed file to it, then verify the server starts and stops cleanly.
  get_settings(@project)
  jupyter_settings = json_body
  dist_file = "hdfs:///Projects/#{@project[:projectname]}/Resources/README.md"
  jupyter_settings[:jobConfig][:"spark.yarn.dist.files"] = dist_file
  start_jupyter(@project, settings: jupyter_settings)
  jupyter_running(@project, expected_status: 200)
  stop_jupyter(@project)
end

it "should not allow starting multiple notebook servers" do
start_jupyter(@project)
start_jupyter(@project, expected_status: 400)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -218,8 +218,8 @@ public void inspectDependencies(Project project, Users user, SparkJobConfigurati
throws ProjectException, GenericException {
DistributedFileSystemOps udfso = null;
try {
udfso = dfs.getDfsOps(hdfsUsersBean.getHdfsUserName(project, user));
if(isJob) {
udfso = dfs.getDfsOps(hdfsUsersBean.getHdfsUserName(project, user));
if (!udfso.exists(jobConf.getAppPath())) {
throw new ProjectException(RESTCodes.ProjectErrorCode.FILE_NOT_FOUND, Level.FINEST,
"Job application file does not exist: " + jobConf.getAppPath());
Expand Down

0 comments on commit 96bb5e5

Please sign in to comment.