diff --git a/exporter/impl_jobs.go b/exporter/impl_jobs.go
new file mode 100644
index 000000000..a74c2568c
--- /dev/null
+++ b/exporter/impl_jobs.go
@@ -0,0 +1,198 @@
+package exporter
+
+import (
+	"fmt"
+	"log"
+	"strconv"
+	"strings"
+
+	sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs"
+
+	"github.com/databricks/terraform-provider-databricks/common"
+	tf_jobs "github.com/databricks/terraform-provider-databricks/jobs"
+	"github.com/databricks/terraform-provider-databricks/workspace"
+)
+
+func importTask(ic *importContext, task sdk_jobs.Task, jobName, rID string) {
+	if task.NotebookTask != nil {
+		if task.NotebookTask.Source != "GIT" {
+			ic.emitNotebookOrRepo(task.NotebookTask.NotebookPath)
+		}
+		ic.emitFilesFromMap(task.NotebookTask.BaseParameters)
+		if task.NotebookTask.WarehouseId != "" {
+			ic.Emit(&resource{
+				Resource: "databricks_sql_endpoint",
+				ID:       task.NotebookTask.WarehouseId,
+			})
+		}
+	}
+	if task.PipelineTask != nil {
+		ic.Emit(&resource{
+			Resource: "databricks_pipeline",
+			ID:       task.PipelineTask.PipelineId,
+		})
+	}
+	if task.SparkPythonTask != nil {
+		if task.SparkPythonTask.Source != "GIT" {
+			if strings.HasPrefix(task.SparkPythonTask.PythonFile, "dbfs:") {
+				ic.Emit(&resource{
+					Resource: "databricks_dbfs_file",
+					ID:       task.SparkPythonTask.PythonFile,
+				})
+			} else {
+				ic.emitWorkspaceFileOrRepo(task.SparkPythonTask.PythonFile)
+			}
+		}
+		ic.emitFilesFromSlice(task.SparkPythonTask.Parameters)
+	}
+	if task.PythonWheelTask != nil {
+		ic.emitFilesFromSlice(task.PythonWheelTask.Parameters)
+		ic.emitFilesFromMap(task.PythonWheelTask.NamedParameters)
+	}
+	if task.SparkJarTask != nil {
+		ic.emitFilesFromSlice(task.SparkJarTask.Parameters)
+	}
+	if task.SparkSubmitTask != nil {
+		ic.emitFilesFromSlice(task.SparkSubmitTask.Parameters)
+	}
+	if task.SqlTask != nil {
+		if task.SqlTask.Query != nil {
+			ic.Emit(&resource{
+				Resource: "databricks_query",
+				ID:       task.SqlTask.Query.QueryId,
+			})
+		}
+		if task.SqlTask.Dashboard != nil {
+			ic.Emit(&resource{
+				Resource: "databricks_sql_dashboard",
+				ID:       task.SqlTask.Dashboard.DashboardId,
+			})
+		}
+		if task.SqlTask.Alert != nil {
+			ic.Emit(&resource{
+				Resource: "databricks_alert",
+				ID:       task.SqlTask.Alert.AlertId,
+			})
+		}
+		if task.SqlTask.WarehouseId != "" {
+			ic.Emit(&resource{
+				Resource: "databricks_sql_endpoint",
+				ID:       task.SqlTask.WarehouseId,
+			})
+		}
+		if task.SqlTask.File != nil && task.SqlTask.File.Source == "WORKSPACE" {
+			ic.emitWorkspaceFileOrRepo(task.SqlTask.File.Path)
+		}
+	}
+	if task.DbtTask != nil {
+		if task.DbtTask.WarehouseId != "" {
+			ic.Emit(&resource{
+				Resource: "databricks_sql_endpoint",
+				ID:       task.DbtTask.WarehouseId,
+			})
+		}
+		if task.DbtTask.Source == "WORKSPACE" {
+			directory := task.DbtTask.ProjectDirectory
+			if ic.isInRepoOrGitFolder(directory, true) {
+				ic.emitRepoOrGitFolder(directory, true)
+			} else {
+				// Traverse the dbt project directory and emit all objects found in it
+				nbAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client)
+				objects, err := nbAPI.List(directory, true, true)
+				if err == nil {
+					for _, object := range objects {
+						if object.ObjectType != workspace.File {
+							continue
+						}
+						ic.maybeEmitWorkspaceObject("databricks_workspace_file", object.Path, &object)
+					}
+				} else {
+					log.Printf("[WARN] Can't list directory %s for DBT task in job %s (id: %s)", directory, jobName, rID)
+				}
+			}
+		}
+	}
+	if task.RunJobTask != nil && task.RunJobTask.JobId != 0 {
+		ic.Emit(&resource{
+			Resource: "databricks_job",
+			ID:       strconv.FormatInt(task.RunJobTask.JobId, 10),
+		})
+		ic.emitFilesFromMap(task.RunJobTask.JobParameters)
+	}
+	if task.ForEachTask != nil {
+		importTask(ic, task.ForEachTask.Task, jobName, rID)
+	}
+	ic.importCluster(task.NewCluster)
+	if task.ExistingClusterId != "" {
+		ic.Emit(&resource{
+			Resource: "databricks_cluster",
+			ID:       task.ExistingClusterId,
+		})
+	}
+	ic.emitLibraries(task.Libraries)
+
+	if task.WebhookNotifications != nil {
+		ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnFailure)
+		ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnSuccess)
+		ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnDurationWarningThresholdExceeded)
+		ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnStart)
+		ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnStreamingBacklogExceeded)
+	}
+	if task.EmailNotifications != nil {
+		ic.emitListOfUsers(task.EmailNotifications.OnDurationWarningThresholdExceeded)
+		ic.emitListOfUsers(task.EmailNotifications.OnFailure)
+		ic.emitListOfUsers(task.EmailNotifications.OnStart)
+		ic.emitListOfUsers(task.EmailNotifications.OnSuccess)
+		ic.emitListOfUsers(task.EmailNotifications.OnStreamingBacklogExceeded)
+	}
+}
+
+func importJob(ic *importContext, r *resource) error {
+	var job tf_jobs.JobSettingsResource
+	s := ic.Resources["databricks_job"].Schema
+	common.DataToStructPointer(r.Data, s, &job)
+	ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/jobs/%s", r.ID),
+		"job_"+ic.Importables["databricks_job"].Name(ic, r.Data))
+	for _, task := range job.Tasks {
+		importTask(ic, task, job.Name, r.ID)
+	}
+	for _, jc := range job.JobClusters {
+		ic.importCluster(&jc.NewCluster)
+	}
+	if job.RunAs != nil {
+		if job.RunAs.UserName != "" {
+			ic.Emit(&resource{
+				Resource:  "databricks_user",
+				Attribute: "user_name",
+				Value:     job.RunAs.UserName,
+			})
+		}
+		if job.RunAs.ServicePrincipalName != "" {
+			ic.Emit(&resource{
+				Resource:  "databricks_service_principal",
+				Attribute: "application_id",
+				Value:     job.RunAs.ServicePrincipalName,
+			})
+		}
+	}
+	if job.EmailNotifications != nil {
+		ic.emitListOfUsers(job.EmailNotifications.OnDurationWarningThresholdExceeded)
+		ic.emitListOfUsers(job.EmailNotifications.OnFailure)
+		ic.emitListOfUsers(job.EmailNotifications.OnStart)
+		ic.emitListOfUsers(job.EmailNotifications.OnSuccess)
+		ic.emitListOfUsers(job.EmailNotifications.OnStreamingBacklogExceeded)
+	}
+	if job.WebhookNotifications != nil {
+		ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnFailure)
+		ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnSuccess)
+		ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnDurationWarningThresholdExceeded)
+		ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnStart)
+		ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnStreamingBacklogExceeded)
+	}
+	for _, param := range job.Parameters {
+		ic.emitIfWsfsFile(param.Default)
+		ic.emitIfVolumeFile(param.Default)
+	}
+
+	return ic.importLibraries(r.Data, s)
+}
diff --git a/exporter/importables.go b/exporter/importables.go
index aeb265037..31010bc56 100644
--- a/exporter/importables.go
+++ b/exporter/importables.go
@@ -451,6 +451,62 @@ var resourcesMap map[string]importable = map[string]importable{
 			{Path: "task.webhook_notifications.on_start.id", Resource: "databricks_notification_destination"},
 			{Path: "task.webhook_notifications.on_success.id", Resource: "databricks_notification_destination"},
 			{Path: "task.webhook_notifications.on_streaming_backlog_exceeded.id", Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.dbt_task.warehouse_id", Resource: "databricks_sql_endpoint"},
+			{Path: "task.for_each_task.task.existing_cluster_id", Resource: "databricks_cluster"},
+			{Path: "task.for_each_task.task.library.egg", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.library.egg", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.library.jar", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.library.jar", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.library.jar", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.library.whl", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.library.whl", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.library.whl", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.library.requirements", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.library.requirements", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.new_cluster.aws_attributes.instance_profile_arn", Resource: "databricks_instance_profile"},
+			{Path: "task.for_each_task.task.new_cluster.init_scripts.dbfs.destination", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.new_cluster.init_scripts.volumes.destination", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.new_cluster.init_scripts.workspace.destination", Resource: "databricks_workspace_file"},
+			{Path: "task.for_each_task.task.new_cluster.instance_pool_id", Resource: "databricks_instance_pool"},
+			{Path: "task.for_each_task.task.new_cluster.driver_instance_pool_id", Resource: "databricks_instance_pool"},
+			{Path: "task.for_each_task.task.new_cluster.policy_id", Resource: "databricks_cluster_policy"},
+			{Path: "task.for_each_task.task.notebook_task.base_parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.notebook_task.base_parameters", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.notebook_task.base_parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.notebook_task.notebook_path", Resource: "databricks_notebook"},
+			{Path: "task.for_each_task.task.notebook_task.notebook_path", Resource: "databricks_notebook", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.notebook_task.warehouse_id", Resource: "databricks_sql_endpoint"},
+			{Path: "task.for_each_task.task.pipeline_task.pipeline_id", Resource: "databricks_pipeline"},
+			{Path: "task.for_each_task.task.python_wheel_task.named_parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.python_wheel_task.named_parameters", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.python_wheel_task.named_parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.python_wheel_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.python_wheel_task.parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.run_job_task.job_id", Resource: "databricks_job"},
+			{Path: "task.for_each_task.task.run_job_task.job_parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.run_job_task.job_parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.spark_jar_task.jar_uri", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.spark_jar_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.spark_jar_task.parameters", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.spark_jar_task.parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.spark_python_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.spark_python_task.python_file", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.spark_python_task.python_file", Resource: "databricks_workspace_file", Match: "path"},
+			{Path: "task.for_each_task.task.spark_submit_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
+			{Path: "task.for_each_task.task.spark_submit_task.parameters", Resource: "databricks_file"},
+			{Path: "task.for_each_task.task.spark_submit_task.parameters", Resource: "databricks_workspace_file", Match: "workspace_path"},
+			{Path: "task.for_each_task.task.sql_task.file.path", Resource: "databricks_workspace_file", Match: "path"},
+			{Path: "task.for_each_task.task.dbt_task.project_directory", Resource: "databricks_directory", Match: "path"},
+			{Path: "task.for_each_task.task.sql_task.alert.alert_id", Resource: "databricks_alert"},
+			{Path: "task.for_each_task.task.sql_task.dashboard.dashboard_id", Resource: "databricks_sql_dashboard"},
+			{Path: "task.for_each_task.task.sql_task.query.query_id", Resource: "databricks_query"},
+			{Path: "task.for_each_task.task.sql_task.warehouse_id", Resource: "databricks_sql_endpoint"},
+			{Path: "task.for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded.id",
+				Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.webhook_notifications.on_failure.id", Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.webhook_notifications.on_start.id", Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.webhook_notifications.on_success.id", Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded.id", Resource: "databricks_notification_destination"},
 			{Path: "parameter.default", Resource: "databricks_workspace_file", Match: "workspace_path"},
 			{Path: "parameter.default", Resource: "databricks_workspace_file", Match: "path"},
 			{Path: "parameter.default", Resource: "databricks_file", Match: "path"},
@@ -465,16 +521,31 @@ var resourcesMap map[string]importable = map[string]importable{
 			{Path: "task.email_notifications.on_streaming_backlog_exceeded", Resource: "databricks_user",
 				Match: "user_name", MatchType: MatchCaseInsensitive},
 			{Path: "run_as.user_name", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive},
-			{Path: "webhook_notifications.on_duration_warning_threshold_exceeded.id", Resource: "databricks_notification_destination"},
+			{Path: "task.for_each_task.task.email_notifications.on_duration_warning_threshold_exceeded", Resource: "databricks_user",
+				Match: "user_name", MatchType: MatchCaseInsensitive},
+			{Path: "task.for_each_task.task.email_notifications.on_failure", Resource: "databricks_user", Match: "user_name",
+				MatchType: MatchCaseInsensitive},
+			{Path: "task.for_each_task.task.email_notifications.on_start", Resource: "databricks_user", Match: "user_name",
+				MatchType: MatchCaseInsensitive},
+			{Path: "task.for_each_task.task.email_notifications.on_success", Resource: "databricks_user", Match: "user_name",
+				MatchType: MatchCaseInsensitive},
+			{Path: "task.for_each_task.task.email_notifications.on_streaming_backlog_exceeded", Resource: "databricks_user",
+				Match: "user_name", MatchType: MatchCaseInsensitive},
+			{Path: "webhook_notifications.on_duration_warning_threshold_exceeded.id",
+				Resource: "databricks_notification_destination"},
 			{Path: "webhook_notifications.on_failure.id", Resource: "databricks_notification_destination"},
 			{Path: "webhook_notifications.on_start.id", Resource: "databricks_notification_destination"},
 			{Path: "webhook_notifications.on_success.id", Resource: "databricks_notification_destination"},
-			{Path: "webhook_notifications.on_streaming_backlog_exceeded.id", Resource: "databricks_notification_destination"},
+			{Path: "webhook_notifications.on_streaming_backlog_exceeded.id",
+				Resource: "databricks_notification_destination"},
 			{Path: "email_notifications.on_duration_warning_threshold_exceeded", Resource: "databricks_user",
 				Match: "user_name", MatchType: MatchCaseInsensitive},
-			{Path: "email_notifications.on_failure", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive},
-			{Path: "email_notifications.on_start", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive},
-			{Path: "email_notifications.on_success", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive},
+			{Path: "email_notifications.on_failure", Resource: "databricks_user",
+				Match: "user_name", MatchType: MatchCaseInsensitive},
+			{Path: "email_notifications.on_start", Resource: "databricks_user",
+				Match: "user_name", MatchType: MatchCaseInsensitive},
+			{Path: "email_notifications.on_success", Resource: "databricks_user",
+				Match: "user_name", MatchType: MatchCaseInsensitive},
 			{Path: "email_notifications.on_streaming_backlog_exceeded", Resource: "databricks_user",
 				Match: "user_name", MatchType: MatchCaseInsensitive},
 			{Path: "task.library.whl", Resource: "databricks_repo", Match: "workspace_path",
@@ -503,188 +574,52 @@ var resourcesMap map[string]importable = map[string]importable{
 				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
 			{Path: "task.spark_submit_task.parameters", Resource: "databricks_repo", Match: "workspace_path",
 				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
-			{Path: "job_cluster.new_cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "workspace_path",
+			{Path: "task.for_each_task.task.library.whl", Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.new_cluster.init_scripts.workspace.destination",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.new_cluster.init_scripts.workspace.destination",
+				Resource: "databricks_repo", Match: "path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.notebook_task.base_parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.notebook_task.notebook_path", Resource: "databricks_repo", Match: "path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.notebook_task.notebook_path",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.python_wheel_task.named_parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.python_wheel_task.parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.run_job_task.job_parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.spark_python_task.python_file",
+				Resource: "databricks_repo", Match: "path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.spark_python_task.python_file",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "task.for_each_task.task.spark_jar_task.parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
 				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
-			{Path: "job_cluster.new_cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "path"},
+			{Path: "task.for_each_task.task.spark_submit_task.parameters",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "job_cluster.new_cluster.init_scripts.workspace.destination",
+				Resource: "databricks_repo", Match: "workspace_path",
+				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
+			{Path: "job_cluster.new_cluster.init_scripts.workspace.destination",
+				Resource: "databricks_repo", Match: "path"},
 			{Path: "parameter.default", Resource: "databricks_repo", Match: "workspace_path",
 				MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName},
 		},
-		Import: func(ic *importContext, r *resource) error {
-			var job jobs.JobSettingsResource
-			s := ic.Resources["databricks_job"].Schema
-			common.DataToStructPointer(r.Data, s, &job)
-			ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/jobs/%s", r.ID),
-				"job_"+ic.Importables["databricks_job"].Name(ic, r.Data))
-			for _, task := range job.Tasks {
-				if task.NotebookTask != nil {
-					if task.NotebookTask.Source != "GIT" {
-						ic.emitNotebookOrRepo(task.NotebookTask.NotebookPath)
-					}
-					ic.emitFilesFromMap(task.NotebookTask.BaseParameters)
-					if task.NotebookTask.WarehouseId != "" {
-						ic.Emit(&resource{
-							Resource: "databricks_sql_endpoint",
-							ID:       task.NotebookTask.WarehouseId,
-						})
-					}
-				}
-				if task.PipelineTask != nil {
-					ic.Emit(&resource{
-						Resource: "databricks_pipeline",
-						ID:       task.PipelineTask.PipelineId,
-					})
-				}
-				if task.SparkPythonTask != nil {
-					if task.SparkPythonTask.Source != "GIT" {
-						if strings.HasPrefix(task.SparkPythonTask.PythonFile, "dbfs:") {
-							ic.Emit(&resource{
-								Resource: "databricks_dbfs_file",
-								ID:       task.SparkPythonTask.PythonFile,
-							})
-						} else {
-							ic.emitWorkspaceFileOrRepo(task.SparkPythonTask.PythonFile)
-						}
-					}
-					ic.emitFilesFromSlice(task.SparkPythonTask.Parameters)
-				}
-				if task.PythonWheelTask != nil {
-					ic.emitFilesFromSlice(task.PythonWheelTask.Parameters)
-					ic.emitFilesFromMap(task.PythonWheelTask.NamedParameters)
-				}
-				if task.SparkJarTask != nil {
-					ic.emitFilesFromSlice(task.SparkJarTask.Parameters)
-				}
-				if task.SparkSubmitTask != nil {
-					ic.emitFilesFromSlice(task.SparkSubmitTask.Parameters)
-				}
-				if task.SqlTask != nil {
-					if task.SqlTask.Query != nil {
-						ic.Emit(&resource{
-							Resource: "databricks_query",
-							ID:       task.SqlTask.Query.QueryId,
-						})
-					}
-					if task.SqlTask.Dashboard != nil {
-						ic.Emit(&resource{
-							Resource: "databricks_sql_dashboard",
-							ID:       task.SqlTask.Dashboard.DashboardId,
-						})
-					}
-					if task.SqlTask.Alert != nil {
-						ic.Emit(&resource{
-							Resource: "databricks_alert",
-							ID:       task.SqlTask.Alert.AlertId,
-						})
-					}
-					if task.SqlTask.WarehouseId != "" {
-						ic.Emit(&resource{
-							Resource: "databricks_sql_endpoint",
-							ID:       task.SqlTask.WarehouseId,
-						})
-					}
-					if task.SqlTask.File != nil && task.SqlTask.File.Source == "WORKSPACE" {
-						ic.emitWorkspaceFileOrRepo(task.SqlTask.File.Path)
-					}
-				}
-				if task.DbtTask != nil {
-					if task.DbtTask.WarehouseId != "" {
-						ic.Emit(&resource{
-							Resource: "databricks_sql_endpoint",
-							ID:       task.DbtTask.WarehouseId,
-						})
-					}
-					if task.DbtTask.Source == "WORKSPACE" {
-						directory := task.DbtTask.ProjectDirectory
-						if ic.isInRepoOrGitFolder(directory, true) {
-							ic.emitRepoOrGitFolder(directory, true)
-						} else {
-							// Traverse the dbt project directory and emit all objects found in it
-							nbAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client)
-							objects, err := nbAPI.List(directory, true, true)
-							if err == nil {
-								for _, object := range objects {
-									if object.ObjectType != workspace.File {
-										continue
-									}
-									ic.maybeEmitWorkspaceObject("databricks_workspace_file", object.Path, &object)
-								}
-							} else {
-								log.Printf("[WARN] Can't list directory %s for DBT task in job %s (id: %s)", directory, job.Name, r.ID)
-							}
-						}
-					}
-				}
-				if task.RunJobTask != nil && task.RunJobTask.JobId != 0 {
-					ic.Emit(&resource{
-						Resource: "databricks_job",
-						ID:       strconv.FormatInt(task.RunJobTask.JobId, 10),
-					})
-					ic.emitFilesFromMap(task.RunJobTask.JobParameters)
-				}
-				ic.importCluster(task.NewCluster)
-				if task.ExistingClusterId != "" {
-					ic.Emit(&resource{
-						Resource: "databricks_cluster",
-						ID:       task.ExistingClusterId,
-					})
-				}
-				ic.emitLibraries(task.Libraries)
-
-				if task.WebhookNotifications != nil {
-					ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnFailure)
-					ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnSuccess)
-					ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnDurationWarningThresholdExceeded)
-					ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnStart)
-					ic.emitJobsDestinationNotifications(task.WebhookNotifications.OnStreamingBacklogExceeded)
-				}
-				if task.EmailNotifications != nil {
-					ic.emitListOfUsers(task.EmailNotifications.OnDurationWarningThresholdExceeded)
-					ic.emitListOfUsers(task.EmailNotifications.OnFailure)
-					ic.emitListOfUsers(task.EmailNotifications.OnStart)
-					ic.emitListOfUsers(task.EmailNotifications.OnSuccess)
-					ic.emitListOfUsers(task.EmailNotifications.OnStreamingBacklogExceeded)
-				}
-			}
-			for _, jc := range job.JobClusters {
-				ic.importCluster(&jc.NewCluster)
-			}
-			if job.RunAs != nil {
-				if job.RunAs.UserName != "" {
-					ic.Emit(&resource{
-						Resource:  "databricks_user",
-						Attribute: "user_name",
-						Value:     job.RunAs.UserName,
-					})
-				}
-				if job.RunAs.ServicePrincipalName != "" {
-					ic.Emit(&resource{
-						Resource:  "databricks_service_principal",
-						Attribute: "application_id",
-						Value:     job.RunAs.ServicePrincipalName,
-					})
-				}
-			}
-			if job.EmailNotifications != nil {
-				ic.emitListOfUsers(job.EmailNotifications.OnDurationWarningThresholdExceeded)
-				ic.emitListOfUsers(job.EmailNotifications.OnFailure)
-				ic.emitListOfUsers(job.EmailNotifications.OnStart)
-				ic.emitListOfUsers(job.EmailNotifications.OnSuccess)
-				ic.emitListOfUsers(job.EmailNotifications.OnStreamingBacklogExceeded)
-			}
-			if job.WebhookNotifications != nil {
-				ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnFailure)
-				ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnSuccess)
-				ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnDurationWarningThresholdExceeded)
-				ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnStart)
-				ic.emitJobsDestinationNotifications(job.WebhookNotifications.OnStreamingBacklogExceeded)
-			}
-			for _, param := range job.Parameters {
-				ic.emitIfWsfsFile(param.Default)
-				ic.emitIfVolumeFile(param.Default)
-			}
-
-			return ic.importLibraries(r.Data, s)
-		},
+		Import: importJob,
 		List: func(ic *importContext) error {
 			if l, err := jobs.NewJobsAPI(ic.Context, ic.Client).List(); err == nil {
 				ic.importJobs(l)