diff --git a/standalones/multi_domain_failed_jobs/.primary_servers.json.swp b/standalones/multi_domain_failed_jobs/.primary_servers.json.swp
new file mode 100644
index 0000000..359a407
Binary files /dev/null and b/standalones/multi_domain_failed_jobs/.primary_servers.json.swp differ
diff --git a/standalones/multi_domain_failed_jobs/.primary_servers.json.un~ b/standalones/multi_domain_failed_jobs/.primary_servers.json.un~
new file mode 100644
index 0000000..0978fff
Binary files /dev/null and b/standalones/multi_domain_failed_jobs/.primary_servers.json.un~ differ
diff --git a/standalones/multi_domain_failed_jobs/README.md b/standalones/multi_domain_failed_jobs/README.md
new file mode 100644
index 0000000..21e3bc8
--- /dev/null
+++ b/standalones/multi_domain_failed_jobs/README.md
@@ -0,0 +1,52 @@
+### Multi-domain failed jobs
+
+Scripts that run on the NetBackup primary server to generate a failed job listing have been around from the beginning. However, many customer environments restrict remotely executing scripts as root for security reasons. With the NetBackup APIs we can now gather failed job information from multiple NetBackup domains without having to remotely execute any commands.
+
+#### Disclaimer
+These examples are only meant to be used as a reference. Please do not use these in production.
+
+#### Pre-requisites
+
+- Tested with NetBackup 9.1
+- For the PowerShell script, tested with the following
+  - PowerShell 5.1
+- For the Python script, tested with the following
+  - Python 3.6.8
+- API user with an API key generated, associated with a role having these permissions
+  - For the cli_failed_jobs scripts
+    - Manage -> Jobs -> View
+
+#### The primary_servers.json file
+
+Both scripts read the list of NetBackup primary servers, and the API key to use for each one, from a file named primary_servers.json in the current working directory.
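+
+A minimal example covering two NetBackup domains looks like this (the server names and API keys below are placeholders; substitute the primary servers in your environment and the API keys generated for them through the NetBackup web UI):
+
+```
+{
+    "primaryServers": [
+        {
+            "name": "nbu-primary01.example.com",
+            "apikey": "API_KEY_FOR_PRIMARY01"
+        },
+        {
+            "name": "nbu-primary02.example.com",
+            "apikey": "API_KEY_FOR_PRIMARY02"
+        }
+    ]
+}
+```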
+
+#### Executing cli_failed_jobs
+
+The PowerShell script is not signed so you may encounter errors trying to run it. You can use the PowerShell cmdlet [Set-ExecutionPolicy](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.security/set-executionpolicy?view=powershell-7) to adjust your environment to allow running unsigned PowerShell scripts.
+
+To execute, run the command like this:
+
+```
+cli_failed_jobs.ps1 [-v]
+OR
+cli_failed_jobs.py3 [-v]
+```
+
+The optional -v option will provide additional information during the processing. Without the -v option, the scripts only print the resulting job listing.
+
+For each primary server listed in primary_servers.json, the scripts call the "GET /admin/jobs" API with a filter for completed backup jobs that ended with a non-zero status, page through the results, and print one line per failed job showing the primary server, job ID, status code, job type, client, policy and schedule.
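+
+For reference, the request each script issues per primary server boils down to the minimal Python sketch below (the primary server name and API key are placeholders, and certificate verification is skipped just as in the sample scripts):
+
+```
+import requests
+requests.packages.urllib3.disable_warnings()
+
+# Placeholder values -- substitute your own primary server and API key
+primary = "nbu-primary01.example.com"
+apikey = "API_KEY_FOR_PRIMARY01"
+
+response = requests.get(
+    "https://" + primary + "/netbackup/admin/jobs/",
+    headers={
+        "Authorization": apikey,
+        "Accept": "application/vnd.netbackup+json;version=6.0"
+    },
+    params={
+        "page[limit]": 100,
+        "filter": "status gt 0 and state eq 'DONE' and jobType eq 'BACKUP'",
+        "sort": "-jobId"
+    },
+    verify=False    # like the sample scripts, skip certificate verification
+)
+
+for job in response.json().get("data", []):
+    print(job["attributes"]["jobId"], job["attributes"]["status"], job["attributes"]["clientName"])
+```
+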
diff --git a/standalones/multi_domain_failed_jobs/cli_failed_jobs.ps1 b/standalones/multi_domain_failed_jobs/cli_failed_jobs.ps1
new file mode 100644
index 0000000..afd3fad
--- /dev/null
+++ b/standalones/multi_domain_failed_jobs/cli_failed_jobs.ps1
@@ -0,0 +1,201 @@
+<#
+# .SYNOPSIS
+# This sample script reads a JSON data file to get a list of failed backup
+# jobs from the primary servers.
+#
+# .DESCRIPTION
+# This script will read the JSON data file to get a list of NetBackup primary
+# servers with valid API keys. For each primary server, the "GET /admin/jobs"
+# API will be executed with a filter to get just failed jobs.
+#
+# .EXAMPLE
+# ./cli_failed_jobs.ps1 [ -v ]
+#
+# Requirements and comments for running this script
+#   Tested with PowerShell 5.1
+#   Tested with NetBackup 9.1
+#   API key user must have the following minimum privileges assigned to its role:
+#     Manage -> Jobs -> View
+#>
+
+#####################################################################
+# Getting the various command line parameters
+#####################################################################
+param (
+    [switch]$v = $false
+)
+$verbose=$v
+
+#####################################################################
+# Initial Setup
+# Note: This allows self-signed certificates and enables TLS v1.2
+#####################################################################
+
+function InitialSetup()
+{
+    # Allow self-signed certificates
+    if ([System.Net.ServicePointManager]::CertificatePolicy -notlike 'TrustAllCertsPolicy')
+    {
+        Add-Type -TypeDefinition @"
+        using System.Net;
+        using System.Security.Cryptography.X509Certificates;
+        public class TrustAllCertsPolicy : ICertificatePolicy {
+            public bool CheckValidationResult(
+                ServicePoint srvPoint, X509Certificate certificate,
+                WebRequest request, int certificateProblem) {
+                return true;
+            }
+        }
+"@
+        [System.Net.ServicePointManager]::CertificatePolicy = New-Object -TypeName TrustAllCertsPolicy
+
+        # Force TLS v1.2
+        try {
+            if ([Net.ServicePointManager]::SecurityProtocol -notmatch 'Tls12') {
+                [Net.ServicePointManager]::SecurityProtocol += [Net.SecurityProtocolType]::Tls12
+            }
+        }
+        catch {
+            Write-Host $_.Exception.InnerException.Message
+        }
+    }
+}
+
+InitialSetup
+
+################################################################
+# Setting some variables to be used through the rest of the processing
+################################################################
+$primary_file = "primary_servers.json"
+$page_limit=100     # 100 is the maximum number to retrieve at a time
+$content_type = "application/vnd.netbackup+json;version=6.0"
+if ( $verbose ) {
+    Write-Host "Using $primary_file for server and API keys"
+    Write-Host "Collecting $page_limit jobs at a time"
+    Write-Host
+}
+
+################################################################
+# Reading the primary_servers.json file to get list of
+# NBU primary servers and API keys to use for authorization
+################################################################
+if ( -not (Test-Path -Path $primary_file -PathType Leaf)) {
+    throw "Specified file $primary_file does not exist"
+}
+
+$primary_data = Get-Content -Raw -Path $primary_file | ConvertFrom-Json
+
+################################################################
+# Loop through all the primary servers getting a list of
+# failed jobs
+################################################################
+$table = @("Master       JobID   Status  Type     Client          Policy               Schedule")
+foreach ( $data in $primary_data.primaryServers ) {
+    if ( $verbose ) {
+        Write-Host "Getting job data from"$data.name"with"$data.apikey
+    }
+
+    ####################################
+    # Build out the HTTP request details
+    $uri = "https://" + $data.name + "/netbackup/admin/jobs/"
+    $query_params= @{
+        "page[limit]" = $page_limit
+        "filter" = "status gt 0 and state eq 'DONE' and jobType eq 'BACKUP'"
+        "sort" = "-jobId"     # Sorting by job ID in descending order
+    }
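+    # Note on the filter above: NetBackup status code 0 means the job
+    # succeeded, so "status gt 0" selects every completed ("DONE") backup
+    # job that ended with a non-zero status. This also includes partially
+    # successful jobs (status 1), not just hard failures.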
+    $header = @{
+        "Authorization" = $data.apikey
+        "Accept" = $content_type
+    }
+    if ( $verbose ) {
+        Write-Host "Getting list of jobs from $($data.name)"
+        Write-Host "Using URI $uri"
+    }
+
+    ####################################
+    # Make the job API call
+    $response = Invoke-WebRequest `
+                -Uri $uri `
+                -Method GET `
+                -Body $query_params `
+                -ContentType $content_type `
+                -Headers $header
+
+    if ($response.StatusCode -ne 200) {
+        Write-Host "Unable to get the list of NetBackup jobs!"
+        throw "API status code = "+$response.StatusCode
+    }
+
+    ####################################
+    # Collecting data into table array
+    $job_data=(ConvertFrom-Json -InputObject $response)
+
+    if ( $job_data.data -eq $null ) {
+        $table+="No failed backup jobs found for $($data.name)"
+        continue
+    } else {
+        foreach ($job in $job_data.data) {
+            $line=(($data.name).PadRight(12," ")).subString(0,12)+" "
+            $line+=(($job.attributes.jobId).ToString()).PadRight(7," ")+" "
+            $line+=(($job.attributes.status).ToString()).PadRight(7," ")+" "
+            $line+=($job.attributes.jobType).PadRight(8," ")+" "
+            $line+=(($job.attributes.clientName).PadRight(15," ")).subString(0,15)+" "
+            $line+=(($job.attributes.policyName).PadRight(20," ")).subString(0,20)+" "
+            $line+=(($job.attributes.scheduleName).PadRight(20," ")).subString(0,20)+" "
+            $table+=$line
+        }
+    }
+
+    # If the first call to jobs generates more data than page_limit,
+    # then loop through until finished collecting all the pages of jobs
+    if ( $job_data.links.next -ne $null ) {
+        ####################################
+        # Getting the next page URI
+        $next_uri=$job_data.links.next.href
+
+        while ($true) {
+            ####################################
+            # Make the job API call
+            $response = Invoke-WebRequest `
+                        -Uri $next_uri `
+                        -Method GET `
+                        -ContentType $content_type `
+                        -Headers $header
+
+            if ($response.StatusCode -ne 200) {
+                Write-Host "Unable to get the list of NetBackup jobs!"
+                throw "API status code = "+$response.StatusCode
+            }
+
+            ####################################
+            # Add this page of jobs to the table array
+            $job_data=(ConvertFrom-Json -InputObject $response)
+            foreach ($job in $job_data.data) {
+                $line=(($data.name).PadRight(12," ")).subString(0,12)+" "
+                $line+=(($job.attributes.jobId).ToString()).PadRight(7," ")+" "
+                $line+=(($job.attributes.status).ToString()).PadRight(7," ")+" "
+                $line+=($job.attributes.jobType).PadRight(8," ")+" "
+                $line+=(($job.attributes.clientName).PadRight(15," ")).subString(0,15)+" "
+                $line+=(($job.attributes.policyName).PadRight(20," ")).subString(0,20)+" "
+                $line+=(($job.attributes.scheduleName).PadRight(20," ")).subString(0,20)+" "
+                $table+=$line
+            }
+
+            ####################################
+            # Break out of the pagination loop
+            # if there is no next href page
+            if ( $job_data.links.next -ne $null) {
+                $next_uri=$job_data.links.next.href
+            } else {
+                break
+            }
+        }
+    }
+}
+
+##################################
+# Finally output the built table with all the job information
+##################################
+$table | Format-Table -AutoSize
\ No newline at end of file
diff --git a/standalones/multi_domain_failed_jobs/cli_failed_jobs.py3 b/standalones/multi_domain_failed_jobs/cli_failed_jobs.py3
new file mode 100644
index 0000000..1d491cf
--- /dev/null
+++ b/standalones/multi_domain_failed_jobs/cli_failed_jobs.py3
@@ -0,0 +1,150 @@
+#!/usr/bin/python3
+#
+# SYNOPSIS
+# This sample script reads a JSON data file to get a list of failed backup
+# jobs from the primary servers.
+#
+# DESCRIPTION
+# This script will read the JSON data file to get a list of NetBackup primary
+# servers with valid API keys. For each primary server, the "GET /admin/jobs"
+# API will be executed with a filter to get just failed jobs.
+#
+# EXAMPLE
+# ./cli_failed_jobs.py3 [ -v ]
+#
+# Requirements and comments for running this script
+#   Tested with Python 3.8
+#   Tested with NetBackup 9.1
+#   API key user must have the following minimum privileges assigned to its role:
+#     Manage -> Jobs -> View
+
+import sys
+import argparse
+from datetime import datetime, timedelta
+import requests
+requests.packages.urllib3.disable_warnings()
+from urllib.parse import quote
+from urllib.parse import urlencode
+import json
+from os.path import exists
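+
+# NOTE: urllib3 warnings are silenced above and verify=False is passed to the
+# requests.get() calls below because the sample environment uses self-signed
+# certificates on the primary servers (the PowerShell version does the same in
+# its InitialSetup function). If your primary servers use CA-signed
+# certificates, passing a CA bundle path via the verify parameter is an
+# option -- that is a suggestion, not part of the original sample.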
+
+################################################################
+# Parsing the command line arguments
+################################################################
+parser = argparse.ArgumentParser()
+parser.add_argument("-v", dest='verbose', action="store_true", help="verbose output for debugging")
+cli_args=parser.parse_args()
+
+if cli_args.verbose :
+    print("Verbose?: {}.".format(cli_args.verbose))
+
+################################################################
+# Setting some variables to be used through the rest of the processing
+################################################################
+primary_file = "primary_servers.json"
+page_limit=100      # 100 is the maximum number to retrieve at a time
+content_type = "application/vnd.netbackup+json;version=6.0"
+if cli_args.verbose :
+    print("Using {} for server and API keys".format(primary_file))
+    print("Collecting {} jobs at a time".format(page_limit))
+    print("")
+
+
+################################################################
+# Reading the primary_servers.json file to get list of
+# NBU primary servers and API keys to use for authorization
+################################################################
+if not exists(primary_file) :
+    print("Specified file {} does not exist".format(primary_file))
+    sys.exit()
+
+with open(primary_file) as json_file :
+    primary_data = json.load(json_file)
+
+
+################################################################
+# Loop through all the primary servers getting a list of
+# failed jobs
+################################################################
+print("Master       JobID   Status  Type     Client          Policy               Schedule")
+for server in primary_data['primaryServers'] :
+    if cli_args.verbose:
+        print("Getting job data from",server['name'])
+
+    ####################################
+    # Build out the HTTP request details
+    uri = "https://" + server['name'] + "/netbackup/admin/jobs/"
+    query_params= {
+        "page[limit]": page_limit,
+        "filter": "status gt 0 and state eq 'DONE' and jobType eq 'BACKUP'",
+        "sort": "-jobId"     # Sorting by job ID in descending order
+    }
+    header = {
+        "Authorization": server['apikey'],
+        "Accept": content_type
+    }
+    if cli_args.verbose :
+        print("Getting list of jobs from {}".format(server['name']))
+        print("Using URI {}".format(uri))
+
+    ####################################
+    # Make the job API call
+    response = requests.get(uri, headers=header, params=query_params, verify=False)
+    if response.status_code != 200 :
+        print("Unable to get the list of NetBackup jobs!")
+        print("API status code = {}".format(response.status_code))
+        sys.exit()
+
+    ####################################
+    # Printing out this batch of jobs
+    tjson=response.json()
+    if not "data" in tjson :
+        print("No failed backup jobs found for {}".format(server['name']))
+        continue
+    else :
+        for job in tjson['data'] :
+            print("{:<12s} ".format(server['name']), end='')
+            print("{:<7s} ".format(str(job['attributes']['jobId'])), end='')
+            print("{:<7s} ".format(str(job['attributes']['status'])), end='')
+            print("{:<8s} ".format(job['attributes']['jobType'][:8]), end='')
+            print("{:<15s} ".format(job['attributes']['clientName'][:15]), end='')
+            print("{:<20s} ".format(job['attributes']['policyName'][:20]), end='')
+            print("{:<20s}".format(job['attributes']['scheduleName'][:20]))
+
+    # If the first call to jobs generates more data than page_limit,
+    # then loop through until finished collecting all the pages of jobs
+    if "next" in tjson['links'] :
+        ####################################
+        # Getting the next page URI
+        next_uri=tjson['links']['next']['href']
+
+        while True :
+            ####################################
+            # Make the job API call
+            response = requests.get(next_uri, headers=header, verify=False)
+            if response.status_code != 200 :
+                print("Unable to get the list of NetBackup jobs!")
+                print("API status code = {}".format(response.status_code))
+                sys.exit()
+
+            ####################################
+            # Print this page of jobs
+            tjson=response.json()
+            for job in tjson['data'] :
+                print("{:<12s} ".format(server['name']), end='')
+                print("{:<7s} ".format(str(job['attributes']['jobId'])), end='')
+                print("{:<7s} ".format(str(job['attributes']['status'])), end='')
+                print("{:<8s} ".format(job['attributes']['jobType'][:8]), end='')
+                print("{:<15s} ".format(job['attributes']['clientName'][:15]), end='')
+                print("{:<20s} ".format(job['attributes']['policyName'][:20]), end='')
+                print("{:<20s}".format(job['attributes']['scheduleName'][:20]))
+
+            ####################################
+            # Break out of the pagination loop
+            # if there is no next href page
+            if "next" in tjson['links'] :
+                next_uri=tjson['links']['next']['href']
+            else :
+                break
+
+sys.exit()
diff --git a/standalones/multi_domain_failed_jobs/primary_servers.json b/standalones/multi_domain_failed_jobs/primary_servers.json
new file mode 100644
index 0000000..e03d24b
--- /dev/null
+++ b/standalones/multi_domain_failed_jobs/primary_servers.json
@@ -0,0 +1,12 @@
+{
+    "primaryServers": [
+        {
+            "name": "nbu-master01.tcdemolab.ros2100.veritas.com",
+            "apikey": "AyFSuBOtjYIiUi3yhF5NznXPfjYkpmn_cfSskWrJ7g_tfnIhePsbTgphgF7K5yWM"
+        },
+        {
+            "name": "nbu-master02.tcdemolab.ros2100.veritas.com",
+            "apikey": "A6XhWkpsHmaI-0C6hymo4R31CB_dRO4MQeT5ZzPMdidBw8_j_TupbSxHXtdCEN5L"
+        }
+    ]
+}
diff --git a/standalones/multi_domain_failed_jobs/primary_servers.json~ b/standalones/multi_domain_failed_jobs/primary_servers.json~
new file mode 100644
index 0000000..152944e
--- /dev/null
+++ b/standalones/multi_domain_failed_jobs/primary_servers.json~
@@ -0,0 +1,12 @@
+{
+    "primaryServers": [
+        {
+            "name": "nbu-master01",
+            "apikey": "AyFSuBOtjYIiUi3yhF5NznXPfjYkpmn_cfSskWrJ7g_tfnIhePsbTgphgF7K5yWM"
+        },
+        {
+            "name": "nbu-master02",
+            "apikey": "A6XhWkpsHmaI-0C6hymo4R31CB_dRO4MQeT5ZzPMdidBw8_j_TupbSxHXtdCEN5L"
+        }
+    ]
+}