diff --git a/yaetos/deploy.py b/yaetos/deploy.py index b2c7ce89..d8654ae1 100644 --- a/yaetos/deploy.py +++ b/yaetos/deploy.py @@ -98,7 +98,7 @@ def run(self): self.run_direct() elif self.deploy_args['deploy'] in ('EMR_Scheduled', 'EMR_DataPipeTest'): self.run_aws_data_pipeline() - elif self.deploy_args['deploy'] in ('EMR_Scheduled_AWSAF', 'AirflowTest'): + elif self.deploy_args['deploy'] in ('airflow',): self.run_aws_airflow() elif self.deploy_args['deploy'] in ('code'): self.run_push_code() diff --git a/yaetos/etl_utils.py b/yaetos/etl_utils.py index f23a39b6..6c8899bd 100644 --- a/yaetos/etl_utils.py +++ b/yaetos/etl_utils.py @@ -806,7 +806,7 @@ def __init__(self, defaults_args, yml_args, job_args, cmd_args, job_name=None, l args.update(job_args) args.update(cmd_args) args.update({'job_name': job_name} if job_name else {}) - args['mode'] = 'dev_EMR' if args['mode'] == 'dev_local' and args['deploy'] in ('EMR', 'EMR_Scheduled', 'EMR_Scheduled_AWSAF') else args['mode'] + args['mode'] = 'dev_EMR' if args['mode'] == 'dev_local' and args['deploy'] in ('EMR', 'EMR_Scheduled', 'airflow') else args['mode'] assert 'job_name' in args.keys() yml_args = Job_Yml_Parser(args['job_name'], args['job_param_file'], args['mode'], args.get('skip_job', False)).yml_args @@ -816,7 +816,7 @@ def __init__(self, defaults_args, yml_args, job_args, cmd_args, job_name=None, l args.update(yml_args) args.update(job_args) args.update(cmd_args) - args['mode'] = 'dev_EMR' if args['mode'] == 'dev_local' and args['deploy'] in ('EMR', 'EMR_Scheduled', 'EMR_Scheduled_AWSAF') else args['mode'] + args['mode'] = 'dev_EMR' if args['mode'] == 'dev_local' and args['deploy'] in ('EMR', 'EMR_Scheduled', 'airflow') else args['mode'] args = self.update_args(args, loaded_inputs) [setattr(self, key, value) for key, value in args.items()] # attach vars to self.* @@ -954,7 +954,7 @@ def run(self): # Executing or deploying if job.jargs.deploy in ('none'): # when executing job code job = self.launch_run_mode(job) - 
elif job.jargs.deploy in ('EMR', 'EMR_Scheduled', 'EMR_Scheduled_AWSAF', 'code'): # when deploying to AWS for execution there + elif job.jargs.deploy in ('EMR', 'EMR_Scheduled', 'airflow', 'code'): # when deploying to AWS for execution there self.launch_deploy_mode(job.jargs.get_deploy_args(), job.jargs.get_app_args()) return job @@ -972,7 +972,7 @@ def define_commandline_args(): # Defined here separatly from parsing for overridability. # Defaults should not be set in parser so they can be set outside of command line functionality. parser = argparse.ArgumentParser() - parser.add_argument("-d", "--deploy", choices=set(['none', 'EMR', 'EMR_Scheduled', 'EMR_Scheduled_AWSAF', 'EMR_DataPipeTest', 'code']), help="Choose where to run the job.") + parser.add_argument("-d", "--deploy", choices=set(['none', 'EMR', 'EMR_Scheduled', 'airflow', 'EMR_DataPipeTest', 'code']), help="Choose where to run the job.") parser.add_argument("-m", "--mode", choices=set(['dev_local', 'dev_EMR', 'prod_EMR']), help="Choose which set of params to use from jobs_metadata.yml file.") parser.add_argument("-j", "--job_param_file", help="Identify file to use. It can be set to 'False' to not load any file and provide all parameters through job or command line arguments.") parser.add_argument("-n", "--job_name", help="Identify registry job to use.")