forked from RealImpactAnalytics/airflow
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
integrating config parser to read config values from a file
- Loading branch information
Krishna Puttaswamy
committed
Nov 7, 2014
1 parent
cc94aa0
commit 0a673b4
Showing
16 changed files
with
164 additions
and
83 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
[core] | ||
AIRFLOW_HOME: /Users/krishna_puttaswamy/workspace/Airflow | ||
BASE_LOG_FOLDER: %(AIRFLOW_HOME)s/logs | ||
DAGS_FOLDER: %(AIRFLOW_HOME)s/dags | ||
BASE_FOLDER: %(AIRFLOW_HOME)s/airflow | ||
|
||
[server] | ||
WEB_SERVER_HOST: 0.0.0.0 | ||
WEB_SERVER_PORT: 8080 | ||
|
||
[smpt] | ||
SMTP_HOST: 'localhost' | ||
SMTP_PORT: 25 | ||
SMTP_PASSWORD: None | ||
SMTP_MAIL_FROM: '[email protected]' | ||
|
||
[celery] | ||
CELERY_APP_NAME: airflow.executors.celery_worker | ||
CELERY_BROKER: amqp | ||
CELERY_RESULTS_BACKEND: amqp:// | ||
|
||
[hooks] | ||
HIVE_HOME_PY: '/usr/lib/hive/lib/py' | ||
PRESTO_DEFAULT_DBID: presto_default | ||
HIVE_DEFAULT_DBID: hive_default | ||
|
||
[misc] | ||
RUN_AS_MASTER: True | ||
JOB_HEARTBEAT_SEC: 5 | ||
# Used for dag_id and task_id VARCHAR length | ||
ID_LEN: 250 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,57 @@ | ||
import logging | ||
import os | ||
|
||
from ConfigParser import ConfigParser, NoOptionError, NoSectionError | ||
|
||
class AirflowConfigParser(ConfigParser): | ||
NO_DEFAULT = object() | ||
_instance = None | ||
_config_paths = ['airflow.cfg'] | ||
if 'AIRFLOW_CONFIG_PATH' in os.environ: | ||
_config_paths.append(os.environ['AIRFLOW_CONFIG_PATH']) | ||
logging.info("Config paths is " + str(_config_paths)) | ||
print("Config paths is " + str(_config_paths)) | ||
|
||
@classmethod | ||
def add_config_paths(cls, path): | ||
cls._config_paths.append(path) | ||
cls.reload() | ||
|
||
@classmethod | ||
def instance(cls, *args, **kwargs): | ||
if cls._instance is None: | ||
cls._instance = cls(*args, **kwargs) | ||
cls._instance.reload() | ||
|
||
return cls._instance | ||
|
||
@classmethod | ||
def reload(cls): | ||
loaded_obj = cls.instance().read(cls._config_paths) | ||
logging.info("the config object after loading is " + str(loaded_obj)) | ||
return loaded_obj | ||
|
||
def get_with_default(self, method, section, option, default): | ||
try: | ||
return method(self, section, option) | ||
except (NoOptionError, NoSectionError): | ||
if default is AirflowConfigParser.NO_DEFAULT: | ||
raise | ||
return default | ||
|
||
def get(self, section, option, default=NO_DEFAULT): | ||
return self.get_with_default(ConfigParser.get, section, option, default) | ||
|
||
def getint(self, section, option, default=NO_DEFAULT): | ||
return self.get_with_default(ConfigParser.getint, section, option, default) | ||
|
||
def getboolean(self, section, option, default=NO_DEFAULT): | ||
return self.get_with_default(ConfigParser.getboolean, section, option, default) | ||
|
||
def set(self, section, option, value): | ||
if not ConfigParser.has_section(self, section): | ||
ConfigParser.add_section(self, section) | ||
return ConfigParser.set(self, section, option, value) | ||
|
||
def get_config():
    """Module-level convenience accessor for the shared config parser."""
    parser = AirflowConfigParser.instance()
    return parser
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,8 +1,9 @@ | ||
from airflow import settings | ||
from airflow import configuration | ||
|
||
|
||
def max_partition(table, schema="default", hive_dbid=None):
    """Return the maximum partition value for a Hive table.

    Args:
        table: Hive table name.
        schema: Hive schema/database name (defaults to "default").
        hive_dbid: Hive connection id. When None, resolved from the
            [hooks] HIVE_DEFAULT_DBID config entry at call time — the
            original evaluated this lookup in the default-argument
            expression, which runs once at module import and breaks the
            import when the config entry is absent.
    """
    # Imported lazily to avoid a circular import at module load time.
    from airflow.hooks.hive_hook import HiveHook
    if hive_dbid is None:
        hive_dbid = configuration.get_config().get('hooks', 'HIVE_DEFAULT_DBID')
    hh = HiveHook(hive_dbid=hive_dbid)
    return hh.max_partition(schema=schema, table=table)
Oops, something went wrong.