
Commit

Ignore type changes as mypy was bumped
Signed-off-by: cmuhao <[email protected]>
HaoXuAI committed Jul 18, 2024
1 parent 8f11a2d commit 374b4c2
Showing 11 changed files with 37 additions and 36 deletions.
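
The fix is mechanical: each line the newer mypy flags gets a "# type: ignore" comment. As a hedged aside (illustrative only, not code from this commit), the bracketed error-code form is usually the safer suppression, because mypy's --warn-unused-ignores can later report when a comment has become unnecessary:

    from typing import Optional

    def lookup(key: str) -> Optional[str]:
        return None

    # Without the comment, a stricter mypy reports:
    #   error: Incompatible types in assignment (expression has type
    #   "Optional[str]", variable has type "str")  [assignment]
    name: str = lookup("x")  # type: ignore[assignment]

    # A bare "# type: ignore" also works but hides every error on the
    # line; the error-code form suppresses only the named diagnostic.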
2 changes: 1 addition & 1 deletion databuilder/databuilder/extractor/feast_extractor.py
@@ -81,7 +81,7 @@ def _extract_feature_view(
ColumnMetadata(
feature.name,
None,
- feature.dtype.name,
+ feature.dtype.name, # type: ignore
len(feature_view.entities) + index,
)
)
@@ -71,7 +71,7 @@ def extract(self) -> Union[TableMetadata, None]:
def get_scope(self) -> str:
return 'extractor.kafka_schema_registry'

- def _get_extract_iter(self) -> Optional[Iterator[TableMetadata]]:
+ def _get_extract_iter(self) -> Optional[Iterator[TableMetadata]]: # type: ignore
"""
Return an iterator generating TableMetadata for all of the schemas.
"""
4 changes: 2 additions & 2 deletions databuilder/databuilder/extractor/salesforce_extractor.py
@@ -61,7 +61,7 @@ def _get_extract_iter(self) -> Iterator[TableMetadata]:
# Filter the sobjects if `OBJECT_NAMES_KEY` is set otherwise return all
sobjects = [
sobject
- for sobject in self._client.describe()["sobjects"]
+ for sobject in self._client.describe()["sobjects"] # type: ignore
if (len(self._object_names) == 0 or sobject["name"] in self._object_names)
]

@@ -71,7 +71,7 @@ def _get_extract_iter(self) -> Iterator[TableMetadata]:
f"({i+1}/{len(sobjects)}) Extracting SalesForce object ({object_name})"
)
data = self._client.restful(path=f"sobjects/{object_name}/describe")
- yield self._extract_table_metadata(object_name=object_name, data=data)
+ yield self._extract_table_metadata(object_name=object_name, data=data) # type: ignore

def _extract_table_metadata(
self, object_name: str, data: Dict[str, Any]
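
simple-salesforce responses are loosely typed, so describe() and restful() come back as broad types that the newer mypy rejects; the commit suppresses the errors at each call site. A sketch of one alternative, assuming the payloads really are dicts at runtime (the helper below is hypothetical, not part of the extractor), narrows the type with typing.cast instead:

    from typing import Any, Dict, List, cast

    def extract_sobjects(client: Any, wanted: List[str]) -> List[Dict[str, Any]]:
        # cast() is a no-op at runtime; it only tells mypy which shape we
        # expect the payload to have: {"sobjects": [{"name": ...}, ...]}.
        described = cast(Dict[str, Any], client.describe())
        sobjects = cast(List[Dict[str, Any]], described["sobjects"])
        return [s for s in sobjects if not wanted or s["name"] in wanted]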
6 changes: 3 additions & 3 deletions databuilder/databuilder/models/dashboard/dashboard_chart.py
@@ -142,11 +142,11 @@ def _create_record_iterator(self) -> Iterator[RDSModel]:
)
)
if self._chart_name:
- record.name = self._chart_name
+ record.name = self._chart_name # type: ignore
if self._chart_type:
- record.type = self._chart_type
+ record.type = self._chart_type # type: ignore
if self._chart_url:
- record.url = self._chart_url
+ record.url = self._chart_url # type: ignore

yield record

@@ -387,7 +387,7 @@ def _create_record_iterator(self) -> Iterator[RDSModel]:
cluster_rk=self._get_cluster_key()
)
if self.dashboard_group_url:
- dashboard_group_record.dashboard_group_url = self.dashboard_group_url
+ dashboard_group_record.dashboard_group_url = self.dashboard_group_url # type: ignore

yield dashboard_group_record

@@ -406,10 +406,10 @@ def _create_record_iterator(self) -> Iterator[RDSModel]:
dashboard_group_rk=self._get_dashboard_group_key()
)
if self.created_timestamp:
- dashboard_record.created_timestamp = self.created_timestamp
+ dashboard_record.created_timestamp = self.created_timestamp # type: ignore

if self.dashboard_url:
- dashboard_record.dashboard_url = self.dashboard_url
+ dashboard_record.dashboard_url = self.dashboard_url # type: ignore

yield dashboard_record

4 changes: 2 additions & 2 deletions databuilder/databuilder/models/dashboard/dashboard_query.py
@@ -128,9 +128,9 @@ def _create_record_iterator(self) -> Iterator[RDSModel]:
)
)
if self._url:
- record.url = self._url
+ record.url = self._url # type: ignore
if self._query_text:
- record.query_text = self._query_text
+ record.query_text = self._query_text # type: ignore

yield record

4 changes: 2 additions & 2 deletions databuilder/databuilder/models/user.py
@@ -197,10 +197,10 @@ def get_user_record(self) -> RDSModel:
# or the flag allows to update empty values
for attr, value in record_attr_map.items():
if value or not self.do_not_update_empty_attribute:
- record.__setattr__(attr.key, value)
+ record.__setattr__(attr.key, value) # type: ignore

if self.manager_email:
- record.manager_rk = self.get_user_model_key(email=self.manager_email)
+ record.manager_rk = self.get_user_model_key(email=self.manager_email) # type: ignore

return record

6 changes: 3 additions & 3 deletions databuilder/databuilder/publisher/mysql_csv_publisher.py
@@ -97,7 +97,7 @@ def _sort_record_files(self, files: List[str]) -> List[str]:
:param files:
:return:
"""
- sorted_table_names = [table.name for table in Base.metadata.sorted_tables]
+ sorted_table_names = [table.name for table in Base.metadata.sorted_tables] # type: ignore
return sorted(files, key=lambda file: sorted_table_names.index(self._get_table_name_from_file(file)))

def _get_table_name_from_file(self, file: str) -> str:
@@ -187,8 +187,8 @@ def _create_record(self, model: Type[RDSModel], record_dict: Dict) -> RDSModel:
:return:
"""
record = model(**record_dict)
- record.published_tag = self._publish_tag
- record.publisher_last_updated_epoch_ms = int(time.time() * 1000)
+ record.published_tag = self._publish_tag # type: ignore
+ record.publisher_last_updated_epoch_ms = int(time.time() * 1000) # type: ignore
return record

def _execute(self, session: Session) -> None:
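
Both this publisher and the staleness-removal task below rely on SQLAlchemy's MetaData.sorted_tables, which yields tables in foreign-key dependency order (parents before children). A minimal sketch with a toy schema, not the amundsen-rds models, of why that ordering matters:

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    parent = Table("parent", metadata, Column("id", Integer, primary_key=True))
    child = Table(
        "child",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("parent_id", Integer, ForeignKey("parent.id")),
    )

    # Dependency order: referenced tables come first.
    names = [t.name for t in metadata.sorted_tables]
    assert names == ["parent", "child"]

    # Publishing inserts in this order; staleness removal walks it in
    # reverse so dependent rows are deleted before the rows they reference.
    for name in reversed(names):
        print(f"deleting stale rows from {name}")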
12 changes: 6 additions & 6 deletions databuilder/databuilder/task/mysql_staleness_removal_task.py
@@ -50,11 +50,11 @@ class MySQLStalenessRemovalTask(Task):
MIN_MS_TO_EXPIRE = "minimum_milliseconds_to_expire"

_DEFAULT_CONFIG = ConfigFactory.from_dict({STALENESS_MAX_PCT: 5,
- TARGET_TABLES: [],
- STALENESS_PCT_MAX_DICT: {},
- MIN_MS_TO_EXPIRE: 86400000,
- DRY_RUN: False,
- ENGINE_ECHO: False})
+ TARGET_TABLES: [],
+ STALENESS_PCT_MAX_DICT: {},
+ MIN_MS_TO_EXPIRE: 86400000,
+ DRY_RUN: False,
+ ENGINE_ECHO: False})

def get_scope(self) -> str:
return 'task.mysql_remove_stale_data'
@@ -121,7 +121,7 @@ def run(self) -> None:
referenced tables data which will be deleted in a cascade delete)
:return:
"""
- sorted_table_names = [table.name for table in Base.metadata.sorted_tables]
+ sorted_table_names = [table.name for table in Base.metadata.sorted_tables] # type: ignore
sorted_target_tables = sorted(
self.target_tables, key=lambda table: sorted_table_names.index(table), reverse=True)
try:
2 changes: 1 addition & 1 deletion databuilder/requirements.txt
@@ -25,7 +25,7 @@ Jinja2>=2.10.0,<4
pandas>=0.21.0,<=2.2.2
responses>=0.10.6
jsonref==0.2
- numpy<2
+ numpy<2.0

amundsen-common>=0.16.0
amundsen-rds==0.0.8
@@ -52,8 +52,9 @@ def test_table_search(self,
table.schema = schema

table.description = TableDescription(rk='test_table_description_key', description='test_table_description')
- table.programmatic_descriptions = [TableProgrammaticDescription(rk='test_table_prog_description_key',
-                                                                 description='test_table_prog_description')]
+ table.programmatic_descriptions = [
+     TableProgrammaticDescription(rk='test_table_prog_description_key', # type: ignore
+                                  description='test_table_prog_description')]

table.timestamp = TableTimestamp(rk='test_table_timestamp_key', last_updated_timestamp=123456789)

@@ -64,17 +65,17 @@ def test_table_search(self,
description='test_col1_description')
column2.description = ColumnDescription(rk='test_col2_description_key',
description='test_col2_description')
- table.columns = [column1, column2, column3]
+ table.columns = [column1, column2, column3] # type: ignore

usage1 = TableUsage(user_rk='test_user1_key', table_rk='test_table_key', read_count=5)
usage2 = TableUsage(user_rk='test_user2_key', table_rk='test_table_key', read_count=10)
- table.usage = [usage1, usage2]
+ table.usage = [usage1, usage2] # type: ignore

tags = [Tag(rk='test_tag', tag_type='default')]
- table.tags = tags
+ table.tags = tags # type: ignore

badges = [Badge(rk='test_badge')]
- table.badges = badges
+ table.badges = badges # type: ignore

tables = [table]

@@ -198,21 +199,21 @@ def test_dashboard_search(self,
group.cluster = cluster

last_exec = DashboardExecution(rk='test_dashboard_exec_key/_last_successful_execution', timestamp=123456789)
- dashboard.execution = [last_exec]
+ dashboard.execution = [last_exec] # type: ignore

usage1 = DashboardUsage(user_rk='test_user1_key', dashboard_rk='test_dashboard_key', read_count=10)
usage2 = DashboardUsage(user_rk='test_user2_key', dashboard_rk='test_dashboard_key', read_count=5)
- dashboard.usage = [usage1, usage2]
+ dashboard.usage = [usage1, usage2] # type: ignore

query = DashboardQuery(rk='test_query_key', name='test_query')
- query.charts = [DashboardChart(rk='test_chart_key', name='test_chart')]
- dashboard.queries = [query]
+ query.charts = [DashboardChart(rk='test_chart_key', name='test_chart')] # type: ignore
+ dashboard.queries = [query] # type: ignore

tags = [Tag(rk='test_tag', tag_type='default')]
- dashboard.tags = tags
+ dashboard.tags = tags # type: ignore

badges = [Badge(rk='test_badge')]
- dashboard.badges = badges
+ dashboard.badges = badges # type: ignore

dashboards = [dashboard]


