Skip to content

Commit

Permalink
Stringify the complex type annotations (for backwards compatibility with Python <= 3.9)
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 561125589
  • Loading branch information
samihaija authored and mangpo committed Aug 29, 2023
1 parent d4f3fc2 commit 422a990
Show file tree
Hide file tree
Showing 4 changed files with 37 additions and 37 deletions.
30 changes: 15 additions & 15 deletions tpu_graphs/baselines/layout/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,30 +232,30 @@ def __init__(self):
self._num_node_splits: list[int] = [0] # ^^

# Populated in `finalize()`.
self.node_feat: tf.Tensor | None = None # indexed by node_ranges.
self.node_opcode: tf.Tensor | None = None # ^^
self.edge_index: tf.Tensor | None = None # indexed by edge_ranges.
self.config_runtime: tf.Tensor | None = None # indexed by config_ranges.
self.node_feat: 'tf.Tensor | None' = None # indexed by node_ranges.
self.node_opcode: 'tf.Tensor | None' = None # ^^
self.edge_index: 'tf.Tensor | None' = None # indexed by edge_ranges.
self.config_runtime: 'tf.Tensor | None' = None # indexed by config_ranges.
self.argsort_config_runtime: tf.Tensor|None = None # by flat_config_ranges.
self.graph_id: tf.Tensor | None = None
self.graph_id: 'tf.Tensor | None' = None
# indexed by config_ranges and config_node_ranges
self.node_config_feat: tf.Tensor | None = None
self.node_config_feat: 'tf.Tensor | None' = None

# finalize() sets to: cumsum([0, numEdges(graph_1), numEdges(graph_2), ..]).
self.edge_ranges: tf.Tensor | None = None
self.edge_ranges: 'tf.Tensor | None' = None
# finalize() sets to: cumsum([0, numNodes(graph_1), numNodes(graph_2), ..]).
self.node_ranges: tf.Tensor | None = None
self.node_ranges: 'tf.Tensor | None' = None
# finalize() sets to: cumsum([0, numConfigs(graph_1), nCfgs(graph_2), ..]).
self.config_ranges: tf.Tensor | None = None
self.config_ranges: 'tf.Tensor | None' = None
# finalize() sets to: cumsum([0, numModules(graph_1), nModul(graph_2), ..]).
self.config_node_ranges: tf.Tensor | None = None
self.config_node_ranges: 'tf.Tensor | None' = None
# _compute_flat_config_ranges (via finalize() and load_from_file()) sets to:
# cumsum([0, numConfigs(graph_1) * numModules(graph_1), ... ])
self.flat_config_ranges: tf.Tensor | None = None
self.flat_config_ranges: 'tf.Tensor | None' = None

self.node_split_ranges: tf.Tensor | None = None
self.node_splits: tf.Tensor | None = None
self.node_config_ids: tf.Tensor | None = None
self.node_split_ranges: 'tf.Tensor | None' = None
self.node_splits: 'tf.Tensor | None' = None
self.node_config_ids: 'tf.Tensor | None' = None

def save_to_file(self, cache_file: str):
"""Saves dataset as numpy. Can be restored with `load_from_file`."""
Expand Down Expand Up @@ -563,7 +563,7 @@ def get_npz_split(

def get_npz_dataset(
root_path: str, min_train_configs=-1, max_train_configs=-1,
cache_dir: None | str = None) -> NpzDataset:
cache_dir: 'None | str' = None) -> NpzDataset:
"""Returns {train, test, validation} partitions of layout dataset collection.
All partitions will be normalized: statistics are computed from training set
Expand Down
22 changes: 11 additions & 11 deletions tpu_graphs/baselines/tiles/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,20 +151,20 @@ def __init__(self):
self._num_nodes: list[int] = [0] # ^^

# Populated in `finalize()`.
self.node_feat: tf.Tensor | None = None # indexed by node_ranges.
self.node_opcode: tf.Tensor | None = None # ^^
self.edge_index: tf.Tensor | None = None # indexed by edge_ranges.
self.config_feat: tf.Tensor | None = None # indexed by config_ranges.
self.config_runtime: tf.Tensor | None = None # ^^
self.config_runtime_normalizers: tf.Tensor | None = None # ^^
self.tile_id: tf.Tensor | None = None
self.node_feat: 'tf.Tensor | None' = None # indexed by node_ranges.
self.node_opcode: 'tf.Tensor | None' = None # ^^
self.edge_index: 'tf.Tensor | None' = None # indexed by edge_ranges.
self.config_feat: 'tf.Tensor | None' = None # indexed by config_ranges.
self.config_runtime: 'tf.Tensor | None' = None # ^^
self.config_runtime_normalizers: 'tf.Tensor | None' = None # ^^
self.tile_id: 'tf.Tensor | None' = None

# finalize() sets to: cumsum([0, numEdges(graph_1), numEdges(graph_2), ..]).
self.edge_ranges: tf.Tensor | None = None
self.edge_ranges: 'tf.Tensor | None' = None
# finalize() sets to: cumsum([0, numNodes(graph_1), numNodes(graph_2), ..]).
self.node_ranges: tf.Tensor | None = None
self.node_ranges: 'tf.Tensor | None' = None
# finalize() sets to: cumsum([0, numModules(graph_1), nModul(graph_2), ..]).
self.config_ranges: tf.Tensor | None = None
self.config_ranges: 'tf.Tensor | None' = None

def save_to_file(self, cache_file: str):
"""Saves dataset as numpy. Can be restored with `load_from_file`."""
Expand Down Expand Up @@ -378,7 +378,7 @@ def normalize(self):

def get_npz_dataset(
root_path: str, min_train_configs=-1,
cache_dir: None | str = None) -> NpzDataset:
cache_dir: 'None | str' = None) -> NpzDataset:
"""Returns {train, test, validation} partitions of tiles dataset collection.
All partitions will be normalized: statistics are computed from training set
Expand Down
20 changes: 10 additions & 10 deletions tpu_graphs/baselines/tiles/implicit.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def rmatmul(self, mat: tf.Tensor) -> tf.Tensor:
raise NotImplementedError()

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
raise NotImplementedError()

def __matmul__(self, mat: tf.Tensor) -> tf.Tensor:
Expand All @@ -60,7 +60,7 @@ def add_eye(self, diag_weight=float(1.0)) -> 'Multiplier':
tf.assert_equal(self.shape[0], self.shape[1])
return Sum(self, DiagMatrix(diag_weight * tf.ones([self.shape[0]])))

def rowsums(self, replace_if_0: None | float | tf.Tensor = None) -> tf.Tensor:
def rowsums(self, replace_if_0: 'None|float|tf.Tensor' = None) -> tf.Tensor:
"""Returns vector with shape `num_rows = [self.shape[0]]` that sums rows.
Args:
Expand All @@ -73,7 +73,7 @@ def rowsums(self, replace_if_0: None | float | tf.Tensor = None) -> tf.Tensor:
y = tf.where(tf.abs(y) < EPSILON, replace_if_0 * tf.ones_like(y), y)
return y

def colsums(self, replace_if_0: None | float | tf.Tensor = None) -> tf.Tensor:
def colsums(self, replace_if_0: 'None|float|tf.Tensor' = None) -> tf.Tensor:
"""Returns vector with shape `num_cols = [self.shape[1]]` that sums columns.
Args:
Expand Down Expand Up @@ -121,7 +121,7 @@ def rmatmul(self, mat: tf.Tensor) -> tf.Tensor:
return tf.transpose(self._multiplier @ tf.transpose(mat))

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
transpose_shape = self._multiplier.shape
return (transpose_shape[1], transpose_shape[0])

Expand All @@ -144,7 +144,7 @@ def rmatmul(self, mat: tf.Tensor) -> tf.Tensor:
return tf.einsum('i,...i->...i', self._diag_vector, mat)

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
return (self._vec_shape, self._vec_shape)


Expand All @@ -169,7 +169,7 @@ def rmatmul(self, mat: tf.Tensor) -> tf.Tensor:
return mat

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
return (self._multipliers[0].shape[0], self._multipliers[-1].shape[1])


Expand All @@ -190,7 +190,7 @@ def rmatmul(self, mat: tf.Tensor) -> tf.Tensor:
return tf.add_n([mat @ m for m in self._multipliers])

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
return self._multipliers[0].shape


Expand All @@ -210,7 +210,7 @@ class AdjacencyMultiplier(Multiplier):

def __init__(
self, graph, edge_set_name: tfgnn.EdgeSetName,
edge_weight_feature_name: None|tfgnn.FieldName = None,
edge_weight_feature_name: 'None|tfgnn.FieldName' = None,
sender_tag: tfgnn.IncidentNodeTag = tfgnn.SOURCE):
tfgnn.check_scalar_graph_tensor(graph, 'AdjacencyMultiplier')
self._sender_tag = sender_tag
Expand All @@ -220,7 +220,7 @@ def __init__(
self._edge_weight_feature_name = edge_weight_feature_name

@property
def shape(self) -> tuple[int|tf.Tensor, int|tf.Tensor]:
def shape(self) -> tuple['int|tf.Tensor', 'int|tf.Tensor']:
"""Shape is (size of receiver node set, size of sender node set)."""
adj = self._graph.edge_sets[self._edge_set_name].adjacency
sender_node_set_name = adj.node_set_name(self._sender_tag)
Expand Down Expand Up @@ -256,7 +256,7 @@ def rmatmul(self, mat):
feature_value=edge_level))


def shape(tensor: tf.Tensor) -> list[int]|tf.Tensor:
def shape(tensor: tf.Tensor) -> 'list[int]|tf.Tensor':
"""Helper function returns shape of eager or symbolic tensors."""
if any([s is None for s in tensor.shape]):
return tf.shape(tensor)
Expand Down
2 changes: 1 addition & 1 deletion tpu_graphs/baselines/tiles/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ class CombinedLoss(tf.keras.losses.Loss):

def __init__(
self,
weighted_losses: None|list[tuple[float, tf.keras.losses.Loss]] = None,
weighted_losses: 'None|list[tuple[float, tf.keras.losses.Loss]]' = None,
reduction=None, name=None):
super().__init__()
if weighted_losses is None:
Expand Down

0 comments on commit 422a990

Please sign in to comment.