diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 973612d2..e4ac5caf 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,4 +13,4 @@ jobs: - uses: actions/checkout@v3 - uses: chartboost/ruff-action@v1 with: - src: './topobenchmarkx' \ No newline at end of file + src: './topobenchmark' \ No newline at end of file diff --git a/codecov.yml b/codecov.yml index 85ba6f8b..ac4c5a9c 100644 --- a/codecov.yml +++ b/codecov.yml @@ -4,4 +4,5 @@ coverage: round: down precision: 2 ignore: - - "test/" \ No newline at end of file + - "test/" + - "topobenchmark/run.py" \ No newline at end of file diff --git a/configs/dataset/graph/AQSOL.yaml b/configs/dataset/graph/AQSOL.yaml index 0fefc55d..c802fcba 100644 --- a/configs/dataset/graph/AQSOL.yaml +++ b/configs/dataset/graph/AQSOL.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.MoleculeDatasetLoader + _target_: topobenchmark.data.loaders.MoleculeDatasetLoader parameters: data_domain: graph data_type: AQSOL diff --git a/configs/dataset/graph/IMDB-MULTI.yaml b/configs/dataset/graph/IMDB-MULTI.yaml index c485040e..27fb0c23 100755 --- a/configs/dataset/graph/IMDB-MULTI.yaml +++ b/configs/dataset/graph/IMDB-MULTI.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.TUDatasetLoader + _target_: topobenchmark.data.loaders.TUDatasetLoader parameters: data_domain: graph data_type: TUDataset diff --git a/configs/dataset/graph/NCI1.yaml b/configs/dataset/graph/NCI1.yaml index ea7f4c1e..d21d807e 100755 --- a/configs/dataset/graph/NCI1.yaml +++ b/configs/dataset/graph/NCI1.yaml @@ -1,5 +1,5 @@ loader: - _target_: topobenchmarkx.data.loaders.TUDatasetLoader + _target_: topobenchmark.data.loaders.TUDatasetLoader parameters: data_domain: graph data_type: TUDataset diff --git a/configs/dataset/graph/PROTEINS.yaml b/configs/dataset/graph/PROTEINS.yaml index 58dcaac8..762f362b 100755 --- a/configs/dataset/graph/PROTEINS.yaml +++ b/configs/dataset/graph/PROTEINS.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.TUDatasetLoader + _target_: topobenchmark.data.loaders.TUDatasetLoader parameters: data_domain: graph data_type: TUDataset diff --git a/configs/dataset/graph/cocitation_citeseer.yaml b/configs/dataset/graph/cocitation_citeseer.yaml index 5d96cdc9..cfb1b6fe 100755 --- a/configs/dataset/graph/cocitation_citeseer.yaml +++ b/configs/dataset/graph/cocitation_citeseer.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.PlanetoidDatasetLoader + _target_: topobenchmark.data.loaders.PlanetoidDatasetLoader parameters: data_domain: graph data_type: cocitation diff --git a/configs/dataset/graph/tolokers.yaml b/configs/dataset/graph/tolokers.yaml index 4921cea9..f1657f16 100755 --- a/configs/dataset/graph/tolokers.yaml +++ b/configs/dataset/graph/tolokers.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.HeterophilousGraphDatasetLoader + _target_: topobenchmark.data.loaders.HeterophilousGraphDatasetLoader parameters: data_domain: graph data_type: heterophilic diff --git a/configs/dataset/hypergraph/coauthorship_cora.yaml b/configs/dataset/hypergraph/coauthorship_cora.yaml index d864623e..80699bbd 100755 --- a/configs/dataset/hypergraph/coauthorship_cora.yaml +++ b/configs/dataset/hypergraph/coauthorship_cora.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: 
topobenchmarkx.data.loaders.CitationHypergraphDatasetLoader + _target_: topobenchmark.data.loaders.CitationHypergraphDatasetLoader parameters: data_domain: hypergraph data_type: coauthorship diff --git a/configs/dataset/hypergraph/cocitation_pubmed.yaml b/configs/dataset/hypergraph/cocitation_pubmed.yaml index 5204e26a..8aa19826 100755 --- a/configs/dataset/hypergraph/cocitation_pubmed.yaml +++ b/configs/dataset/hypergraph/cocitation_pubmed.yaml @@ -1,6 +1,6 @@ # Dataset loader config loader: - _target_: topobenchmarkx.data.loaders.CitationHypergraphDatasetLoader + _target_: topobenchmark.data.loaders.CitationHypergraphDatasetLoader parameters: data_domain: hypergraph data_type: cocitation diff --git a/configs/model/cell/ccxn.yaml b/configs/model/cell/ccxn.yaml index cdf45b1c..cfd9ad87 100755 --- a/configs/model/cell/ccxn.yaml +++ b/configs/model/cell/ccxn.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: ccxn model_domain: cell feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -22,14 +22,14 @@ backbone_additional_params: hidden_channels: ${model.feature_encoder.out_channels} backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.CCXNWrapper + _target_: topobenchmark.nn.wrappers.CCXNWrapper _partial_: true wrapper_name: CCXNWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/graph/gat.yaml b/configs/model/graph/gat.yaml index 1841d3c2..8c71b06d 100755 --- a/configs/model/graph/gat.yaml +++ b/configs/model/graph/gat.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: gat model_domain: graph feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -22,14 +22,14 @@ backbone: concat: true backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.GNNWrapper + _target_: topobenchmark.nn.wrappers.GNNWrapper _partial_: true wrapper_name: GNNWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: NoReadOut # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: 
${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/graph/gin.yaml b/configs/model/graph/gin.yaml index 6f941c95..816affa7 100755 --- a/configs/model/graph/gin.yaml +++ b/configs/model/graph/gin.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: gin model_domain: graph feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -19,14 +19,14 @@ backbone: act: relu backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.GNNWrapper + _target_: topobenchmark.nn.wrappers.GNNWrapper _partial_: true wrapper_name: GNNWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: NoReadOut # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/hypergraph/alldeepset.yaml b/configs/model/hypergraph/alldeepset.yaml index 8e251a16..fe6a5e4b 100755 --- a/configs/model/hypergraph/alldeepset.yaml +++ b/configs/model/hypergraph/alldeepset.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: alldeepset model_domain: hypergraph feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -27,14 +27,14 @@ backbone: #num_features: ${model.backbone.hidden_channels} backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.HypergraphWrapper + _target_: topobenchmark.nn.wrappers.HypergraphWrapper _partial_: true wrapper_name: HypergraphWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/hypergraph/unignn2.yaml b/configs/model/hypergraph/unignn2.yaml index f3f8dc4b..d61dc28f 100755 --- a/configs/model/hypergraph/unignn2.yaml +++ b/configs/model/hypergraph/unignn2.yaml @@ -1,10 
+1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: unignn2 model_domain: hypergraph feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 128 @@ -21,14 +21,14 @@ backbone: layer_drop: 0.0 backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.HypergraphWrapper + _target_: topobenchmark.nn.wrappers.HypergraphWrapper _partial_: true wrapper_name: HypergraphWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/simplicial/sccn.yaml b/configs/model/simplicial/sccn.yaml index 91144079..0c90eb62 100755 --- a/configs/model/simplicial/sccn.yaml +++ b/configs/model/simplicial/sccn.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: sccn model_domain: simplicial feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -18,14 +18,14 @@ backbone: update_func: "sigmoid" backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.SCCNWrapper + _target_: topobenchmark.nn.wrappers.SCCNWrapper _partial_: true wrapper_name: SCCNWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/simplicial/sccnn.yaml b/configs/model/simplicial/sccnn.yaml index e631c7e2..6de88175 100755 --- a/configs/model/simplicial/sccnn.yaml +++ b/configs/model/simplicial/sccnn.yaml @@ -1,10 +1,10 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: sccnn model_domain: simplicial feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -31,14 +31,14 
@@ backbone: n_layers: 1 backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.SCCNNWrapper + _target_: topobenchmark.nn.wrappers.SCCNNWrapper _partial_: true wrapper_name: SCCNNWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/simplicial/topotune.yaml b/configs/model/simplicial/topotune.yaml index 4e7bf859..6c0228b3 100755 --- a/configs/model/simplicial/topotune.yaml +++ b/configs/model/simplicial/topotune.yaml @@ -1,11 +1,11 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: topotune model_domain: simplicial tune_gnn: GIN feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -16,9 +16,9 @@ feature_encoder: - 2 backbone: - _target_: topobenchmarkx.nn.backbones.combinatorial.gccn.TopoTune + _target_: topobenchmark.nn.backbones.combinatorial.gccn.TopoTune GNN: - _target_: topobenchmarkx.nn.backbones.graph.${model.tune_gnn} + _target_: topobenchmark.nn.backbones.graph.${model.tune_gnn} in_channels: ${model.feature_encoder.out_channels} out_channels: ${model.feature_encoder.out_channels} hidden_channels: ${model.feature_encoder.out_channels} @@ -35,14 +35,14 @@ backbone: activation: relu backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.combinatorial.TuneWrapper + _target_: topobenchmark.nn.wrappers.combinatorial.TuneWrapper _partial_: true wrapper_name: TuneWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/model/simplicial/topotune_onehasse.yaml b/configs/model/simplicial/topotune_onehasse.yaml index 4bf21276..01c0bd35 100644 --- a/configs/model/simplicial/topotune_onehasse.yaml +++ b/configs/model/simplicial/topotune_onehasse.yaml @@ -1,11 +1,11 @@ -_target_: topobenchmarkx.model.TBXModel +_target_: topobenchmark.model.TBModel model_name: topotune_onehasse model_domain: simplicial tune_gnn: GCN feature_encoder: - _target_: topobenchmarkx.nn.encoders.${model.feature_encoder.encoder_name} + _target_: topobenchmark.nn.encoders.${model.feature_encoder.encoder_name} 
encoder_name: AllCellFeatureEncoder in_channels: ${infer_in_channels:${dataset},${oc.select:transforms,null}} out_channels: 32 @@ -16,9 +16,9 @@ feature_encoder: - 2 backbone: - _target_: topobenchmarkx.nn.backbones.combinatorial.gccn_onehasse.TopoTune_OneHasse + _target_: topobenchmark.nn.backbones.combinatorial.gccn_onehasse.TopoTune_OneHasse GNN: - _target_: topobenchmarkx.nn.backbones.graph.${model.tune_gnn} + _target_: topobenchmark.nn.backbones.graph.${model.tune_gnn} in_channels: ${model.feature_encoder.out_channels} out_channels: ${model.feature_encoder.out_channels} hidden_channels: ${model.feature_encoder.out_channels} @@ -34,14 +34,14 @@ backbone: activation: relu backbone_wrapper: - _target_: topobenchmarkx.nn.wrappers.combinatorial.TuneWrapper + _target_: topobenchmark.nn.wrappers.combinatorial.TuneWrapper _partial_: true wrapper_name: TuneWrapper out_channels: ${model.feature_encoder.out_channels} num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} readout: - _target_: topobenchmarkx.nn.readouts.${model.readout.readout_name} + _target_: topobenchmark.nn.readouts.${model.readout.readout_name} readout_name: PropagateSignalDown # Use in case readout is not needed Options: PropagateSignalDown num_cell_dimensions: ${infere_num_cell_dimensions:${oc.select:model.feature_encoder.selected_dimensions,null},${model.feature_encoder.in_channels}} # The highest order of cell dimensions to consider hidden_dim: ${model.feature_encoder.out_channels} diff --git a/configs/transforms/data_manipulations/identity.yaml b/configs/transforms/data_manipulations/identity.yaml index c5deadbe..422be56f 100755 --- a/configs/transforms/data_manipulations/identity.yaml +++ b/configs/transforms/data_manipulations/identity.yaml @@ -1,3 +1,3 @@ -_target_: topobenchmarkx.transforms.data_transform.DataTransform +_target_: topobenchmark.transforms.data_transform.DataTransform transform_name: "Identity" transform_type: null \ No newline at end of file diff --git a/configs/transforms/data_manipulations/mp_homophily.yaml b/configs/transforms/data_manipulations/mp_homophily.yaml index 431b5371..b02b1dcd 100755 --- a/configs/transforms/data_manipulations/mp_homophily.yaml +++ b/configs/transforms/data_manipulations/mp_homophily.yaml @@ -1,4 +1,4 @@ -_target_: topobenchmarkx.transforms.data_transform.DataTransform +_target_: topobenchmark.transforms.data_transform.DataTransform transform_name: "MessagePassingHomophily" transform_type: "data manipulation" num_steps: 10 diff --git a/configs/transforms/liftings/graph2hypergraph/khop.yaml b/configs/transforms/liftings/graph2hypergraph/khop.yaml index 9fc6d185..8b2dfe30 100755 --- a/configs/transforms/liftings/graph2hypergraph/khop.yaml +++ b/configs/transforms/liftings/graph2hypergraph/khop.yaml @@ -1,4 +1,4 @@ -_target_: topobenchmarkx.transforms.data_transform.DataTransform +_target_: topobenchmark.transforms.data_transform.DataTransform transform_type: 'lifting' transform_name: "HypergraphKHopLifting" k_value: 1 diff --git a/docs/api/loss/index.rst b/docs/api/loss/index.rst index 4f0d195d..56ce1796 100644 --- a/docs/api/loss/index.rst +++ b/docs/api/loss/index.rst @@ -4,8 +4,8 @@ Loss This module implements custom Python classes to compute losses in `TopoBenchmarkX`. -.. automodule:: topobenchmarkx.loss.base +.. automodule:: topobenchmark.loss.base :members: -.. automodule:: topobenchmarkx.loss.loss +.. 
automodule:: topobenchmark.loss.loss :members: diff --git a/docs/api/nn/backbones/index.rst b/docs/api/nn/backbones/index.rst index 01909597..0dff2b96 100644 --- a/docs/api/nn/backbones/index.rst +++ b/docs/api/nn/backbones/index.rst @@ -2,11 +2,11 @@ Backbones ********* -.. automodule:: topobenchmarkx.nn.backbones.cell.cccn +.. automodule:: topobenchmark.nn.backbones.cell.cccn :members: -.. automodule:: topobenchmarkx.nn.backbones.hypergraph.edgnn +.. automodule:: topobenchmark.nn.backbones.hypergraph.edgnn :members: -.. automodule:: topobenchmarkx.nn.backbones.simplicial.sccnn +.. automodule:: topobenchmark.nn.backbones.simplicial.sccnn :members: \ No newline at end of file diff --git a/docs/api/nn/encoders/index.rst b/docs/api/nn/encoders/index.rst index 93f05e23..36f0ce77 100644 --- a/docs/api/nn/encoders/index.rst +++ b/docs/api/nn/encoders/index.rst @@ -2,8 +2,8 @@ Encoders ******** -.. automodule:: topobenchmarkx.nn.encoders.base +.. automodule:: topobenchmark.nn.encoders.base :members: -.. automodule:: topobenchmarkx.nn.encoders.all_cell_encoder +.. automodule:: topobenchmark.nn.encoders.all_cell_encoder :members: \ No newline at end of file diff --git a/docs/api/nn/wrappers/index.rst b/docs/api/nn/wrappers/index.rst index b915848a..2c2d72da 100644 --- a/docs/api/nn/wrappers/index.rst +++ b/docs/api/nn/wrappers/index.rst @@ -2,35 +2,35 @@ Wrappers ******** -.. automodule:: topobenchmarkx.nn.wrappers.base +.. automodule:: topobenchmark.nn.wrappers.base :members: -.. automodule:: topobenchmarkx.nn.wrappers.cell.can_wrapper +.. automodule:: topobenchmark.nn.wrappers.cell.can_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.cell.cccn_wrapper +.. automodule:: topobenchmark.nn.wrappers.cell.cccn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.cell.ccxn_wrapper +.. automodule:: topobenchmark.nn.wrappers.cell.ccxn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.cell.cwn_wrapper +.. automodule:: topobenchmark.nn.wrappers.cell.cwn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.graph.gnn_wrapper +.. automodule:: topobenchmark.nn.wrappers.graph.gnn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.hypergraph.hypergraph_wrapper +.. automodule:: topobenchmark.nn.wrappers.hypergraph.hypergraph_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.simplicial.san_wrapper +.. automodule:: topobenchmark.nn.wrappers.simplicial.san_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.simplicial.sccn_wrapper +.. automodule:: topobenchmark.nn.wrappers.simplicial.sccn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.simplicial.sccnn_wrapper +.. automodule:: topobenchmark.nn.wrappers.simplicial.sccnn_wrapper :members: -.. automodule:: topobenchmarkx.nn.wrappers.simplicial.scn_wrapper +.. automodule:: topobenchmark.nn.wrappers.simplicial.scn_wrapper :members: \ No newline at end of file diff --git a/docs/api/optimizer/index.rst b/docs/api/optimizer/index.rst index 28f49b85..9e69f4bf 100644 --- a/docs/api/optimizer/index.rst +++ b/docs/api/optimizer/index.rst @@ -4,8 +4,8 @@ Optimizer This module implements a custom Python class to manage `PyTorch` optimizers and learning rate schedulers in `TopoBenchmarkX`. -.. automodule:: topobenchmarkx.optimizer.base +.. automodule:: topobenchmark.optimizer.base :members: -.. automodule:: topobenchmarkx.optimizer.optimizer +.. 
automodule:: topobenchmark.optimizer.optimizer :members: diff --git a/docs/api/transforms/feature_liftings/index.rst b/docs/api/transforms/feature_liftings/index.rst index f79244a7..b71f1c13 100644 --- a/docs/api/transforms/feature_liftings/index.rst +++ b/docs/api/transforms/feature_liftings/index.rst @@ -2,14 +2,14 @@ Feature Liftings **************** -.. automodule:: topobenchmarkx.transforms.feature_liftings.concatenation +.. automodule:: topobenchmark.transforms.feature_liftings.concatenation :members: -.. automodule:: topobenchmarkx.transforms.feature_liftings.identity +.. automodule:: topobenchmark.transforms.feature_liftings.identity :members: -.. automodule:: topobenchmarkx.transforms.feature_liftings.projection_sum +.. automodule:: topobenchmark.transforms.feature_liftings.projection_sum :members: -.. automodule:: topobenchmarkx.transforms.feature_liftings.set +.. automodule:: topobenchmark.transforms.feature_liftings.set :members: \ No newline at end of file diff --git a/docs/contributing/index.rst b/docs/contributing/index.rst index 2d58b05b..c26aecda 100644 --- a/docs/contributing/index.rst +++ b/docs/contributing/index.rst @@ -13,7 +13,7 @@ community effort, and everyone is welcome to contribute. Making Changes -------------- -The preferred way to contribute to topobenchmarkx is to fork the `upstream +The preferred way to contribute to topobenchmark is to fork the `upstream repository `__ and submit a "pull request" (PR). Follow these steps before submitting a PR: @@ -107,7 +107,7 @@ A docstring is a well-formatted description of your function/class/module which describes its purpose, usage, and other information. There are different markdown languages/formats used for docstrings in Python. The most common -three are reStructuredText, numpy, and google docstring styles. For topobenchmarkx, we are +three are reStructuredText, numpy, and google docstring styles. For topobenchmark, we are using the numpy docstring standard. When writing up your docstrings, please review the `NumPy docstring guide `_ to understand the role and syntax of each section.
Following this syntax is important not only for readability, diff --git a/pyproject.toml b/pyproject.toml index 5b122550..76668357 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta" name = "TopoBenchmark" dynamic = ["version"] authors = [ - {name = "PyT-Team Authors", email = "tlscabinet@gmail.com"} + {name = "Topological Intelligence Team Authors", email = "tlscabinet@gmail.com"} ] readme = "README.md" description = "Topological Deep Learning" @@ -132,14 +132,15 @@ convention = "numpy" [tool.ruff.lint.per-file-ignores] "__init__.py" = ["F403"] +"tests/*" = ["D"] [tool.setuptools.dynamic] -version = {attr = "topobenchmarkx.__version__"} +version = {attr = "topobenchmark.__version__"} [tool.setuptools.packages.find] include = [ - "topobenchmarkx", - "topobenchmarkx.*" + "topobenchmark", + "topobenchmark.*" ] [tool.mypy] diff --git a/resources/workflow.jpg b/resources/workflow.jpg index 321008b5..ad589cd2 100644 Binary files a/resources/workflow.jpg and b/resources/workflow.jpg differ diff --git a/scripts/topotune/existing_models/tune_cwn.sh b/scripts/topotune/existing_models/tune_cwn.sh index e2bd341c..f571cc86 100644 --- a/scripts/topotune/existing_models/tune_cwn.sh +++ b/scripts/topotune/existing_models/tune_cwn.sh @@ -1,8 +1,8 @@ -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/MUTAG \ optimizer.parameters.lr=0.001 \ @@ -20,11 +20,11 @@ python -m topobenchmarkx \ trainer.devices=\[1\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/NCI1 \ optimizer.parameters.lr=0.001 \ @@ -41,11 +41,11 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/NCI109 \ optimizer.parameters.lr=0.001 \ @@ -61,11 +61,11 @@ python -m topobenchmarkx \ trainer.devices=\[2\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/ZINC \ optimizer.parameters.lr=0.001 \ @@ -86,11 +86,11 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ 
model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/cocitation_citeseer \ optimizer.parameters.lr=0.001 \ @@ -107,11 +107,11 @@ python -m topobenchmarkx \ trainer.devices=\[3\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/cocitation_pubmed \ optimizer.parameters.lr=0.01 \ @@ -130,11 +130,11 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=cell/topotune_onehasse,cell/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,1\],coboundary\],\[\[1,1\],adjacency\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_incidence-0,1-up_adjacency-1,1-down_incidence-2\] \ logger.wandb.project=TopoTune_CWN \ dataset=graph/PROTEINS,graph/cocitation_cora \ optimizer.parameters.lr=0.001 \ diff --git a/scripts/topotune/existing_models/tune_sccn.sh b/scripts/topotune/existing_models/tune_sccn.sh index b925f562..6330b5c9 100644 --- a/scripts/topotune/existing_models/tune_sccn.sh +++ b/scripts/topotune/existing_models/tune_sccn.sh @@ -1,11 +1,11 @@ # SCCN -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/MUTAG \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.feature_encoder.out_channels=128 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=3 \ dataset.split_params.data_seed=1,3,5,7,9 \ model.readout.readout_name=NoReadOut \ @@ -22,13 +22,13 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI1 \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.feature_encoder.out_channels=64 \ model.backbone.GNN.num_layers=1 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=3 \ model.feature_encoder.proj_dropout=0.5 \ model.readout.readout_name=PropagateSignalDown \ @@ -45,13 +45,13 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI109 \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.feature_encoder.out_channels=64 \ model.backbone.GNN.num_layers=1 \ 
model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=4 \ model.readout.readout_name=NoReadOut \ transforms.graph2simplicial_lifting.signed=True \ @@ -69,10 +69,10 @@ python -m topobenchmarkx \ -python -m topobenchmarkx \ +python -m topobenchmark \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ dataset=graph/PROTEINS \ optimizer.parameters.lr=0.01 \ model.feature_encoder.out_channels=128 \ @@ -91,11 +91,11 @@ python -m topobenchmarkx \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=simplicial/topotune_onehasse,simplicial/topotune \ dataset=graph/ZINC \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ optimizer.parameters.lr=0.001 \ model.feature_encoder.out_channels=128 \ model.backbone.layers=4 \ @@ -114,10 +114,10 @@ python -m topobenchmarkx \ trainer.devices=\[0\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ dataset=graph/cocitation_citeseer \ optimizer.parameters.lr=0.01 \ model.feature_encoder.out_channels=64 \ @@ -135,11 +135,11 @@ python -m topobenchmarkx \ trainer.devices=\[0\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.GNN._target_=topobenchmarkx.nn.backbones.graph.IdentityGCN \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.GNN._target_=topobenchmark.nn.backbones.graph.IdentityGCN \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ 
dataset=graph/cocitation_cora \ optimizer.parameters.lr=0.01 \ model.feature_encoder.out_channels=32 \ @@ -157,10 +157,10 @@ python -m topobenchmarkx \ trainer.devices=\[1\] \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ model=simplicial/topotune_onehasse,simplicial/topotune \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\] \ dataset=graph/cocitation_pubmed \ optimizer.parameters.lr=0.01 \ model.feature_encoder.out_channels=64 \ diff --git a/scripts/topotune/search_gccn_cell.sh b/scripts/topotune/search_gccn_cell.sh index 2a006935..463fb41c 100644 --- a/scripts/topotune/search_gccn_cell.sh +++ b/scripts/topotune/search_gccn_cell.sh @@ -1,10 +1,10 @@ -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI109 \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -18,13 +18,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_cora \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - 
model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -38,13 +38,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/PROTEINS \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -58,13 +58,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/MUTAG \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - 
model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -78,13 +78,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/ZINC \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -99,13 +99,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_citeseer \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - 
model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -119,13 +119,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI1 \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -139,13 +139,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_pubmed \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - 
model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\]\],\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\]\],\[\[\[0,0\],up_laplacian\],\[\[1,0\],boundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[2,1\],boundary\],\[\[2,2\],down_laplacian\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-down_laplacian-1,1-up_laplacian-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-up_incidence-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_incidence-2,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_incidence-1,1-up_laplacian-1,1-down_incidence-2\],\[1-up_laplacian-0,1-up_incidence-0,1-up_laplacian-1,1-up_incidence-1\],\[1-up_laplacian-0,1-down_incidence-1,1-down_laplacian-1,1-up_laplacian-1,1-down_incidence-2,1-down_laplacian-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -159,13 +159,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI109 \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -179,13 +179,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_cora \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + 
model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -199,13 +199,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/PROTEINS \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -219,13 +219,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/MUTAG \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -239,13 +239,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_citeseer \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ 
+ model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -259,13 +259,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/NCI1 \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ @@ -279,13 +279,13 @@ python -m topobenchmarkx \ tags="[FirstExperiments]" \ --multirun & -python -m topobenchmarkx \ +python -m topobenchmark \ dataset=graph/cocitation_pubmed \ model=cell/topotune,cell/topotune_onehasse \ model.feature_encoder.out_channels=32 \ model.tune_gnn=GCN,GIN,GAT,GraphSAGE \ model.backbone.GNN.num_layers=1,2 \ - model.backbone.routes=\[\[\[0,0\],up_laplacian\],\[\[0,1\],coboundary\],\[\[1,1\],down_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,2\],coboundary\],\[\[2,2\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[1,1\],up_laplacian\],\[\[1,1\],down_laplacian\]\],\[\[\[0,0\],up_laplacian\],\[\[2,1\],boundary\]\] \ + model.backbone.neighborhoods=\[1-up_laplacian-0,1-up_incidence-0,1-down_laplacian-1,1-up_laplacian-1,1-up_incidence-1,1-down_laplacian-2\],\[1-up_laplacian-0,1-down_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1\],\[1-up_laplacian-0,1-up_laplacian-1,1-down_laplacian-1\],\[1-up_laplacian-0,1-down_incidence-2\] \ model.backbone.layers=2,4,8 \ model.feature_encoder.proj_dropout=0.3 \ dataset.split_params.data_seed=1,3,5,7,9 \ diff --git a/test/conftest.py b/test/conftest.py index 026c110c..c84a1b72 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -3,10 +3,10 @@ import pytest import torch import torch_geometric -from topobenchmarkx.transforms.liftings.graph2simplicial import ( +from topobenchmark.transforms.liftings.graph2simplicial import ( SimplicialCliqueLifting ) -from topobenchmarkx.transforms.liftings.graph2cell import ( +from topobenchmark.transforms.liftings.graph2cell import ( CellCycleLifting ) diff --git a/test/data/dataload/test_Dataloaders.py b/test/data/dataload/test_Dataloaders.py index 36cfd279..35770d68 100644 --- a/test/data/dataload/test_Dataloaders.py +++ b/test/data/dataload/test_Dataloaders.py @@ -4,13 +4,13 @@ import rootutils import 
diff --git a/test/conftest.py b/test/conftest.py
index 026c110c..c84a1b72 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -3,10 +3,10 @@ import pytest
 import torch
 import torch_geometric
-from topobenchmarkx.transforms.liftings.graph2simplicial import (
+from topobenchmark.transforms.liftings.graph2simplicial import (
 SimplicialCliqueLifting
 )
-from topobenchmarkx.transforms.liftings.graph2cell import (
+from topobenchmark.transforms.liftings.graph2cell import (
 CellCycleLifting
 )
diff --git a/test/data/dataload/test_Dataloaders.py b/test/data/dataload/test_Dataloaders.py
index 36cfd279..35770d68 100644
--- a/test/data/dataload/test_Dataloaders.py
+++ b/test/data/dataload/test_Dataloaders.py
@@ -4,13 +4,13 @@ import rootutils
 import torch
-from topobenchmarkx.data.preprocessor import PreProcessor
-from topobenchmarkx.dataloader import TBXDataloader
-from topobenchmarkx.dataloader.utils import to_data_list
+from topobenchmark.data.preprocessor import PreProcessor
+from topobenchmark.dataloader import TBDataloader
+from topobenchmark.dataloader.utils import to_data_list
 from omegaconf import OmegaConf
 import os
-from topobenchmarkx.run import initialize_hydra
+from topobenchmark.run import initialize_hydra
 # rootutils.setup_root("./", indicator=".project-root", pythonpath=True)
@@ -35,7 +35,7 @@ def setup_method(self):
 )
 self.batch_size = 2
- datamodule = TBXDataloader(
+ datamodule = TBDataloader(
 dataset_train=dataset_train,
 dataset_val=dataset_val,
 dataset_test=dataset_test,
@@ -47,7 +47,7 @@ def setup_method(self):
 def test_lift_features(self):
 """Test the collate funciton.
- To test the collate function we use the TBXDataloader class to create a dataloader that uses the collate function.
+ To test the collate function we use the TBDataloader class to create a dataloader that uses the collate function.
 We then first check that the batched data has the expected shape. We then convert the batched data back to a list and check that the data in the list is the same as the original data.
 """
diff --git a/test/data/preprocess/test_preprocessor.py b/test/data/preprocess/test_preprocessor.py
index 2c17545b..8e25536f 100644
--- a/test/data/preprocess/test_preprocessor.py
+++ b/test/data/preprocess/test_preprocessor.py
@@ -6,7 +6,7 @@ import torch_geometric
 from omegaconf import DictConfig
-from topobenchmarkx.data.preprocessor import PreProcessor
+from topobenchmark.data.preprocessor import PreProcessor
 from ..._utils.flow_mocker import FlowMocker
@@ -115,7 +115,7 @@ def test_init_with_transform(self, mocker_fixture):
 )
 self.flow_mocker.assert_all(self.preprocessor_with_tranform)
- @patch("topobenchmarkx.data.preprocessor.preprocessor.load_inductive_splits")
+ @patch("topobenchmark.data.preprocessor.preprocessor.load_inductive_splits")
 def test_load_dataset_splits_inductive(self, mock_load_inductive_splits):
 """Test loading dataset splits for inductive learning.
@@ -131,7 +131,7 @@ def test_load_dataset_splits_inductive(self, mock_load_inductive_splits):
 )
 @patch(
- "topobenchmarkx.data.preprocessor.preprocessor.load_transductive_splits"
+ "topobenchmark.data.preprocessor.preprocessor.load_transductive_splits"
 )
 def test_load_dataset_splits_transductive(
 self, mock_load_transductive_splits
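
Side note on the @patch targets just above: unittest.mock resolves these dotted-path strings at test time, so the package rename has to be applied to them by hand (patching the old topobenchmarkx path would now fail to import). Below is a minimal sketch of the updated pattern, reusing only the patch target that appears in the hunks above; the test name, body, and return value are hypothetical.

    from unittest.mock import patch

    # The string must name the module in which load_inductive_splits is looked up,
    # i.e. the renamed topobenchmark package, not the old topobenchmarkx one.
    @patch("topobenchmark.data.preprocessor.preprocessor.load_inductive_splits")
    def test_inductive_split_loading_sketch(mock_load_inductive_splits):
        """Hypothetical test body; only the patch target comes from the diff above."""
        mock_load_inductive_splits.return_value = []  # assumed return value
        # Code under test that calls load_inductive_splits would receive the mock here.
        assert not mock_load_inductive_splits.called
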
diff --git a/test/loss/test_dataset_loss.py b/test/loss/test_dataset_loss.py
index 2097eba6..5572304d 100644
--- a/test/loss/test_dataset_loss.py
+++ b/test/loss/test_dataset_loss.py
@@ -1,12 +1,12 @@
-""" Test the TBXEvaluator class."""
+""" Test the TBEvaluator class."""
 import pytest
 import torch
 import torch_geometric
-from topobenchmarkx.loss.dataset import DatasetLoss
+from topobenchmark.loss.dataset import DatasetLoss
 class TestDatasetLoss:
- """ Test the TBXEvaluator class."""
+ """ Test the TBEvaluator class."""
 def setup_method(self):
 """ Setup the test."""
diff --git a/test/transforms/data_manipulations/test_ConnectivityTransforms.py b/test/transforms/data_manipulations/test_ConnectivityTransforms.py
index 127e0a43..4eb1ba1f 100644
--- a/test/transforms/data_manipulations/test_ConnectivityTransforms.py
+++ b/test/transforms/data_manipulations/test_ConnectivityTransforms.py
@@ -2,7 +2,7 @@ import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import (
+from topobenchmark.transforms.data_manipulations import (
 InfereKNNConnectivity,
 InfereRadiusConnectivity,
 )
diff --git a/test/transforms/data_manipulations/test_EqualGausFeatures.py b/test/transforms/data_manipulations/test_EqualGausFeatures.py
index dbff7459..15c681fb 100644
--- a/test/transforms/data_manipulations/test_EqualGausFeatures.py
+++ b/test/transforms/data_manipulations/test_EqualGausFeatures.py
@@ -3,7 +3,7 @@ import pytest
 import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import EqualGausFeatures
+from topobenchmark.transforms.data_manipulations import EqualGausFeatures
 class TestEqualGausFeatures:
diff --git a/test/transforms/data_manipulations/test_GroupHomophily.py b/test/transforms/data_manipulations/test_GroupHomophily.py
index b39d123a..2a83da69 100644
--- a/test/transforms/data_manipulations/test_GroupHomophily.py
+++ b/test/transforms/data_manipulations/test_GroupHomophily.py
@@ -3,7 +3,7 @@ import pytest
 import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import GroupCombinatorialHomophily
+from topobenchmark.transforms.data_manipulations import GroupCombinatorialHomophily
 class TestGroupCombinatorialHomophily:
diff --git a/test/transforms/data_manipulations/test_IdentityTransform.py b/test/transforms/data_manipulations/test_IdentityTransform.py
index 50c841b8..a362d427 100644
--- a/test/transforms/data_manipulations/test_IdentityTransform.py
+++ b/test/transforms/data_manipulations/test_IdentityTransform.py
@@ -3,7 +3,7 @@ import pytest
 import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import IdentityTransform
+from topobenchmark.transforms.data_manipulations import IdentityTransform
 class TestIdentityTransform:
diff --git a/test/transforms/data_manipulations/test_MessagePassingHomophily.py b/test/transforms/data_manipulations/test_MessagePassingHomophily.py
index 9411e389..8d58ee4f 100644
--- a/test/transforms/data_manipulations/test_MessagePassingHomophily.py
+++ b/test/transforms/data_manipulations/test_MessagePassingHomophily.py
@@ -4,7 +4,7 @@ import pytest
 import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import MessagePassingHomophily
+from topobenchmark.transforms.data_manipulations import MessagePassingHomophily
 class TestMessagePassingHomophily:
diff --git a/test/transforms/data_manipulations/test_OnlyConnectedComponent.py b/test/transforms/data_manipulations/test_OnlyConnectedComponent.py
index 64b58ef6..bd3b2efe 100644
--- a/test/transforms/data_manipulations/test_OnlyConnectedComponent.py
+++ b/test/transforms/data_manipulations/test_OnlyConnectedComponent.py
@@ -3,7 +3,7 @@ import pytest
 import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import KeepOnlyConnectedComponent
+from topobenchmark.transforms.data_manipulations import KeepOnlyConnectedComponent
 class TestKeepOnlyConnectedComponent:
diff --git a/test/transforms/data_manipulations/test_SimplicialCurvature.py b/test/transforms/data_manipulations/test_SimplicialCurvature.py
index 4d556c96..e4cb517b 100644
--- a/test/transforms/data_manipulations/test_SimplicialCurvature.py
+++ b/test/transforms/data_manipulations/test_SimplicialCurvature.py
@@ -2,8 +2,8 @@ import torch
 from torch_geometric.data import Data
-from topobenchmarkx.transforms.data_manipulations import CalculateSimplicialCurvature
-from topobenchmarkx.transforms.liftings.graph2simplicial import SimplicialCliqueLifting
+from topobenchmark.transforms.data_manipulations import CalculateSimplicialCurvature
+from topobenchmark.transforms.liftings.graph2simplicial import SimplicialCliqueLifting
 class TestSimplicialCurvature:
diff --git a/test/transforms/feature_liftings/test_SetLifting.py b/test/transforms/feature_liftings/test_SetLifting.py
index 9f73260a..9b71816f 100644
--- a/test/transforms/feature_liftings/test_SetLifting.py
+++ b/test/transforms/feature_liftings/test_SetLifting.py
@@ -2,7 +2,7 @@ import torch
-from topobenchmarkx.transforms.liftings.graph2simplicial import (
+from topobenchmark.transforms.liftings.graph2simplicial import (
 SimplicialCliqueLifting,
 )
diff --git a/test/transforms/liftings/simplicial/test_SimplicialNeighborhoodLifting.py b/test/transforms/liftings/simplicial/test_SimplicialNeighborhoodLifting.py
index 2cf01ac4..5a03f67e 100644
--- a/test/transforms/liftings/simplicial/test_SimplicialNeighborhoodLifting.py
+++ b/test/transforms/liftings/simplicial/test_SimplicialNeighborhoodLifting.py
@@ -2,7 +2,7 @@ import torch
-from topobenchmarkx.transforms.liftings.graph2simplicial import (
+from topobenchmark.transforms.liftings.graph2simplicial import (
 SimplicialKHopLifting,
 )
diff --git a/test/utils/test_logging_utils.py b/test/utils/test_logging_utils.py
index 142dcfce..f21631d3 100644
--- a/test/utils/test_logging_utils.py
+++ b/test/utils/test_logging_utils.py
@@ -1,10 +1,10 @@
 """Unit tests for logging utils."""
 import pytest
 from unittest.mock import MagicMock, patch
-from topobenchmarkx.utils import log_hyperparameters
+from topobenchmark.utils import log_hyperparameters
-@patch("topobenchmarkx.utils.logging_utils.pylogger.RankedLogger.warning")
-@patch("topobenchmarkx.utils.logging_utils.OmegaConf.to_container")
+@patch("topobenchmark.utils.logging_utils.pylogger.RankedLogger.warning")
+@patch("topobenchmark.utils.logging_utils.OmegaConf.to_container")
 def test_log_hyperparameters(mock_to_container, mock_warning):
 """Test the log_hyperparameters function.