
Commit e86b39e
fix: rm src
JesperDramsch committed May 28, 2024
1 parent: 9bade3d
Showing 6 changed files with 15 additions and 17 deletions.
4 changes: 1 addition & 3 deletions pyproject.toml
@@ -57,7 +57,7 @@ docs = [
 
 all = []
 
-tests = ["pytest", "hypothesis", "omegaconf"]
+tests = ["pytest", "hypothesis"]
 
 dev = [
     "sphinx",
@@ -66,8 +66,6 @@ dev = [
     "pandoc",
     "pytest",
     "hypothesis",
-    "hydra-core>=1.3",
-    "pytest",
 ]
 
 [project.urls]
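The pyproject.toml change drops omegaconf from the tests extra and removes hydra-core (plus a duplicated pytest entry) from the dev extra. A hedged sketch of how a test suite typically copes once a formerly declared dependency becomes optional — the test name and config values below are hypothetical, not from this repository, and it assumes the tests extra is installed via something like pip install -e ".[tests]":

    import pytest

    # Skip this module at collection time if omegaconf is absent, since the
    # commit removes it from the declared test dependencies.
    omegaconf = pytest.importorskip("omegaconf")

    def test_config_roundtrip():
        # Hypothetical check: build a config and read a value back.
        cfg = omegaconf.OmegaConf.create({"model": {"num_layers": 2}})
        assert cfg.model.num_layers == 2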
4 changes: 2 additions & 2 deletions src/anemoi/models/layers/attention.py
@@ -24,8 +24,8 @@
 else:
     _FLASH_ATTENTION_AVAILABLE = True
 
-from src.anemoi.models.distributed.transformer import shard_heads
-from src.anemoi.models.distributed.transformer import shard_sequence
+from anemoi.models.distributed.transformer import shard_heads
+from anemoi.models.distributed.transformer import shard_sequence
 
 LOGGER = logging.getLogger(__name__)
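The same fix repeats across the remaining files: the repository uses a src layout, so the installed package is importable as anemoi.models; a src. prefix only happens to resolve when Python runs from the repository root and breaks for any installed copy. A minimal smoke check (a sketch, not part of the commit; it assumes the package has been installed, e.g. with pip install -e .):

    import importlib

    # Each module touched by this commit must resolve without the `src.` prefix.
    for name in (
        "anemoi.models.distributed.transformer",
        "anemoi.models.distributed.graph",
        "anemoi.models.distributed.shapes",
    ):
        importlib.import_module(name)  # raises ModuleNotFoundError on a bad path
    print("all imports resolve")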
8 changes: 4 additions & 4 deletions src/anemoi/models/layers/block.py
@@ -21,14 +21,14 @@
 from torch_geometric.typing import OptPairTensor
 from torch_geometric.typing import Size
 
+from anemoi.models.distributed.graph import shard_tensor
+from anemoi.models.distributed.graph import sync_tensor
+from anemoi.models.distributed.transformer import shard_heads
+from anemoi.models.distributed.transformer import shard_sequence
 from anemoi.models.layers.attention import MultiHeadSelfAttention
 from anemoi.models.layers.conv import GraphConv
 from anemoi.models.layers.conv import GraphTransformerConv
 from anemoi.models.layers.mlp import MLP
-from src.anemoi.models.distributed.graph import shard_tensor
-from src.anemoi.models.distributed.graph import sync_tensor
-from src.anemoi.models.distributed.transformer import shard_heads
-from src.anemoi.models.distributed.transformer import shard_sequence
 
 LOGGER = logging.getLogger(__name__)
8 changes: 4 additions & 4 deletions src/anemoi/models/layers/mapper.py
@@ -20,15 +20,15 @@
 from torch_geometric.typing import Adj
 from torch_geometric.typing import PairTensor
 
+from anemoi.models.distributed.graph import gather_tensor
+from anemoi.models.distributed.graph import shard_tensor
 from anemoi.models.distributed.khop_edges import sort_edges_1hop
+from anemoi.models.distributed.shapes import change_channels_in_shape
+from anemoi.models.distributed.shapes import get_shape_shards
 from anemoi.models.layers.block import GraphConvMapperBlock
 from anemoi.models.layers.block import GraphTransformerMapperBlock
 from anemoi.models.layers.graph import TrainableTensor
 from anemoi.models.layers.mlp import MLP
-from src.anemoi.models.distributed.graph import gather_tensor
-from src.anemoi.models.distributed.graph import shard_tensor
-from src.anemoi.models.distributed.shapes import change_channels_in_shape
-from src.anemoi.models.distributed.shapes import get_shape_shards
 
 
 class BaseMapper(nn.Module, ABC):
6 changes: 3 additions & 3 deletions src/anemoi/models/layers/processor.py
@@ -17,15 +17,15 @@
 from torch.utils.checkpoint import checkpoint
 from torch_geometric.data import HeteroData
 
+from anemoi.models.distributed.graph import shard_tensor
 from anemoi.models.distributed.khop_edges import sort_edges_1hop
+from anemoi.models.distributed.shapes import change_channels_in_shape
+from anemoi.models.distributed.shapes import get_shape_shards
 from anemoi.models.layers.chunk import GNNProcessorChunk
 from anemoi.models.layers.chunk import GraphTransformerProcessorChunk
 from anemoi.models.layers.chunk import TransformerProcessorChunk
 from anemoi.models.layers.graph import TrainableTensor
 from anemoi.models.layers.mapper import GraphEdgeMixin
-from src.anemoi.models.distributed.graph import shard_tensor
-from src.anemoi.models.distributed.shapes import change_channels_in_shape
-from src.anemoi.models.distributed.shapes import get_shape_shards
 
 
 class BaseProcessor(nn.Module, ABC):
2 changes: 1 addition & 1 deletion src/anemoi/models/models/encoder_processor_decoder.py
@@ -19,9 +19,9 @@
 from torch.utils.checkpoint import checkpoint
 from torch_geometric.data import HeteroData
 
+from anemoi.models.distributed.shapes import get_shape_shards
 from anemoi.models.layers.graph import TrainableTensor
 from anemoi.models.utils.config import DotConfig
-from src.anemoi.models.distributed.shapes import get_shape_shards
 
 LOGGER = logging.getLogger(__name__)
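To keep stray src. imports from creeping back in, a repository could add a small guard to its test suite. A hypothetical sketch (not part of this commit; the helper name and the src root are assumptions):

    import pathlib
    import re

    BAD_IMPORT = re.compile(r"^\s*(?:from|import)\s+src\.", re.MULTILINE)

    def find_src_imports(root: str = "src") -> list[pathlib.Path]:
        # Return every Python file under `root` that imports via the `src.` prefix.
        return [
            path
            for path in pathlib.Path(root).rglob("*.py")
            if BAD_IMPORT.search(path.read_text(encoding="utf-8"))
        ]

    def test_no_src_imports():
        assert find_src_imports() == [], "found imports using the src. prefix"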
