
Commit

removing unnecessary code
c-w-feldmann committed Apr 25, 2024
1 parent 11cdd8e commit 14d2c5f
Showing 1 changed file with 0 additions and 9 deletions.
9 changes: 0 additions & 9 deletions molpipeline/estimators/chemprop/component_wrapper.py
@@ -3,7 +3,6 @@
 import abc
 from typing import Any, Iterable, Self
 
-import torch
 from chemprop.conf import DEFAULT_ATOM_FDIM, DEFAULT_BOND_FDIM, DEFAULT_HIDDEN_DIM
 from chemprop.models.model import MPNN as _MPNN
 from chemprop.nn.agg import Aggregation
@@ -21,7 +20,6 @@
 )
 from chemprop.nn.transforms import UnscaleTransform
 from chemprop.nn.utils import Activation, get_activation_function
-from chemprop.utils.registry import Factory
 from sklearn.base import BaseEstimator
 from torch import Tensor, nn
 
@@ -165,13 +163,6 @@ def __init__(
         output_transform : UnscaleTransform or None, optional (default=None)
            Transformations to apply to the output. None defaults to UnscaleTransform.
        """
-        if criterion is None:
-            task_weights = torch.ones(n_tasks) if task_weights is None else task_weights
-            criterion = Factory.build(
-                self._T_default_criterion,
-                task_weights=task_weights,
-                threshold=threshold,
-            )
        super().__init__(
            n_tasks=n_tasks,
            input_dim=input_dim,
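
For context, the deleted block implemented a "build a default criterion when none is given" fallback in the wrapper's __init__. Below is a minimal, self-contained sketch of that pattern. It is illustrative only: every name in it (SimpleFactory, WeightedMSECriterion, PredictorSketch) is a hypothetical stand-in for chemprop's Factory, the class referenced by _T_default_criterion, and the wrapped predictor, and it assumes the reason the block could be removed is that the chemprop base class already performs this construction itself.

# Minimal sketch of the default-criterion fallback that this commit removes from
# the wrapper's __init__.  All class names here are hypothetical stand-ins:
# SimpleFactory for chemprop.utils.registry.Factory, WeightedMSECriterion for the
# class behind _T_default_criterion, PredictorSketch for the wrapped predictor.
from typing import Any, Optional, Type

import torch
from torch import Tensor, nn


class WeightedMSECriterion(nn.Module):
    """Hypothetical default criterion with per-task weights."""

    def __init__(self, task_weights: Tensor, threshold: Optional[float] = None) -> None:
        super().__init__()
        self.register_buffer("task_weights", task_weights)
        self.threshold = threshold

    def forward(self, preds: Tensor, targets: Tensor) -> Tensor:
        # Weighted per-task squared error, averaged over batch and tasks.
        return (self.task_weights * (preds - targets) ** 2).mean()


class SimpleFactory:
    """Hypothetical stand-in for a registry factory: instantiate a class from kwargs."""

    @staticmethod
    def build(cls: Type[nn.Module], **kwargs: Any) -> nn.Module:
        return cls(**kwargs)


class PredictorSketch:
    _T_default_criterion = WeightedMSECriterion

    def __init__(
        self,
        n_tasks: int,
        criterion: Optional[nn.Module] = None,
        task_weights: Optional[Tensor] = None,
        threshold: Optional[float] = None,
    ) -> None:
        # The fallback deleted by this commit, assuming the chemprop base class
        # now builds the default criterion itself when criterion is None.
        if criterion is None:
            task_weights = torch.ones(n_tasks) if task_weights is None else task_weights
            criterion = SimpleFactory.build(
                self._T_default_criterion,
                task_weights=task_weights,
                threshold=threshold,
            )
        self.criterion = criterion


predictor = PredictorSketch(n_tasks=3)
print(type(predictor.criterion).__name__)  # WeightedMSECriterion with task_weights = torch.ones(3)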
