Skip to content

Commit f028d54

Browse files
committed
Remove docstrings of property getters (not needed anymore)
1 parent e7d4981 commit f028d54

1 file changed

Lines changed: 0 additions & 8 deletions

File tree

src/torchjd/aggregation/_gradvac.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -51,8 +51,6 @@ def __init__(self, beta: float = 0.5, eps: float = 1e-8) -> None:
5151

5252
@property
5353
def beta(self) -> float:
54-
"""EMA decay coefficient for :math:`\\hat{\\phi}` (paper default ``0.5``)."""
55-
5654
return self._gradvac_weighting.beta
5755

5856
@beta.setter
@@ -61,8 +59,6 @@ def beta(self, value: float) -> None:
6159

6260
@property
6361
def eps(self) -> float:
64-
"""Small non-negative constant added to denominators for numerical stability."""
65-
6662
return self._gradvac_weighting.eps
6763

6864
@eps.setter
@@ -117,8 +113,6 @@ def __init__(self, beta: float = 0.5, eps: float = 1e-8) -> None:
117113

118114
@property
119115
def beta(self) -> float:
120-
"""EMA decay coefficient for :math:`\\hat{\\phi}` (paper default ``0.5``)."""
121-
122116
return self._beta
123117

124118
@beta.setter
@@ -129,8 +123,6 @@ def beta(self, value: float) -> None:
129123

130124
@property
131125
def eps(self) -> float:
132-
"""Small non-negative constant added to denominators for numerical stability."""
133-
134126
return self._eps
135127

136128
@eps.setter

0 commit comments

Comments
 (0)