Skip to content

Commit

Permalink
chore: cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
XuehaiPan committed Sep 11, 2022
1 parent e376123 commit 847549b
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 4 deletions.
2 changes: 1 addition & 1 deletion examples/MAML-RL/func_maml.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def main(args):

if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Reinforcement learning with ' 'Model-Agnostic Meta-Learning (MAML) - Train'
description='Reinforcement learning with Model-Agnostic Meta-Learning (MAML) - Train'
)
parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)')
args = parser.parse_args()
Expand Down
9 changes: 6 additions & 3 deletions torchopt/_src/optimizer/func/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@

# mypy: ignore-errors
class FuncOptimizer: # pylint: disable=too-few-public-methods
"""A wrapper class to hold the functional optimizer.
It makes it easier to maintain the optimizer states.
"""A wrapper class to hold the functional optimizer. It makes it easier to maintain the
optimizer states.
See Also:
- The functional Adam optimizer: :func:`torchopt.adam`.
Expand All @@ -40,7 +40,7 @@ def __init__(self, impl: GradientTransformation, *, inplace: bool = False) -> No
Args:
impl (GradientTransformation): A low level optimizer function, it could be an optimizer
function provided by `alias.py` or a customized `chain` provided by `combine.py`.
inplace: (default: :data:`False`)
inplace (optional): (default: :data:`False`)
The default value of ``inplace`` for each optimization update.
"""
self.impl = impl
Expand All @@ -61,6 +61,9 @@ def step(
loss that is used to compute the gradients to network parameters.
params: (tree of torch.Tensor)
A tree of :class:`torch.Tensor`\s. Specifies what tensors should be optimized.
inplace (optional): (default: :data:`None`)
Whether to update the parameters in-place. If :data:`None`, use the default value
specified in the constructor.
"""
if self.optim_state is None:
self.optim_state = self.impl.init(params)
Expand Down

0 comments on commit 847549b

Please sign in to comment.