remove unused and paddle.fluid imports.

pull/2925/head
zxcd 3 years ago
parent 28733cc60d
commit 756dfb3c13

@@ -11,18 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from collections import defaultdict
 import paddle
-from paddle import _C_ops
-from paddle import _legacy_C_ops
-from paddle.fluid import core
-from paddle.fluid import framework
-from paddle.fluid.dygraph import base as imperative_base
-from paddle.fluid.dygraph import no_grad
-from paddle.fluid.framework import name_scope
-from paddle.fluid.framework import Variable
-from paddle.framework import in_dygraph_mode
 from paddle.optimizer import Optimizer
 
 __all__ = []
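
Side note on the cleanup: every removed paddle.fluid symbol has a public-API counterpart, which is why only two imports survive. A rough mapping, assumed from the Paddle 2.x API rather than stated anywhere in this commit:

import paddle
from paddle.optimizer import Optimizer

# Assumed public-API equivalents of the removed fluid imports:
#   paddle.fluid.dygraph.no_grad       -> paddle.no_grad
#   paddle.fluid.framework.name_scope  -> paddle.static.name_scope
#   paddle.framework.in_dygraph_mode   -> paddle.in_dynamic_mode()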
@@ -62,9 +51,9 @@ class SimpleAdadelta(Optimizer):
             If a parameter has set regularizer using :ref:`api_fluid_ParamAttr` already, \
             the regularization setting here in optimizer will be ignored for this parameter. \
             Otherwise, the regularization setting here in optimizer will take effect. \
             Default None, meaning there is no regularization.
         foreach (bool, optional): whether foreach implementation of optimizer is used. The default value is None.
         maximize (bool, optional): maximize the params based on the objective, instead of minimizing.
             The default value is False.
         name (str, optional): The default value is None. Normally there is no need for user
             to set this property. For more information, please refer to
@@ -72,7 +61,7 @@ class SimpleAdadelta(Optimizer):
 
     Examples:
         .. code-block:: python
 
            import paddle
            from paddlespeech.s2t.training.optimizer.adadelta import SimpleAdadelta
 
@@ -120,8 +109,7 @@ class SimpleAdadelta(Optimizer):
         self.square_avgs = []
         self.acc_deltas = []
 
-    @imperative_base.no_grad
-    @framework.dygraph_only
+    @paddle.no_grad()
     def step(self):
         """Performs a single optimization step.
 
@@ -173,19 +161,16 @@ class SimpleAdadelta(Optimizer):
                 maximize=self._maximize)
 
 
-def adadelta(
-        params_grads,
-        square_avgs,
-        acc_deltas,
-        # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627
-        # setting this as kwarg for now as functional API is compiled by torch/distributed/optim
-        foreach=None,
-        *,
-        learning_rate: float,
-        rho: float,
-        epsilon: float,
-        weight_decay: float,
-        maximize: bool):
+def adadelta(params_grads,
+             square_avgs,
+             acc_deltas,
+             foreach=None,
+             *,
+             learning_rate: float,
+             rho: float,
+             epsilon: float,
+             weight_decay: float,
+             maximize: bool):
     if foreach is None:
         # if foreach is None, set False
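
For orientation, a sketch of how this keyword-only signature would be called; the (param, grad) pairing for params_grads and the hyperparameter values are illustrative assumptions, not shown in the diff:

import paddle

param = paddle.to_tensor([1.0, 2.0])
grad = paddle.to_tensor([0.1, 0.1])
square_avgs = [paddle.zeros_like(param)]   # running average of squared grads
acc_deltas = [paddle.zeros_like(param)]    # running average of squared updates

adadelta([(param, grad)],      # assumed layout: list of (param, grad) pairs
         square_avgs,
         acc_deltas,
         foreach=None,         # None falls back to the per-tensor loop
         learning_rate=1.0,    # everything after `*` must be passed by keyword
         rho=0.9,
         epsilon=1e-6,
         weight_decay=0.0,
         maximize=False)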
