remove useless and fluid imports.

pull/2925/head
zxcd 3 years ago
parent 28733cc60d
commit 756dfb3c13

@@ -11,18 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from collections import defaultdict
 import paddle
-from paddle import _C_ops
-from paddle import _legacy_C_ops
-from paddle.fluid import core
-from paddle.fluid import framework
-from paddle.fluid.dygraph import base as imperative_base
-from paddle.fluid.dygraph import no_grad
-from paddle.fluid.framework import name_scope
-from paddle.fluid.framework import Variable
-from paddle.framework import in_dygraph_mode
 from paddle.optimizer import Optimizer

 __all__ = []
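
For context on the removals above, a minimal sketch of the public Paddle 2.x spellings that conventionally replace the deleted fluid imports. These mappings are inferred, not taken from this diff; verify them against the Paddle version in use:

# Hypothetical migration notes for the imports deleted above.
import paddle

# paddle.fluid.dygraph.no_grad            -> paddle.no_grad
# paddle.fluid.framework.in_dygraph_mode  -> paddle.in_dynamic_mode
# paddle.fluid.framework.Variable         -> paddle.static.Variable
# paddle.fluid.framework.name_scope       -> paddle.static.name_scope

with paddle.no_grad():              # usable as decorator or context manager
    x = paddle.ones([2, 2])         # no gradient is recorded here
print(paddle.in_dynamic_mode())     # True by default in Paddle 2.x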
@@ -120,8 +109,7 @@ class SimpleAdadelta(Optimizer):
         self.square_avgs = []
         self.acc_deltas = []

-    @imperative_base.no_grad
-    @framework.dygraph_only
+    @paddle.no_grad()
     def step(self):
         """Performs a single optimization step.
@@ -173,19 +161,16 @@ class SimpleAdadelta(Optimizer):
                          maximize=self._maximize)


-def adadelta(
-    params_grads,
-    square_avgs,
-    acc_deltas,
-    # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627
-    # setting this as kwarg for now as functional API is compiled by torch/distributed/optim
-    foreach=None,
-    *,
-    learning_rate: float,
-    rho: float,
-    epsilon: float,
-    weight_decay: float,
-    maximize: bool):
+def adadelta(params_grads,
+             square_avgs,
+             acc_deltas,
+             foreach=None,
+             *,
+             learning_rate: float,
+             rho: float,
+             epsilon: float,
+             weight_decay: float,
+             maximize: bool):
     if foreach is None:
         # if foreach is None, set False
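
For readers unfamiliar with the functional API, an illustrative single-tensor Adadelta update showing what the keyword-only parameters above feed into. This is the textbook rule, sketched as a hypothetical helper, not the body of adadelta() in this PR:

import paddle

def adadelta_update(param, grad, square_avg, acc_delta, *,
                    learning_rate, rho, epsilon, weight_decay, maximize):
    # One Adadelta step; square_avg and acc_delta persist across calls.
    if weight_decay != 0.0:
        grad = grad + weight_decay * param   # L2 penalty folded into the grad
    if maximize:
        grad = -grad                         # ascend instead of descend
    # E[g^2]_t = rho * E[g^2]_{t-1} + (1 - rho) * g^2
    paddle.assign(rho * square_avg + (1.0 - rho) * grad * grad,
                  output=square_avg)
    # step adapts via the ratio of accumulated deltas to accumulated grads
    delta = (paddle.sqrt(acc_delta + epsilon)
             / paddle.sqrt(square_avg + epsilon)) * grad
    # E[dx^2]_t = rho * E[dx^2]_{t-1} + (1 - rho) * dx^2
    paddle.assign(rho * acc_delta + (1.0 - rho) * delta * delta,
                  output=acc_delta)
    paddle.assign(param - learning_rate * delta, output=param)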
