
class LazyAdam extends Adam

Linear Supertypes
  Adam, Optimizer, AnyRef, Any

Instance Constructors

  1. new LazyAdam(learningRate: Float = 0.001f, decay: Schedule[Float] = FixedSchedule[Float](), weightDecay: Float = 0.0f, excludeFromWeightDecayNames: Set[String] = Set.empty, beta1: Float = 0.9f, beta2: Float = 0.999f, useNesterov: Boolean = false, epsilon: Float = 1e-8f, useLocking: Boolean = false, learningRateSummaryTag: String = null, name: String = "LazyAdam")
    Attributes
    protected
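
Because the constructor above is protected, instances are normally obtained through the companion object. The snippet below is a minimal usage sketch, assuming the tf.train.LazyAdam factory exported by the top-level tensorflow_scala API package; any parameter left out keeps the default shown in the constructor signature.

    import org.platanios.tensorflow.api._

    // Build a LazyAdam optimizer. The parameter names mirror the constructor
    // signature above; omitted parameters fall back to their defaults.
    val optimizer = tf.train.LazyAdam(
      learningRate = 0.001f,
      beta1 = 0.9f,
      beta2 = 0.999f,
      epsilon = 1e-8f)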

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def applyDense[T, I](gradient: Output[T], variable: variables.Variable[T], iteration: Option[variables.Variable[I]])(implicit arg0: core.types.TF[T], arg1: core.types.IsNotQuantized[T], arg2: core.types.TF[I], arg3: core.types.IsIntOrLong[I]): UntypedOp
    Definition Classes
    Adam → Optimizer
  5. def applyGradients[T, I](gradientsAndVariables: Seq[(OutputLike[T], variables.Variable[Any])], iteration: Option[variables.Variable[I]] = None, name: String = this.name)(implicit arg0: core.types.TF[T], arg1: LongDefault[I], arg2: core.types.TF[I], arg3: core.types.IsIntOrLong[I]): UntypedOp
    Definition Classes
    Optimizer
    Annotations
    @throws(classOf[IllegalArgumentException])
  6. def applySparse[T, I](gradient: OutputIndexedSlices[T], variable: variables.Variable[T], iteration: Option[variables.Variable[I]])(implicit arg0: core.types.TF[T], arg1: core.types.IsNotQuantized[T], arg2: core.types.TF[I], arg3: core.types.IsIntOrLong[I]): UntypedOp
    Definition Classes
    LazyAdam → Adam → Optimizer
  7. def applySparseDuplicateIndices[T, I](gradient: OutputIndexedSlices[T], variable: variables.Variable[T], iteration: Option[variables.Variable[I]])(implicit arg0: core.types.TF[T], arg1: core.types.IsNotQuantized[T], arg2: core.types.TF[I], arg3: core.types.IsIntOrLong[I]): UntypedOp
    Definition Classes
    Optimizer
  8. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  9. val beta1: Float
    Definition Classes
    LazyAdam → Adam
  10. var beta1Tensor: Output[Float]
    Attributes
    protected
    Definition Classes
    Adam
  11. val beta2: Float
    Definition Classes
    LazyAdam → Adam
  12. var beta2Tensor: Output[Float]
    Attributes
    protected
    Definition Classes
    Adam
  13. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native() @HotSpotIntrinsicCandidate()
  14. def computeGradients[T](loss: Output[T], lossGradients: Seq[OutputLike[T]] = null, variables: Set[variables.Variable[Any]] = null, gradientsGatingMethod: GatingMethod = Gradients.OpGating, gradientsAggregationMethod: AggregationMethod = Gradients.AddAggregationMethod, colocateGradientsWithOps: Boolean = false)(implicit arg0: core.types.TF[T], arg1: core.types.IsFloatOrDouble[T]): Seq[(OutputLike[T], variables.Variable[Any])]
    Definition Classes
    Optimizer
    Annotations
    @throws(classOf[IllegalArgumentException])
  15. def createSlots(variables: Seq[variables.Variable[Any]]): Unit
    Definition Classes
    Adam → Optimizer
  16. val decay: Schedule[Float]
    Definition Classes
    LazyAdam → Adam
  17. def doWeightDecay[V](variable: variables.Variable[V])(implicit arg0: core.types.TF[V]): Boolean
    Attributes
    protected
    Definition Classes
    Adam
  18. val epsilon: Float
    Definition Classes
    LazyAdam → Adam
  19. var epsilonTensor: Output[Float]
    Attributes
    protected
    Definition Classes
    Adam
  20. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  21. def equals(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef → Any
  22. val excludeFromWeightDecayNames: Set[String]
    Definition Classes
    LazyAdam → Adam
  23. def finish(updateOps: Set[UntypedOp], nameScope: String): UntypedOp
    Definition Classes
    Adam → Optimizer
  24. def getBeta1[V](variable: variables.Variable[V])(implicit arg0: core.types.TF[V]): Output[V]
    Attributes
    protected
    Definition Classes
    Adam
  25. def getBeta2[V](variable: variables.Variable[V])(implicit arg0: core.types.TF[V]): Output[V]
    Attributes
    protected
    Definition Classes
    Adam
  26. def getBetaPowerAccumulators: (variables.Variable[Float], variables.Variable[Float])
    Attributes
    protected
    Definition Classes
    Adam
  27. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  28. def getEpsilon[V](variable: variables.Variable[V])(implicit arg0: core.types.TF[V]): Output[V]
    Attributes
    protected
    Definition Classes
    Adam
  29. def getLearningRate[V, I](variable: variables.Variable[V], iteration: Option[variables.Variable[I]])(implicit arg0: core.types.TF[V], arg1: core.types.TF[I], arg2: core.types.IsIntOrLong[I]): Output[V]
    Attributes
    protected
    Definition Classes
    Adam
  30. final def getNonSlotVariable[T](name: String, graph: core.Graph = null): variables.Variable[T]
    Attributes
    protected
    Definition Classes
    Optimizer
  31. final def getNonSlotVariables: Iterable[variables.Variable[Any]]
    Attributes
    protected
    Definition Classes
    Optimizer
  32. final def getOrCreateNonSlotVariable[T](name: String, initialValue: tensors.Tensor[T], colocationOps: Set[UntypedOp] = Set.empty, ignoreExisting: Boolean = false)(implicit arg0: core.types.TF[T]): variables.Variable[T]
    Attributes
    protected
    Definition Classes
    Optimizer
  33. final def getSlot[T, R](name: String, variable: variables.Variable[T])(implicit arg0: core.types.TF[R]): variables.Variable[R]
    Attributes
    protected
    Definition Classes
    Optimizer
  34. final def getSlot[T, R](name: String, variable: variables.Variable[T], dataType: core.types.DataType[R], initializer: Initializer, shape: core.Shape, variableScope: String)(implicit arg0: core.types.TF[R]): variables.Variable[R]
    Attributes
    protected
    Definition Classes
    Optimizer
  35. def getWeightDecay[V](variable: variables.Variable[V])(implicit arg0: core.types.TF[V]): Output[V]
    Attributes
    protected
    Definition Classes
    Adam
  36. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  37. val ignoreDuplicateSparseIndices: Boolean
    Definition Classes
    LazyAdam → Adam → Optimizer
  38. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  39. val learningRate: Float
    Definition Classes
    LazyAdam → Adam
  40. val learningRateSummaryTag: String
    Definition Classes
    LazyAdam → Adam
  41. var learningRateTensor: Output[Float]
    Attributes
    protected
    Definition Classes
    Adam
  42. def minimize[T, I](loss: Output[T], lossGradients: Seq[OutputLike[T]] = null, variables: Set[variables.Variable[Any]] = null, gradientsGatingMethod: GatingMethod = Gradients.OpGating, gradientsAggregationMethod: AggregationMethod = Gradients.AddAggregationMethod, colocateGradientsWithOps: Boolean = false, iteration: Option[variables.Variable[I]] = None, name: String = "Minimize")(implicit arg0: core.types.TF[T], arg1: core.types.IsFloatOrDouble[T], arg2: LongDefault[I], arg3: core.types.TF[I], arg4: core.types.IsIntOrLong[I]): UntypedOp
    Definition Classes
    Optimizer
    Annotations
    @throws(classOf[IllegalArgumentException])
  43. val name: String
    Definition Classes
    LazyAdam → Adam → Optimizer
  44. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  45. final val nonSlotVariables: Map[(String, Option[core.Graph]), variables.Variable[Any]]
    Attributes
    protected
    Definition Classes
    Optimizer
  46. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  47. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  48. def prepare[I](iteration: Option[variables.Variable[I]])(implicit arg0: core.types.TF[I], arg1: core.types.IsIntOrLong[I]): Unit
    Definition Classes
    Adam → Optimizer
  49. final def slotNames: Set[String]
    Attributes
    protected
    Definition Classes
    Optimizer
  50. final val slots: Map[String, Map[variables.Variable[Any], variables.Variable[Any]]]
    Attributes
    protected
    Definition Classes
    Optimizer
  51. final def state: Seq[variables.Variable[Any]]
    Definition Classes
    Optimizer
  52. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  53. def toString(): String
    Definition Classes
    AnyRef → Any
  54. val useLocking: Boolean
    Definition Classes
    LazyAdam → Adam → Optimizer
  55. val useNesterov: Boolean
    Definition Classes
    LazyAdam → Adam
  56. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  57. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  58. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  59. val weightDecay: Float
    Definition Classes
    LazyAdam → Adam
  60. var weightDecayTensor: Output[Float]
    Attributes
    protected
    Definition Classes
    Adam
  61. final def zerosSlot[T](name: String, variable: variables.Variable[T], variableScope: String)(implicit arg0: core.types.TF[T]): variables.Variable[T]
    Attributes
    protected
    Definition Classes
    Optimizer
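
The usual entry points are minimize, which chains computeGradients and applyGradients internally, or those two methods called separately when the gradients need to be inspected or transformed first. Note that applySparse is the member LazyAdam overrides relative to Adam, which is where its lazy handling of sparse gradient updates lives. The sketch below assumes the same tf.train.LazyAdam factory as above and a scalar Float loss already defined in the graph; the value names are illustrative only.

    import org.platanios.tensorflow.api._

    val loss: Output[Float] = ???  // scalar loss built elsewhere in the graph

    val optimizer = tf.train.LazyAdam(learningRate = 0.01f)

    // One-step form: build the training op directly from the loss.
    val trainOp = optimizer.minimize(loss)

    // Two-step form: compute the gradients first, then apply them, leaving
    // room to clip or log them in between.
    val gradientsAndVariables = optimizer.computeGradients(loss)
    val applyOp = optimizer.applyGradients(gradientsAndVariables)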

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable]) @Deprecated @deprecated
    Deprecated
    see the corresponding Javadoc for more information.
