case class BatchNormalization[T](name: String, axis: Int = -1, momentum: Float = 0.9f, epsilon: Float = 1e-3f, center: Boolean = true, scale: Boolean = true, betaInitializer: tf.VariableInitializer = tf.ZerosInitializer, gammaInitializer: tf.VariableInitializer = tf.OnesInitializer, movingMeanInitializer: tf.VariableInitializer = tf.ZerosInitializer, movingVarianceInitializer: tf.VariableInitializer = tf.OnesInitializer, betaRegularizer: tf.VariableRegularizer = null, gammaRegularizer: tf.VariableRegularizer = null, renorm: Boolean = false, renormMomentum: Float = 0.9f, fused: Boolean = true)(implicit evidence$1: TF[T], evidence$2: IsDecimal[T]) extends Layer[ops.Output[T], ops.Output[T]] with Product with Serializable

Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. BatchNormalization
  2. Serializable (java.io.Serializable)
  3. Serializable (scala.Serializable)
  4. Product
  5. Equals
  6. Layer
  7. AnyRef
  8. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new BatchNormalization(name: String, axis: Int = -1, momentum: Float = 0.9f, epsilon: Float = 1e-3f, center: Boolean = true, scale: Boolean = true, betaInitializer: tf.VariableInitializer = tf.ZerosInitializer, gammaInitializer: tf.VariableInitializer = tf.OnesInitializer, movingMeanInitializer: tf.VariableInitializer = tf.ZerosInitializer, movingVarianceInitializer: tf.VariableInitializer = tf.OnesInitializer, betaRegularizer: tf.VariableRegularizer = null, gammaRegularizer: tf.VariableRegularizer = null, renorm: Boolean = false, renormMomentum: Float = 0.9f, fused: Boolean = true)(implicit arg0: TF[T], arg1: IsDecimal[T])

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. def +(other: Layer[ops.Output[T], ops.Output[T]]): Concatenate[ops.Output[T], ops.Output[T]]
    Definition Classes
    Layer
  4. def ++(others: Seq[Layer[ops.Output[T], ops.Output[T]]]): Concatenate[ops.Output[T], ops.Output[T]]
    Definition Classes
    Layer
  5. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  6. def >>[S](other: Layer[ops.Output[T], S]): Compose[ops.Output[T], ops.Output[T], S]
    Definition Classes
    Layer
  7. def apply(input: ops.Output[T])(implicit mode: Mode): ops.Output[T]
    Definition Classes
    Layer
  8. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  9. def assignMovingAverage(variable: Variable[Float], value: ops.Output[Float], momentum: ops.Output[Float]): ops.Output[Float]
    Attributes
    protected
  10. val axis: Int
  11. val betaInitializer: tf.VariableInitializer
  12. val betaRegularizer: tf.VariableRegularizer
  13. val center: Boolean
  14. def clone(): AnyRef
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )
  15. def compose[S](other: Layer[ops.Output[T], S]): Compose[ops.Output[T], ops.Output[T], S]
    Definition Classes
    Layer
  16. def concatenate(others: Layer[ops.Output[T], ops.Output[T]]*): Concatenate[ops.Output[T], ops.Output[T]]
    Definition Classes
    Layer
  17. final def currentStep: ops.Output[Long]
    Definition Classes
    Layer
  18. val epsilon: Float
  19. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  20. def finalize(): Unit
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  21. def forward(input: ops.Output[T])(implicit mode: Mode): ops.Output[T]
    Definition Classes
    Layer
  22. def forwardWithoutContext(input: ops.Output[T])(implicit mode: Mode): ops.Output[T]
    Definition Classes
    BatchNormalizationLayer
  23. val fused: Boolean
  24. val gammaInitializer: tf.VariableInitializer
  25. val gammaRegularizer: tf.VariableRegularizer
  26. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  27. final def getParameter[P](name: String, shape: core.Shape, initializer: Initializer = null, regularizer: Regularizer = null, trainable: Boolean = true, reuse: Reuse = ReuseOrCreateNew, collections: Set[Key[ops.variables.Variable[Any]]] = Set.empty, cachingDevice: (OpSpecification) ⇒ String = null)(implicit arg0: core.types.TF[P]): ops.Output[P]
    Definition Classes
    Layer
  28. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  29. val layerType: String
    Definition Classes
    BatchNormalizationLayer
  30. def map[MR](mapFn: (ops.Output[T]) ⇒ MR): Layer[ops.Output[T], MR]
    Definition Classes
    Layer
  31. val momentum: Float
  32. val movingMeanInitializer: tf.VariableInitializer
  33. val movingVarianceInitializer: tf.VariableInitializer
  34. val name: String
    Definition Classes
    BatchNormalizationLayer
  35. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  36. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  37. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  38. val renorm: Boolean
  39. val renormMomentum: Float
  40. val scale: Boolean
  41. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  42. def toString(): String
    Definition Classes
    Layer → AnyRef → Any
  43. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  44. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  45. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )

Inherited from Serializable (java.io.Serializable)

Inherited from Serializable (scala.Serializable)

Inherited from Product

Inherited from Equals

Inherited from Layer[ops.Output[T], ops.Output[T]]

Inherited from AnyRef

Inherited from Any

Ungrouped