Instance Constructors
-
new
NeuralModel(baseModel: SpanModel[L, L2, W], labelFeaturizer: RefinedFeaturizer[L, W, Feature], surfaceFeaturizer: IndexedSplitSpanFeaturizer[W], transform: Transform[FeatureVector, DenseVector[Double]], numOutputs: Int, initialFeatureVal: (Feature) ⇒ Option[Double] = ...)
Value Members
-
final
def
!=(arg0: AnyRef): Boolean
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: AnyRef): Boolean
-
final
def
==(arg0: Any): Boolean
-
final
def
accumulateCounts(inf: Inference, d: TreeInstance[L, W], accum: ExpectedCounts, scale: Double): Unit
-
final
def
asInstanceOf[T0]: T0
-
def
cacheFeatureWeights(weights: DenseVector[Double], suffix: String = ""): Unit
-
def
clone(): AnyRef
-
def
constrainer: Factory[L, W]
-
final
def
eq(arg0: AnyRef): Boolean
-
def
equals(arg0: Any): Boolean
-
def
expectedCountsToObjective(ecounts: StandardExpectedCounts[Feature]): (Double, DenseVector[Double])
-
def
extractParser(weights: DenseVector[Double])(implicit deb: Debinarizer[L]): Parser[L, W]
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
def
hashCode(): Int
-
def
inferenceFromWeights(weights: DenseVector[Double]): Inference
-
def
initialValueForFeature(f: Feature): Double
-
final
def
isInstanceOf[T0]: Boolean
-
def
lexicon: Lexicon[L, W]
-
final
def
ne(arg0: AnyRef): Boolean
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
def
numFeatures: Int
-
def
readCachedFeatureWeights(suffix: String = ""): Option[DenseVector[Double]]
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
-
def
weightsCacheName: String
Inherited from AnyRef
Inherited from Any
The neural model is really just a