class NounPhraseEntityTypeLabeler extends DocumentAnnotator
Instance Constructors
- new NounPhraseEntityTypeLabeler(url: URL)
- new NounPhraseEntityTypeLabeler(file: File)
- new NounPhraseEntityTypeLabeler(stream: InputStream)
- new NounPhraseEntityTypeLabeler()
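The constructors above load a trained labeler from a URL, a File, or an InputStream, or create one with no arguments. A minimal construction sketch follows; the package import, model file name, and classpath resource path are assumptions for illustration, not taken from this page.

```scala
// Package import assumed, not shown on this page; adjust to the class's actual location.
// import cc.factorie.app.nlp.phrase.NounPhraseEntityTypeLabeler
import java.io.File
import java.net.URL

// Hypothetical model locations; replace with a real serialized model.
val fromFile   = new NounPhraseEntityTypeLabeler(new File("NounPhraseEntityType.factorie"))
val fromUrl    = new NounPhraseEntityTypeLabeler(new URL("file:///models/NounPhraseEntityType.factorie"))
val fromStream = new NounPhraseEntityTypeLabeler(
  classOf[NounPhraseEntityTypeLabeler].getResourceAsStream("/NounPhraseEntityType.factorie"))

// No-argument constructor; whether it loads a default model or starts untrained
// is not stated in this listing.
val labeler = new NounPhraseEntityTypeLabeler()
```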
Value Members
- final def !=(arg0: AnyRef): Boolean
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: AnyRef): Boolean
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- def deserialize(stream: InputStream): Unit
- def deserialize(file: File): Unit
- def documentAnnotationString(document: Document): String
- def entityTypeIndex(mention: NounPhrase): Int
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- def isWordNetPerson(token: Token): Boolean
- val lexicons: Seq[Lexicon]
- def mentionAnnotationString(mention: Mention): String
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def phraseAnnotationString(mention: Phrase): String
- def postAttrs: Iterable[Class[_]]
- def prereqAttrs: Iterable[Class[_]]
- def processNounPhrase(mention: NounPhrase): Unit
- def serialize(stream: OutputStream): Unit
- def serialize(filename: String): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- def tokenAnnotationString(token: Token): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
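The serialize and deserialize members above provide persistence of the labeler's parameters. A round-trip sketch follows; the file name is arbitrary, and the assumption that a freshly constructed labeler can have parameters loaded into it with deserialize is not confirmed by this listing.

```scala
import java.io.File

// Persist a labeler's parameters and reload them into a fresh instance.
// The file name is hypothetical.
def roundTrip(labeler: NounPhraseEntityTypeLabeler): NounPhraseEntityTypeLabeler = {
  labeler.serialize("entity-type-labeler.factorie")              // serialize(filename: String)
  val reloaded = new NounPhraseEntityTypeLabeler()
  reloaded.deserialize(new File("entity-type-labeler.factorie")) // deserialize(file: File)
  reloaded
}
```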
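For labeling itself, processNounPhrase assigns an entity type to a single noun phrase in place, entityTypeIndex exposes the predicted type's index, and the annotation-string helpers render results for debugging output. The sketch below assumes NounPhrase extends Phrase, that the phrase already carries the attributes named in prereqAttrs (produced by upstream annotators), and that the phrase.tokens and token.string accessors follow the usual FACTORIE span API; none of that is shown in this listing.

```scala
// Label one noun phrase in place and print what was predicted.
def labelAndShow(labeler: NounPhraseEntityTypeLabeler, phrase: NounPhrase): Unit = {
  labeler.processNounPhrase(phrase)                // assigns the predicted entity type
  println(labeler.entityTypeIndex(phrase))         // index of the predicted type
  println(labeler.phraseAnnotationString(phrase))  // human-readable annotation string
  phrase.tokens.foreach { t =>                     // per-token check against WordNet person lists
    println(s"${t.string}: wordnetPerson=${labeler.isWordNetPerson(t)}")
  }
}
```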