2013-04-21 2 views
0
/**
 * Demonstrates a full Stanford CoreNLP annotation pass over a short sentence:
 * tokenization, sentence splitting, POS tagging, lemmatization, NER,
 * constituency parsing, dependency parsing, and coreference resolution.
 *
 * <p>Note: loading all of these annotators (especially the NER and parse
 * models) requires a large JVM heap — run with e.g. {@code -Xmx3g}.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
    // setProperty (not the raw Map put) is the type-safe way to
    // configure java.util.Properties with String keys/values.
    Properties props = new Properties();
    props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner, parse, dcoref");
    StanfordCoreNLP pipeline = new StanfordCoreNLP(props);

    // The text to analyze.
    String text = "the quick fox jumps over the lazy dog";

    // Create an empty Annotation wrapping just the raw text.
    Annotation document = new Annotation(text);

    // Run every configured annotator over the document (mutates it in place).
    pipeline.annotate(document);

    // All sentences found in the document. A CoreMap is essentially a Map
    // keyed by Class objects, with values of the corresponding custom types.
    List<CoreMap> sentences = document.get(SentencesAnnotation.class);

    for (CoreMap sentence : sentences) {
        // Traverse the tokens of the current sentence. A CoreLabel is a
        // CoreMap with additional token-specific accessors.
        for (CoreLabel token : sentence.get(TokensAnnotation.class)) {
            // Surface text of the token.
            String word = token.get(TextAnnotation.class);
            // Part-of-speech tag of the token.
            String pos = token.get(PartOfSpeechAnnotation.class);
            // Named-entity label of the token ("O" when not an entity).
            String ne = token.get(NamedEntityTagAnnotation.class);
        }

        // Constituency parse tree of the current sentence.
        Tree tree = sentence.get(TreeAnnotation.class);

        // Collapsed, CC-processed Stanford dependency graph of the sentence.
        SemanticGraph dependencies = sentence.get(CollapsedCCProcessedDependenciesAnnotation.class);
    }

    // Coreference link graph: each chain stores the set of mentions that
    // corefer, plus a method returning the most representative mention.
    // Both sentence and token offsets start at 1!
    Map<Integer, CorefChain> graph =
        document.get(CorefChainAnnotation.class);
}

}

Создание объекта CoreNLP в Java

Я хочу, чтобы запустить свою программу, но я получаю эту ошибку

Loading classifier from edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz ... Exception in thread "main" java.lang.OutOfMemoryError: Java heap space 
at java.lang.StringBuilder.toString(StringBuilder.java:405) 
at java.io.ObjectInputStream$BlockDataInputStream.readUTFBody(ObjectInputStream.java:3066) 
at java.io.ObjectInputStream$BlockDataInputStream.readUTF(ObjectInputStream.java:2862) 
at java.io.ObjectInputStream.readString(ObjectInputStream.java:1636) 
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1339) 
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370) 
at java.util.HashMap.readObject(HashMap.java:1154) 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
at java.lang.reflect.Method.invoke(Method.java:601) 
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1004) 
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1891) 
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796) 
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1348) 
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1989) 
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1913) 
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796) 
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1348) 
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370) 
at edu.stanford.nlp.ie.crf.CRFClassifier.loadClassifier(CRFClassifier.java:2255) 
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1444) 
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1421) 
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1500) 
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1487) 
at edu.stanford.nlp.ie.crf.CRFClassifier.getClassifier(CRFClassifier.java:2386) 
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifierFromPath(ClassifierCombiner.java:130) 
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifiers(ClassifierCombiner.java:116) 
at edu.stanford.nlp.ie.ClassifierCombiner.<init>(ClassifierCombiner.java:98) 
at edu.stanford.nlp.ie.NERClassifierCombiner.<init>(NERClassifierCombiner.java:64) 
at edu.stanford.nlp.pipeline.StanfordCoreNLP$6.create(StanfordCoreNLP.java:500) 
at edu.stanford.nlp.pipeline.StanfordCoreNLP$6.create(StanfordCoreNLP.java:471) 

Я не знаю, как увеличить размер кучи, и не понимаю, связана ли эта ошибка с какой-то другой проблемой. Кто-нибудь может мне помочь? (извините, мой английский не очень хорош)

ответ

1

Если ваш проект — Java-проект: щёлкните правой кнопкой мыши по проекту ---> выберите «Свойства» ---> нажмите «Запуск» (Run) ---> с правой стороны вы увидите поле «Параметры VM» — туда нужно ввести настройки кучи.

Пример:

-Xmx512m 

Примечание: если 512 МБ недостаточно, увеличьте значение до 1024 и попробуйте снова.

+0

Но, на самом деле, вам может понадобиться 3g, если это 64-битная Java. –

Смежные вопросы