moved future iterations outside the loop as before, added some check msgs, should probably try to compact more annotations
parent ee1603dc50
commit 8d77c73a88
@@ -222,7 +222,7 @@ public class Datahandler {
hlStatsMessages.put(str, hlStatsMessages.size());
}
}
int capacity = 5500;
int capacity = 2500;
hlStatsMessages.keySet().forEach(str -> {
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
@@ -272,6 +272,7 @@ public class Datahandler {
String newPrimary = similarityMatrixes.get(0).getPrimaryString();
int evaluationCap = 50000;
boolean hitCap = false;
int iterator = 0;
for (SimilarityMatrix SMX : similarityMatrixes) {
if (!newPrimary.equals(SMX.getPrimaryString())) {
newPrimary = SMX.getPrimaryString();
@@ -289,12 +290,14 @@ public class Datahandler {
hitCap = true;
}
}
System.out.println("similarityMatrixes size: " + similarityMatrixes.size() + "\niterator: " + iterator);
iterator++;
}
return strmapreturn;
}

private ConcurrentMap<Integer, String> addSMXToMapReturn(ConcurrentMap<Integer, String> strmapreturn, SimilarityMatrix SMX) {
System.out.println("RelationCounter cap: " + RelationCounter);
//System.out.println("RelationCounter cap: " + RelationCounter);
boolean related = RelationCounter > 0;
if (related) {
strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString());
@@ -318,7 +321,6 @@ public class Datahandler {
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(str, null);
List<SimilarityMatrix> smxReturnList = new ArrayList();
ExecutorService smxService = Executors.newFixedThreadPool(6);
List<String> randomIterationComparision = new ArrayList();
int iteratecap = strCacheLocal.size() > 150 ? strCacheLocal.size() - 150 : strCacheLocal.size();
int iterator = ThreadLocalRandom.current().nextInt(0, iteratecap);
@@ -332,6 +334,7 @@ public class Datahandler {
}
iterated++;
}
List<Future<SimilarityMatrix>> futureSMX = new ArrayList(randomIterationComparision.size() + 1);
for (String str1 : randomIterationComparision) {
if (!str.equals(str1)) {
SimilarityMatrix SMXInit = new SimilarityMatrix(str, str1);
@@ -348,16 +351,19 @@ public class Datahandler {
pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
}
ExecutorService smxService = Executors.newSingleThreadExecutor();
Future<SimilarityMatrix> future = smxService.submit(worker);
try {
SimilarityMatrix SMX = future.get();
if (SMX != null) {
smxReturnList.add(SMX);
//System.out.println("added SMX: " + SMX.getPrimaryString() + "\n" + SMX.getSecondaryString() + "\n\n");
}
} catch (InterruptedException | ExecutionException ex) {
//System.out.println("failed future get");
futureSMX.add(future);
}
}
for (Future<SimilarityMatrix> future : futureSMX) {
try {
SimilarityMatrix SMX = future.get();
if (SMX != null) {
smxReturnList.add(SMX);
}
} catch (InterruptedException | ExecutionException ex) {
//System.out.println("failed future get");
}
}
return smxReturnList;
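The hunk above is where the change named in the commit message lands: workers are submitted to an executor, the resulting Futures are accumulated in futureSMX, and the blocking get() calls happen in a separate loop afterwards. Below is a minimal, self-contained sketch of that submit-first, resolve-later pattern in plain Java; the Worker class and the result type are illustrative placeholders, not the actual SimilarityMatrix API.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class SubmitThenCollect {

    // Illustrative stand-in for the similarity worker; not the real class.
    static class Worker implements Callable<String> {
        private final String left, right;
        Worker(String left, String right) { this.left = left; this.right = right; }
        @Override
        public String call() { return left + " ~ " + right; }
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        List<Future<String>> futures = new ArrayList<>();

        // Phase 1: submit all workers without blocking on their results.
        for (String candidate : List.of("a", "b", "c")) {
            futures.add(pool.submit(new Worker("query", candidate)));
        }

        // Phase 2: resolve the futures in a separate loop, as the hunk above does.
        List<String> results = new ArrayList<>();
        for (Future<String> f : futures) {
            try {
                results.add(f.get());
            } catch (ExecutionException ex) {
                // A failed worker only loses its own result.
            }
        }
        pool.shutdown();
        System.out.println(results);
    }
}
```

Submitting everything before resolving anything keeps the pool busy instead of serializing on each get(), which appears to be the motivation for moving the resolution loop outside.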
@@ -369,12 +375,16 @@ public class Datahandler {
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(6).makeMap();
List<SimilarityMatrix> strSenseRelationMap = new ArrayList();
int iterator = 0;
for (String str : strmap.values()) {
List<SimilarityMatrix> localNoSentenceRelationList = StrComparringNoSentenceRelationMap(strCacheLocal, str,
localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
for (SimilarityMatrix SMX : localNoSentenceRelationList) {
strSenseRelationMap.add(SMX);
System.out.println("added SMX: " + SMX.getPrimaryString() + "\n" + SMX.getSecondaryString() + "\nstrSenseRelationMap.size(): "
+ strSenseRelationMap.size() + "\nstrmap size: " + strmap.size() + "\niterator: " + iterator + "\n\n");
}
iterator++;
}
Collections.sort(strSenseRelationMap, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString()));
strmapreturn = futuresReturnOverallEvaluation(strSenseRelationMap, strmapreturn);
@@ -746,6 +756,7 @@ public class Datahandler {
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(4).makeMap();
ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap();
ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
System.out.println("finished getMultipleCoreDocumentsWaySuggestion");
strmap.values().parallelStream().forEach(str -> {
Annotation strAnno1 = new Annotation(str);
Annotationspipeline.put(str, strAnno1);
@@ -753,7 +764,6 @@ public class Datahandler {
AnnotationspipelineSentiment.put(str, strAnno2);
stringCache.put(stringCache.size() + 1, str);
});
System.out.println("pre iterator annotation update \n");
pipeline.annotate(Annotationspipeline.values());
pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
Annotationspipeline.entrySet().forEach(pipelineEntry -> {
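The hunk above builds every Annotation first and then passes the whole collection to pipeline.annotate(...) in a single call, rather than annotating one string at a time. A minimal sketch of that bulk-annotation pattern follows; the tokenize/ssplit/pos annotator list and thread count here are assumptions for the example, not the configuration used by Datahandler.

```java
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class BulkAnnotateSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Assumed minimal annotator chain; the real pipeline configuration is not in the diff.
        props.setProperty("annotators", "tokenize,ssplit,pos");
        props.setProperty("threads", "4");
        StanfordCoreNLP pipeline = new StanfordCoreNLP(props);

        // Build every Annotation up front, like Annotationspipeline in the hunk above.
        List<Annotation> batch = new ArrayList<>();
        for (String text : List.of("First sentence.", "Second sentence.")) {
            batch.add(new Annotation(text));
        }

        // One call annotates the whole collection; each Annotation is mutated in place.
        pipeline.annotate(batch);

        for (Annotation ann : batch) {
            System.out.println(ann.get(CoreAnnotations.TokensAnnotation.class).size() + " tokens");
        }
    }
}
```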
@@ -790,22 +800,25 @@ public class Datahandler {

public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
AnnotationCollector<Annotation> annCollector = new AnnotationCollector();
for (final String exampleString : str) {
// System.out.println("exampleString: " + exampleString + "\n");
for (String exampleString : str) {
localNLP.annotate(new Annotation(exampleString), annCollector);
annCollector.i++;
// System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
//System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
}
try {
Thread.sleep(10000);
Thread.sleep(8000);
} catch (InterruptedException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
ConcurrentMap<String, CoreDocument> annotationreturnMap = new MapMaker().concurrencyLevel(6).makeMap();
int iterator = 0;
for (Annotation ann : annCollector.annotationsT) {
if (ann != null) {
ann.compact();
CoreDocument CD = new CoreDocument(ann);
annotationreturnMap.put(CD.text(), CD);
//System.out.println("CD text:" + CD.text() + "\niterator: " + iterator + "\nsize: " + annCollector.annotationsT.size());
iterator++;
}
}
return annotationreturnMap;
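getMultipleCoreDocumentsWaySuggestion uses an AnnotationCollector that the diff references (annCollector.annotationsT, annCollector.i) but does not show. A plausible minimal shape for it, assuming it is simply a Consumer handed to the asynchronous annotate(annotation, callback) call seen above; only the annotationsT and i members come from the diff, the rest is a sketch.

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;

// Assumed shape of the collector passed to localNLP.annotate(new Annotation(...), annCollector).
class AnnotationCollector<T> implements Consumer<T> {

    // Counter the caller bumps once per submitted string (annCollector.i++ in the diff).
    int i = 0;

    // Completed annotations, filled from the pipeline's callback thread, hence the synchronized list.
    final List<T> annotationsT = Collections.synchronizedList(new ArrayList<>());

    @Override
    public void accept(T annotation) {
        // Invoked when an annotation finishes; just store it for the collection loop later.
        annotationsT.add(annotation);
    }
}
```

With a collector like this, the Thread.sleep(8000) above is a crude wait for the callbacks to drain; a CountDownLatch sized to str.size() and counted down in accept() would be a more deterministic alternative.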