Cache independent values for sentiment analysis
parent 5ccadbffbd
commit a4139e4ae4
@@ -7,6 +7,7 @@ package FunctionLayer;

import DataLayer.DataMapper;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import FunctionLayer.StanfordParser.SentimentValueCache;
import com.google.common.base.Stopwatch;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
@@ -69,6 +70,7 @@ public class Datahandler {
private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
private static ConcurrentMap<String, Integer> conversationMatchMap;
private static ConcurrentMap<String, Integer> conversationUserMatchMap;
private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
private static final ConcurrentMap<String, Integer> locateFaultySentences = new MapMaker().concurrencyLevel(6).makeMap();
private static final ConcurrentMap<String, Double> mapUdate = new MapMaker().concurrencyLevel(4).makeMap();
private final static ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(4).makeMap();
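The new sentimentCachingMap above memoizes one SentimentValueCache per sentence so that later pairwise comparisons can reuse parsed state instead of recomputing it. A minimal sketch of that memoization pattern, where AnalysisResult and analyzeSentence are hypothetical stand-ins for SentimentValueCache and the Stanford pipeline work:

import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

// Sketch of the per-sentence memoization behind sentimentCachingMap: the
// expensive analysis runs at most once per sentence and is reused afterwards.
public class SentenceCacheSketch {

    private static final ConcurrentMap<String, AnalysisResult> cache =
            new MapMaker().concurrencyLevel(6).makeMap();

    static final class AnalysisResult {
        final int tokenCount;
        AnalysisResult(int tokenCount) { this.tokenCount = tokenCount; }
    }

    // Stand-in for the costly parse/sentiment pass.
    private static AnalysisResult analyzeSentence(String sentence) {
        return new AnalysisResult(sentence.split("\\s+").length);
    }

    // computeIfAbsent runs the expensive step only on a cache miss.
    static AnalysisResult cachedAnalysis(String sentence) {
        return cache.computeIfAbsent(sentence, SentenceCacheSketch::analyzeSentence);
    }

    public static void main(String[] args) {
        System.out.println(cachedAnalysis("the quick brown fox").tokenCount); // computed
        System.out.println(cachedAnalysis("the quick brown fox").tokenCount); // served from cache
    }
}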
@@ -233,7 +235,7 @@ public class Datahandler {
hlStatsMessages.put(str, hlStatsMessages.size());
}
}
int capacity = 15000;
int capacity = 5550;
hlStatsMessages.keySet().forEach(str -> {
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
@@ -278,28 +280,40 @@ public class Datahandler {
}
}

private final static void futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str) {
private static void futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str) {
for (Entry<String, Future<SimilarityMatrix>> entrySet : entries.entrySet()) {
String transmittedStr = entrySet.getKey();
final SimilarityMatrix getSMX = retrieveFutureSMX(entrySet.getValue());
SimilarityMatrix getSMX = retrieveFutureSMX(entrySet.getValue());
if (handleRetrievedSMX(getSMX, str, transmittedStr)) {
break;
}
try {
SentimentValueCache cacheValue1 = getSMX.getCacheValue1();
SentimentValueCache cacheValue2 = getSMX.getCacheValue2();
if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(str)) {
sentimentCachingMap.put(str, getSMX.getCacheValue1());
}
if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) {
sentimentCachingMap.put(transmittedStr, getSMX.getCacheValue2());
}
} catch (Exception ex) {
//System.out.println("FAILING futures return. EX: " + ex.getLocalizedMessage() + "\n");
}
}
sentenceRelationMap.put(str, mapUdate);
}

private static final boolean handleRetrievedSMX(SimilarityMatrix getSMX, String str, String transmittedStr) {
private static boolean handleRetrievedSMX(SimilarityMatrix getSMX, String str, String transmittedStr) {
final int relationCap = 20;
if (getSMX != null) {
//System.out.println("getSMX primary: " + getSMX.getPrimaryString() + "\ngetSMX secondary: " + getSMX.getSecondaryString() + "\n");
final Double scoreRelationNewMsgToRecentMsg = getSMX.getDistance();
System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
//System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
mapUdate.put(getSMX.getSecondaryString(), scoreRelationNewMsgToRecentMsg);
System.out.println("getSMX primary: " + getSMX.getPrimaryString() + "\ngetSMX secodary: " + getSMX.getSecondaryString() + "\n");
if (scoreRelationNewMsgToRecentMsg >= 200.0) {
positiveRelationCounter++;
if (positiveRelationCounter >= relationCap) {
System.out.println("added to strmapreturn str: " + str + "\n");
//System.out.println("added to strmapreturn str: " + str + "\n");
strmapreturn.put(strmapreturn.size() + 1, str);
return true;
}
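futuresReturnOverallEvaluation now copies the per-sentence caches carried by each completed SimilarityMatrix future into sentimentCachingMap. A hedged sketch of that harvest-and-publish step, with CachedState as a hypothetical stand-in for SentimentValueCache; putIfAbsent does atomically what the patch does with keySet().contains(...) followed by put(...):

import com.google.common.collect.MapMaker;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.*;

// Harvest results from finished futures and publish them into a shared cache
// so the next comparison round can reuse them.
public class FutureHarvestSketch {

    static final class CachedState {
        final int score;
        CachedState(int score) { this.score = score; }
    }

    private static final ConcurrentMap<String, CachedState> cache =
            new MapMaker().concurrencyLevel(6).makeMap();

    static void harvest(Map<String, Future<CachedState>> pending) {
        for (Map.Entry<String, Future<CachedState>> e : pending.entrySet()) {
            try {
                CachedState state = e.getValue().get(5, TimeUnit.SECONDS);
                if (state != null) {
                    cache.putIfAbsent(e.getKey(), state); // never overwrites an earlier entry
                }
            } catch (InterruptedException | ExecutionException | TimeoutException ex) {
                // a failed or slow worker simply contributes nothing to the cache
            }
        }
    }

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        Map<String, Future<CachedState>> pending = new HashMap<>();
        Callable<CachedState> task = () -> new CachedState(2);
        pending.put("hello there", pool.submit(task));
        harvest(pending);
        System.out.println(cache.keySet());
        pool.shutdown();
    }
}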
@@ -320,7 +334,9 @@ public class Datahandler {
return false;
}

private final static Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap, ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
private static Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(
ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap,
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
final ConcurrentMap<String, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(4).makeMap();
strCacheLocal.values().removeIf(e -> {
@@ -330,27 +346,30 @@ public class Datahandler {
}
return true;
});
SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(str, null);
for (String str1 : strCacheLocal.values()) {
if (!str.equals(str1)) {
//experimental change
if (!str.equals(str1) && !futures.keySet().contains(str1)) {
final SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
final Callable<SimilarityMatrix> worker;
if (stringCache.size() < 150) {
worker = new SentimentAnalyzerTest(str, str1, SMX,
localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str),
localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str),
localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1));
localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1);
} else {
worker = new SentimentAnalyzerTest(str, str1, SMX,
localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str),
pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1));
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
}
futures.put(str1, executor.submit(worker));
}
}
Map.Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> entryReturn
Map.Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> mapreturn
= new AbstractMap.SimpleEntry(futures, strCacheLocal);
return entryReturn;
return mapreturn;
}

private static ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,
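StrComparringNoSentenceRelationMap now pulls any cached state for both sentences with getOrDefault and hands it to each SentimentAnalyzerTest worker, and it skips candidates that already have a pending future. A minimal sketch of that fan-out, with compare and CachedState as hypothetical stand-ins for the real worker and cache types:

import com.google.common.collect.MapMaker;
import java.util.List;
import java.util.concurrent.*;

// One comparison task per candidate sentence, each handed whatever cached
// per-sentence state is already available (possibly null).
public class ComparisonFanOutSketch {

    static final class CachedState {}

    private static final ConcurrentMap<String, CachedState> cache =
            new MapMaker().concurrencyLevel(6).makeMap();
    private static final ExecutorService executor = Executors.newFixedThreadPool(4);

    static double compare(String a, String b, CachedState sa, CachedState sb) {
        return a.length() == b.length() ? 1.0 : 0.0; // placeholder scoring
    }

    static ConcurrentMap<String, Future<Double>> fanOut(String str, List<String> candidates) {
        CachedState cachedStr = cache.getOrDefault(str, null);
        ConcurrentMap<String, Future<Double>> futures = new MapMaker().concurrencyLevel(4).makeMap();
        for (String str1 : candidates) {
            if (str.equals(str1) || futures.containsKey(str1)) {
                continue; // containsKey is the idiomatic form of !futures.keySet().contains(str1)
            }
            CachedState cachedStr1 = cache.getOrDefault(str1, null);
            final Callable<Double> worker = () -> compare(str, str1, cachedStr, cachedStr1);
            futures.put(str1, executor.submit(worker));
        }
        return futures;
    }
}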
@@ -387,11 +406,14 @@ public class Datahandler {
return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
}

private final static SimilarityMatrix retrieveFutureSMX(Future<SimilarityMatrix> future) {
private static SimilarityMatrix retrieveFutureSMX(Future<SimilarityMatrix> future) {
try {
return future.get(5, TimeUnit.SECONDS);
// SimilarityMatrix SMX = future.get();
SimilarityMatrix SMX = future.get(5, TimeUnit.SECONDS);
return SMX;
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("retrieveFutureSMX timeout Exception; " + ex.getLocalizedMessage() + "\n");
}
return null;
}
@@ -435,13 +457,6 @@ public class Datahandler {
}
}

/**
*
* @param str is strF aka user message,
* @param MostRecent String most recently responded with
* @return
* @throws CustomError
*/
public synchronized String getResponseMsg(String str, String MostRecent) throws CustomError {
str = str.trim();
if (str.startsWith("<@")) {
@@ -470,19 +485,21 @@ public class Datahandler {
}
if (!present) {
SimilarityMatrix SMX = new SimilarityMatrix(strF, str1);
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, SMX,
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
pipelineAnnotationCache.get(str1), strAnnoSentiment,
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1));
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
}
}
});
futureslocal.values().forEach((future) -> {
SimilarityMatrix SMX = new SimilarityMatrix("", "");
SimilarityMatrix SMX;
try {
SMX = future.get(5, TimeUnit.SECONDS);
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
}
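In getResponseMsg the incoming message has no cached state yet, so null is passed for its cache slot while the candidate sentence's cache is looked up as usual. A sketch of the convention a worker would follow under that contract, treating each precomputed value as optional; Analysis and the distance formula are illustrative only:

import java.util.concurrent.Callable;

// A comparison worker that accepts optional precomputed per-sentence state and
// rebuilds only what is missing before scoring the pair.
public class OptionalCacheWorker implements Callable<Double> {

    static final class Analysis {
        final int tokens;
        Analysis(String s) { this.tokens = s.split("\\s+").length; }
    }

    private final String left;
    private final String right;
    private final Analysis leftCache;   // may be null: not analyzed before
    private final Analysis rightCache;  // may be null: not analyzed before

    public OptionalCacheWorker(String left, String right, Analysis leftCache, Analysis rightCache) {
        this.left = left;
        this.right = right;
        this.leftCache = leftCache;
        this.rightCache = rightCache;
    }

    @Override
    public Double call() {
        Analysis a = leftCache != null ? leftCache : new Analysis(left);
        Analysis b = rightCache != null ? rightCache : new Analysis(right);
        return (double) Math.abs(a.tokens - b.tokens); // placeholder distance
    }
}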
@@ -494,6 +511,7 @@ public class Datahandler {
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
}
}
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
futureAndCacheCombineMap.values().parallelStream().forEach((SMX) -> {
if (sentenceRelationMap.get(strF) == null) {
ConcurrentMap<String, Double> localMap = new MapMaker().concurrencyLevel(2).makeMap();
@@ -514,13 +532,20 @@ public class Datahandler {
Double scoreRelationOldUserMsg = 0.0;
ConcurrentMap<String, Double> getPrimaryLocal = new MapMaker().concurrencyLevel(2).makeMap();
for (String conversationStr : conversationMatchMap.keySet()) {
getPrimaryLocal = sentenceRelationMap.get(strF);
Double getSecondary = getPrimaryLocal.get(conversationStr);
Double getSecondary = 0.0;
getPrimaryLocal = sentenceRelationMap.getOrDefault(strF, null);
if (getPrimaryLocal == null) {
getSecondary = getScoreRelationStrF(strF, conversationStr);
getPrimaryLocal.put(conversationStr, getSecondary);
sentenceRelationMap.put(strF, getPrimaryLocal);
} else {
getSecondary = getPrimaryLocal.get(conversationStr);
if (getSecondary == null) {
getSecondary = getScoreRelationStrF(strF, conversationStr);
getPrimaryLocal.put(conversationStr, getSecondary);
sentenceRelationMap.put(strF, getPrimaryLocal);
}
}
scoreRelationNewMsgToRecentMsg += getSecondary;
System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
}
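The new branch fills sentenceRelationMap lazily, one conversation string at a time, though when getOrDefault returns null the code still calls put on that null reference before a fresh map is stored. A sketch of the same lazy fill written with computeIfAbsent, which keeps one score per (strF, conversationStr) pair and sidesteps that branch; scoreRelation stands in for getScoreRelationStrF:

import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

// Lazily score each conversation string against the user message strF and
// remember the result in a nested relation map.
public class RelationMapSketch {

    private static final ConcurrentMap<String, ConcurrentMap<String, Double>> sentenceRelationMap =
            new MapMaker().concurrencyLevel(2).makeMap();

    static double scoreRelation(String a, String b) {
        return a.length() + b.length(); // placeholder scoring
    }

    static double relationTo(String strF, String conversationStr) {
        ConcurrentMap<String, Double> perSentence = sentenceRelationMap.computeIfAbsent(
                strF, key -> new MapMaker().concurrencyLevel(2).<String, Double>makeMap());
        return perSentence.computeIfAbsent(conversationStr, key -> scoreRelation(strF, key));
    }
}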
@@ -546,6 +571,7 @@ public class Datahandler {
}
ConcurrentMap<Integer, Entry<Double, SimilarityMatrix>> concurrentRelationsMap = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Double> preRelationUserCountersMap = new MapMaker().concurrencyLevel(2).makeMap();
System.out.println("futureAndCacheCombineMap size for: " + futureAndCacheCombineMap.values().size() + "\n");
for (SimilarityMatrix SMX : futureAndCacheCombineMap.values()) {
Double scoreRelation = 500.0;
Double scoreRelationLastUserMsg = SMX.getDistance();
@@ -574,13 +600,17 @@ public class Datahandler {
}
}
Double totalRelation = scoreRelation + scoreRelationLastUserMsg;
if (totalRelation > preRelationCounters + preRelationUserCounters && scoreRelationLastUserMsg + (preRelationUserCounters / 10)
> preRelationUserCounters) {
if (totalRelation > preRelationCounters + preRelationUserCounters && (scoreRelationLastUserMsg + (preRelationUserCounters / 10)
>= preRelationUserCounters) || preRelationUserCounters == -100.0) {
Entry<Double, SimilarityMatrix> localEntry = new AbstractMap.SimpleEntry(totalRelation, SMX);
concurrentRelationsMap.put(concurrentRelationsMap.size(), localEntry);
preRelationUserCountersMap.put(preRelationUserCountersMap.size(), preRelationUserCounters);
System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelationsMap.size() + "\n");
preRelationCounters = scoreRelation;
preRelationUserCounters = scoreRelationLastUserMsg;
} else {
System.out.println("FAILED totalRelation: " + totalRelation + "\npreRelationUserCounters: " + preRelationUserCounters + "\npreRelationCounters: "
+ preRelationCounters + "\nscoreRelationLastUserMsg: " + scoreRelationLastUserMsg + "\n");
}
}
StringBuilder SB = new StringBuilder();
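The reworked acceptance test mixes && and ||. In Java, && binds tighter than ||, so the preRelationUserCounters == -100.0 escape admits an entry on its own, independent of the two score comparisons. A two-line check of that grouping:

// In Java, "a && b || c" is evaluated as "(a && b) || c", not "a && (b || c)".
public class PrecedenceCheck {
    public static void main(String[] args) {
        boolean a = false, b = false, c = true;
        System.out.println(a && b || c);   // true
        System.out.println(a && (b || c)); // false
    }
}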
@@ -643,11 +673,13 @@ public class Datahandler {

private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) {
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str),
pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str),
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str),
coreDocumentAnnotationCache.get(mostRecentMsg));
coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
SimilarityMatrix callSMX = null;
try {
callSMX = worker.call();
@@ -664,10 +696,12 @@ public class Datahandler {

private Double getScoreRelationStrF(String str, String mostRecentMsg) {
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno,
pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment,
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg));
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
SimilarityMatrix callSMX = null;
try {
callSMX = worker.call();

@@ -5,22 +5,33 @@
*/
package FunctionLayer;

import FunctionLayer.StanfordParser.SentimentValueCache;

/**
*
* @author install1
*/
public class SimilarityMatrix{
public class SimilarityMatrix {

private String PrimaryString;
private String SecondaryString;
private double distance;
private static String PrimaryString;
private static String SecondaryString;
private static double distance;
private static SentimentValueCache cacheValue1;
private static SentimentValueCache cacheValue2;

public double getDistance() {
public final SentimentValueCache getCacheValue2() {
return cacheValue2;
}

public final void setCacheValue2(SentimentValueCache cacheValue2) {
SimilarityMatrix.cacheValue2 = cacheValue2;
}

public final double getDistance() {
return distance;
}

public void setDistance(double distance) {
public final void setDistance(double distance) {
this.distance = distance;
}

@@ -35,21 +46,28 @@ public class SimilarityMatrix{
this.distance = result;
}

public String getPrimaryString() {
public final String getPrimaryString() {
return PrimaryString;
}

public void setPrimaryString(String PrimaryString) {
public final void setPrimaryString(String PrimaryString) {
this.PrimaryString = PrimaryString;
}

public String getSecondaryString() {
public final String getSecondaryString() {
return SecondaryString;
}

public void setSecondaryString(String SecondaryString) {
public final void setSecondaryString(String SecondaryString) {
this.SecondaryString = SecondaryString;
}

public final SentimentValueCache getCacheValue1() {
return cacheValue1;
}

public final void setCacheValue1(SentimentValueCache cacheValue1) {
this.cacheValue1 = cacheValue1;
}

}
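This hunk turns SimilarityMatrix's fields, including the two new cache slots, into static fields, so every instance created for a concurrently compared pair reads and writes the same shared values. For contrast, a sketch of the same value holder with per-instance fields; this is an illustrative alternative, not the committed class:

import FunctionLayer.StanfordParser.SentimentValueCache;

// Per-instance value holder: each compared pair keeps its own strings,
// distance and cached sentiment state.
public class SimilarityMatrixSketch {

    private String primaryString;
    private String secondaryString;
    private double distance;
    private SentimentValueCache cacheValue1;
    private SentimentValueCache cacheValue2;

    public SimilarityMatrixSketch(String primaryString, String secondaryString) {
        this.primaryString = primaryString;
        this.secondaryString = secondaryString;
    }

    public final String getPrimaryString() { return primaryString; }
    public final String getSecondaryString() { return secondaryString; }
    public final double getDistance() { return distance; }
    public final void setDistance(double distance) { this.distance = distance; }
    public final SentimentValueCache getCacheValue1() { return cacheValue1; }
    public final void setCacheValue1(SentimentValueCache cacheValue1) { this.cacheValue1 = cacheValue1; }
    public final SentimentValueCache getCacheValue2() { return cacheValue2; }
    public final void setCacheValue2(SentimentValueCache cacheValue2) { this.cacheValue2 = cacheValue2; }
}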
File diff suppressed because it is too large.
@@ -0,0 +1,334 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer.StanfordParser;

import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.trees.GrammaticalStructure;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TypedDependency;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import org.ejml.simple.SimpleMatrix;

/**
*
* @author install1
*/
public final class SentimentValueCache {

private final String sentence;
private static int counter;
private static List<List<TaggedWord>> taggedwordlist = new ArrayList();
private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
private final Collection<TypedDependency> allTypedDependencies = new ArrayList();
private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
private static List classifyRaw;
private static int mainSentiment = 0;
private static int longest = 0;
private static int tokensCounter = 0;
private static int anotatorcounter = 0;
private static int inflectedCounterPositive = 0;
private static int inflectedCounterNegative = 0;
private static int MarkedContinuousCounter = 0;
private static int MarkedContiniousCounterEntries = 0;
private static int UnmarkedPatternCounter = 0;
private static int pairCounter = 0;
private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();

public final int getPairCounter() {
return pairCounter;
}

public final void setPairCounter(int pairCounter) {
SentimentValueCache.pairCounter = pairCounter;
}

public final void addStopWordLemma(String str) {
stopWordLemma.put(stopWordLemma.size(), str);
}

public final void addstopwordTokens(String str) {
stopwordTokens.put(stopwordTokens.size(), str);
}

public final ConcurrentMap<Integer, String> getStopwordTokens() {
return stopwordTokens;
}

public final ConcurrentMap<Integer, String> getStopWordLemma() {
return stopWordLemma;
}

public final void addnerEntityTokenTags(String str) {
nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
}

public final ConcurrentMap<Integer, String> getnerEntityTokenTags() {
return nerEntityTokenTags;
}

public final ConcurrentMap<Integer, String> getnerEntities1() {
return nerEntities1;
}

public final ConcurrentMap<Integer, String> getnerEntities2() {
return nerEntities2;
}

public final void addNEREntities1(String str) {
nerEntities1.put(nerEntities1.size(), str);
}

public final void addNEREntities2(String str) {
nerEntities2.put(nerEntities2.size(), str);
}

public final void setTaggedwords(List<List<TaggedWord>> twlist) {
taggedwordlist = twlist;
}

public final List<List<TaggedWord>> getTaggedwordlist() {
return taggedwordlist;
}

public final void addEntryCounts(int counts) {
entryCounts.put(entryCounts.size(), counts);
}

public final ConcurrentMap<Integer, Integer> getEntryCounts() {
return entryCounts;
}

public final void addstrTokenEntryPOS(String str) {
strTokenEntryPOS.put(strTokenEntryPOS.size(), str);
}

public final ConcurrentMap<Integer, String> getstrTokenEntryPOS() {
return strTokenEntryPOS;
}

public final void addstrTokenGetiPart(String str) {
strTokenGetiPart.put(strTokenGetiPart.size(), str);
}

public final ConcurrentMap<Integer, String> getstrTokenGetiPart() {
return strTokenGetiPart;
}

public final ConcurrentMap<Integer, String> getstrTokenGetEntry() {
return strTokenGetEntry;
}

public final void addstrTokenGetEntry(String str) {
strTokenGetEntry.put(strTokenGetEntry.size(), str);
}

public final ConcurrentMap<Integer, String> getstrTokenForm() {
return strTokenForm;
}

public final void addstrTokenForm(String str) {
strTokenForm.put(strTokenForm.size(), str);
}

public final ConcurrentMap<Integer, String> getstrTokenStems() {
return strTokenStems;
}

public final void addstrTokenStems(String str) {
strTokenStems.put(strTokenStems.size(), str);
}

public final ConcurrentMap<Integer, String> getITokenMapTag() {
return ITokenMapTag;
}

public final void addITokenMapTag(String str) {
ITokenMapTag.put(ITokenMapTag.size(), str);
}

public final int getUnmarkedPatternCounter() {
return UnmarkedPatternCounter;
}

public final void setUnmarkedPatternCounter(int UnmarkedPatternCounter) {
SentimentValueCache.UnmarkedPatternCounter = UnmarkedPatternCounter;
}

public final int getMarkedContiniousCounterEntries() {
return MarkedContiniousCounterEntries;
}

public final void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) {
SentimentValueCache.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries;
}

public final int getMarkedContinuousCounter() {
return MarkedContinuousCounter;
}

public final void setMarkedContinuousCounter(int MarkedContinuousCounter) {
SentimentValueCache.MarkedContinuousCounter = MarkedContinuousCounter;
}

public final int getInflectedCounterNegative() {
return inflectedCounterNegative;
}

public final void setInflectedCounterNegative(int inflectedCounterNegative) {
SentimentValueCache.inflectedCounterNegative = inflectedCounterNegative;
}

public final int getInflectedCounterPositive() {
return inflectedCounterPositive;
}

public final void setInflectedCounterPositive(int inflectedCounterPositive) {
SentimentValueCache.inflectedCounterPositive = inflectedCounterPositive;
}

public final int getAnotatorcounter() {
return anotatorcounter;
}

public final void setAnotatorcounter(int anotatorcounter) {
SentimentValueCache.anotatorcounter = anotatorcounter;
}

public final int getTokensCounter() {
return tokensCounter;
}

public final void setTokensCounter(int tokensCounter) {
SentimentValueCache.tokensCounter = tokensCounter;
}

public final int getMainSentiment() {
return mainSentiment;
}

public final void setMainSentiment(int mainSentiment) {
SentimentValueCache.mainSentiment = mainSentiment;
}

public final int getLongest() {
return longest;
}

public final void setLongest(int longest) {
SentimentValueCache.longest = longest;
}

public final List getClassifyRaw() {
return classifyRaw;
}

public final void setClassifyRaw(List classifyRaw) {
SentimentValueCache.classifyRaw = classifyRaw;
}

public final ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
return rnnPredictClassMap;
}

public final void addRNNPredictClass(int rnnPrediction) {
rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
}

public final void addSimpleMatrix(SimpleMatrix SMX) {
simpleSMXlist.put(simpleSMXlist.size(), SMX);
}

public final void addSimpleMatrixVector(SimpleMatrix SMX) {
simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
}

public final ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
return gsMap;
}

public final ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
return simpleSMXlist;
}

public final ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
return simpleSMXlistVector;
}

public final ConcurrentMap<Integer, GrammaticalStructure> getGs() {
return gsMap;
}

public final int getCounter() {
return counter;
}

public final void addGS(GrammaticalStructure gs) {
gsMap.put(gsMap.size(), gs);
}

public final Collection<TypedDependency> getAllTypedDependencies() {
return allTypedDependencies;
}

public final void addTypedDependencies(Collection<TypedDependency> TDPlist) {
for (TypedDependency TDP : TDPlist) {
allTypedDependencies.add(TDP);
}
}

public final ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
return sentenceConstituencyParseList;
}

public final void addSentenceConstituencyParse(Tree tree) {
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
}

public final void setCounter(int counter) {
SentimentValueCache.counter = counter;
}

public final String getSentence() {
return sentence;
}

public SentimentValueCache(String str, int counter) {
this.sentence = str;
this.counter = counter;
}

public final ConcurrentMap<Integer, String> getTgwlistIndex() {
return tgwlistIndex;
}

public final void addTgwlistIndex(String str) {
tgwlistIndex.put(tgwlistIndex.size(), str);
}

public SentimentValueCache(String str) {
this.sentence = str;
}
}
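SentimentValueCache is the new per-sentence record of parse and sentiment state (tagged words, constituency trees, typed dependencies, RNN matrices and several counters; note the counters are static and therefore shared across instances). How it would typically be filled and reused through sentimentCachingMap, sketched below; the populate calls are illustrative, since SentimentAnalyzerTest's diff is not shown in this view:

import FunctionLayer.StanfordParser.SentimentValueCache;
import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

// Build one SentimentValueCache per sentence on first use and keep it in a
// shared map so later comparisons can start from the recorded state.
public class SentimentValueCacheUsageSketch {

    private static final ConcurrentMap<String, SentimentValueCache> sentimentCachingMap =
            new MapMaker().concurrencyLevel(6).makeMap();

    static SentimentValueCache cacheFor(String sentence) {
        SentimentValueCache cached = sentimentCachingMap.get(sentence);
        if (cached != null) {
            return cached; // reuse the recorded per-sentence state
        }
        SentimentValueCache fresh = new SentimentValueCache(sentence);
        fresh.addstrTokenForm("example"); // stand-in for the real token forms
        fresh.setTokensCounter(1);        // note: this counter is static, shared by all instances
        sentimentCachingMap.putIfAbsent(sentence, fresh);
        return sentimentCachingMap.get(sentence);
    }
}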