caching indeppendent values for sentiment analyzing
This commit is contained in:
parent
5ccadbffbd
commit
a4139e4ae4
@ -7,6 +7,7 @@ package FunctionLayer;
|
||||
|
||||
import DataLayer.DataMapper;
|
||||
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
|
||||
import FunctionLayer.StanfordParser.SentimentValueCache;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.collect.MapMaker;
|
||||
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
|
||||
@ -48,7 +49,7 @@ import java.util.stream.Collectors;
|
||||
* @author install1
|
||||
*/
|
||||
public class Datahandler {
|
||||
|
||||
|
||||
public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES);
|
||||
public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
|
||||
public static Datahandler instance = new Datahandler();
|
||||
@ -69,6 +70,7 @@ public class Datahandler {
|
||||
private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
|
||||
private static ConcurrentMap<String, Integer> conversationMatchMap;
|
||||
private static ConcurrentMap<String, Integer> conversationUserMatchMap;
|
||||
private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
private static final ConcurrentMap<String, Integer> locateFaultySentences = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
private static final ConcurrentMap<String, Double> mapUdate = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
private final static ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
@ -95,7 +97,7 @@ public class Datahandler {
|
||||
// set up Stanford CoreNLP pipeline
|
||||
private static final StanfordCoreNLP pipeline = getPipeLineSetUp();
|
||||
private static StanfordCoreNLP pipelineSentiment;
|
||||
|
||||
|
||||
public Datahandler() {
|
||||
this.stopwatch = Stopwatch.createUnstarted();
|
||||
this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
@ -105,7 +107,7 @@ public class Datahandler {
|
||||
this.conversationMatchMap = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
this.conversationUserMatchMap = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
}
|
||||
|
||||
|
||||
private static StanfordCoreNLP getPipeLineSetUp() {
|
||||
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse");
|
||||
props.setProperty("parse.model", shiftReduceParserPath);
|
||||
@ -123,7 +125,7 @@ public class Datahandler {
|
||||
props.setProperty("tokenize.options", "untokenizable=firstDelete");
|
||||
return new StanfordCoreNLP(props);
|
||||
}
|
||||
|
||||
|
||||
public void shiftReduceParserInitiate() {
|
||||
//got 8 cores
|
||||
CountDownLatch cdl = new CountDownLatch(2);
|
||||
@ -161,15 +163,15 @@ public class Datahandler {
|
||||
}
|
||||
System.out.println("finished shiftReduceParserInitiate\n");
|
||||
}
|
||||
|
||||
|
||||
public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
|
||||
return classifier;
|
||||
}
|
||||
|
||||
|
||||
public static void setClassifier(AbstractSequenceClassifier<CoreLabel> classifier) {
|
||||
Datahandler.classifier = classifier;
|
||||
}
|
||||
|
||||
|
||||
public void updateStringCache() {
|
||||
try {
|
||||
checkIfUpdateStrings(true);
|
||||
@ -177,42 +179,42 @@ public class Datahandler {
|
||||
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static ForkJoinPool instantiateExecutor() {
|
||||
//Runtime.getRuntime().availableProcessors() or static value like 25
|
||||
return new ForkJoinPool(Runtime.getRuntime().availableProcessors(),
|
||||
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
|
||||
null, false);
|
||||
}
|
||||
|
||||
|
||||
public static GrammaticalStructureFactory getGsf() {
|
||||
return gsf;
|
||||
}
|
||||
|
||||
|
||||
public static StanfordCoreNLP getPipeline() {
|
||||
return pipeline;
|
||||
}
|
||||
|
||||
|
||||
public static StanfordCoreNLP getPipelineSentiment() {
|
||||
return pipelineSentiment;
|
||||
}
|
||||
|
||||
|
||||
public static MaxentTagger getTagger() {
|
||||
return tagger;
|
||||
}
|
||||
|
||||
|
||||
private Map<Integer, String> getCache() throws SQLException, IOException, CustomError {
|
||||
return DataMapper.getAllStrings();
|
||||
}
|
||||
|
||||
|
||||
public int getlHMSMXSize() {
|
||||
return lHMSMX.size();
|
||||
}
|
||||
|
||||
|
||||
public int getstringCacheSize() {
|
||||
return stringCache.size();
|
||||
}
|
||||
|
||||
|
||||
public void initiateMYSQL() throws SQLException, IOException {
|
||||
try {
|
||||
DataMapper.createTables();
|
||||
@ -223,7 +225,7 @@ public class Datahandler {
|
||||
.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void addHLstatsMessages() {
|
||||
ConcurrentMap<String, Integer> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, String> strCacheLocal = stringCache;
|
||||
@ -233,7 +235,7 @@ public class Datahandler {
|
||||
hlStatsMessages.put(str, hlStatsMessages.size());
|
||||
}
|
||||
}
|
||||
int capacity = 15000;
|
||||
int capacity = 5550;
|
||||
hlStatsMessages.keySet().forEach(str -> {
|
||||
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
|
||||
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
|
||||
@ -243,7 +245,7 @@ public class Datahandler {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
public void instantiateAnnotationMapJMWE() {
|
||||
if (!stringCache.isEmpty()) {
|
||||
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values());
|
||||
@ -252,7 +254,7 @@ public class Datahandler {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void instantiateAnnotationMap() {
|
||||
if (!stringCache.isEmpty()) {
|
||||
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
@ -277,29 +279,41 @@ public class Datahandler {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private final static void futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str) {
|
||||
|
||||
private static void futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str) {
|
||||
for (Entry<String, Future<SimilarityMatrix>> entrySet : entries.entrySet()) {
|
||||
String transmittedStr = entrySet.getKey();
|
||||
final SimilarityMatrix getSMX = retrieveFutureSMX(entrySet.getValue());
|
||||
SimilarityMatrix getSMX = retrieveFutureSMX(entrySet.getValue());
|
||||
if (handleRetrievedSMX(getSMX, str, transmittedStr)) {
|
||||
break;
|
||||
}
|
||||
try {
|
||||
SentimentValueCache cacheValue1 = getSMX.getCacheValue1();
|
||||
SentimentValueCache cacheValue2 = getSMX.getCacheValue2();
|
||||
if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(str)) {
|
||||
sentimentCachingMap.put(str, getSMX.getCacheValue1());
|
||||
}
|
||||
if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) {
|
||||
sentimentCachingMap.put(transmittedStr, getSMX.getCacheValue2());
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
//System.out.println("FAILING futures return. EX: " + ex.getLocalizedMessage() + "\n");
|
||||
}
|
||||
}
|
||||
sentenceRelationMap.put(str, mapUdate);
|
||||
}
|
||||
|
||||
private static final boolean handleRetrievedSMX(SimilarityMatrix getSMX, String str, String transmittedStr) {
|
||||
|
||||
private static boolean handleRetrievedSMX(SimilarityMatrix getSMX, String str, String transmittedStr) {
|
||||
final int relationCap = 20;
|
||||
if (getSMX != null) {
|
||||
//System.out.println("getSMX primary: " + getSMX.getPrimaryString() + "\ngetSMX secondary: " + getSMX.getSecondaryString() + "\n");
|
||||
final Double scoreRelationNewMsgToRecentMsg = getSMX.getDistance();
|
||||
System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
|
||||
//System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
|
||||
mapUdate.put(getSMX.getSecondaryString(), scoreRelationNewMsgToRecentMsg);
|
||||
System.out.println("getSMX primary: " + getSMX.getPrimaryString() + "\ngetSMX secodary: " + getSMX.getSecondaryString() + "\n");
|
||||
if (scoreRelationNewMsgToRecentMsg >= 200.0) {
|
||||
positiveRelationCounter++;
|
||||
if (positiveRelationCounter >= relationCap) {
|
||||
System.out.println("added to strmapreturn str: " + str + "\n");
|
||||
//System.out.println("added to strmapreturn str: " + str + "\n");
|
||||
strmapreturn.put(strmapreturn.size() + 1, str);
|
||||
return true;
|
||||
}
|
||||
@ -319,8 +333,10 @@ public class Datahandler {
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private final static Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap, ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
|
||||
|
||||
private static Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(
|
||||
ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap,
|
||||
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
|
||||
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
|
||||
final ConcurrentMap<String, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
strCacheLocal.values().removeIf(e -> {
|
||||
@ -330,29 +346,32 @@ public class Datahandler {
|
||||
}
|
||||
return true;
|
||||
});
|
||||
SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(str, null);
|
||||
for (String str1 : strCacheLocal.values()) {
|
||||
if (!str.equals(str1)) {
|
||||
//experimental change
|
||||
if (!str.equals(str1) && !futures.keySet().contains(str1)) {
|
||||
final SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
final Callable<SimilarityMatrix> worker;
|
||||
if (stringCache.size() < 150) {
|
||||
worker = new SentimentAnalyzerTest(str, str1, SMX,
|
||||
localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str),
|
||||
localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str),
|
||||
localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1));
|
||||
localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1);
|
||||
} else {
|
||||
worker = new SentimentAnalyzerTest(str, str1, SMX,
|
||||
localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str),
|
||||
pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
|
||||
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1));
|
||||
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
|
||||
}
|
||||
futures.put(str1, executor.submit(worker));
|
||||
}
|
||||
}
|
||||
Map.Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> entryReturn
|
||||
Map.Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> mapreturn
|
||||
= new AbstractMap.SimpleEntry(futures, strCacheLocal);
|
||||
return entryReturn;
|
||||
return mapreturn;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,
|
||||
ConcurrentMap<Integer, String> strCacheLocal, ConcurrentMap<String, Annotation> localJMWEMap,
|
||||
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
|
||||
@ -376,7 +395,7 @@ public class Datahandler {
|
||||
}
|
||||
return strmapreturn;
|
||||
}
|
||||
|
||||
|
||||
private static final ConcurrentMap<Integer, String> removeNonSensicalStrings(ConcurrentMap<Integer, String> strmap) {
|
||||
final ConcurrentMap<Integer, String> strCacheLocal = stringCache.size() < 150 ? strmap : stringCache;
|
||||
final ConcurrentMap<String, Annotation> localJMWEMap = getMultipleJMWEAnnotation(strmap.values());
|
||||
@ -386,16 +405,19 @@ public class Datahandler {
|
||||
System.out.println("finished removeNonSensicalStrings annotations \n");
|
||||
return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
|
||||
}
|
||||
|
||||
private final static SimilarityMatrix retrieveFutureSMX(Future<SimilarityMatrix> future) {
|
||||
|
||||
private static SimilarityMatrix retrieveFutureSMX(Future<SimilarityMatrix> future) {
|
||||
try {
|
||||
return future.get(5, TimeUnit.SECONDS);
|
||||
// SimilarityMatrix SMX = future.get();
|
||||
SimilarityMatrix SMX = future.get(5, TimeUnit.SECONDS);
|
||||
return SMX;
|
||||
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
|
||||
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
System.out.println("retrieveFutureSMX timeout Exception; " + ex.getLocalizedMessage() + "\n");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
public synchronized void checkIfUpdateStrings(boolean hlStatsMsg) throws CustomError {
|
||||
if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) {
|
||||
ConcurrentMap<Integer, String> str = MessageResponseHandler.getStr();
|
||||
@ -434,14 +456,7 @@ public class Datahandler {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param str is strF aka user message,
|
||||
* @param MostRecent String most recently responded with
|
||||
* @return
|
||||
* @throws CustomError
|
||||
*/
|
||||
|
||||
public synchronized String getResponseMsg(String str, String MostRecent) throws CustomError {
|
||||
str = str.trim();
|
||||
if (str.startsWith("<@")) {
|
||||
@ -470,19 +485,21 @@ public class Datahandler {
|
||||
}
|
||||
if (!present) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(strF, str1);
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, SMX,
|
||||
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
|
||||
pipelineAnnotationCache.get(str1), strAnnoSentiment,
|
||||
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1));
|
||||
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
|
||||
futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
|
||||
}
|
||||
}
|
||||
});
|
||||
futureslocal.values().forEach((future) -> {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix("", "");
|
||||
SimilarityMatrix SMX;
|
||||
try {
|
||||
SMX = future.get(5, TimeUnit.SECONDS);
|
||||
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
|
||||
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
|
||||
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
|
||||
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
|
||||
}
|
||||
@ -494,6 +511,7 @@ public class Datahandler {
|
||||
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
|
||||
}
|
||||
}
|
||||
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
|
||||
futureAndCacheCombineMap.values().parallelStream().forEach((SMX) -> {
|
||||
if (sentenceRelationMap.get(strF) == null) {
|
||||
ConcurrentMap<String, Double> localMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
@ -514,12 +532,19 @@ public class Datahandler {
|
||||
Double scoreRelationOldUserMsg = 0.0;
|
||||
ConcurrentMap<String, Double> getPrimaryLocal = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String conversationStr : conversationMatchMap.keySet()) {
|
||||
getPrimaryLocal = sentenceRelationMap.get(strF);
|
||||
Double getSecondary = getPrimaryLocal.get(conversationStr);
|
||||
if (getSecondary == null) {
|
||||
Double getSecondary = 0.0;
|
||||
getPrimaryLocal = sentenceRelationMap.getOrDefault(strF, null);
|
||||
if (getPrimaryLocal == null) {
|
||||
getSecondary = getScoreRelationStrF(strF, conversationStr);
|
||||
getPrimaryLocal.put(conversationStr, getSecondary);
|
||||
sentenceRelationMap.put(strF, getPrimaryLocal);
|
||||
} else {
|
||||
getSecondary = getPrimaryLocal.get(conversationStr);
|
||||
if (getSecondary == null) {
|
||||
getSecondary = getScoreRelationStrF(strF, conversationStr);
|
||||
getPrimaryLocal.put(conversationStr, getSecondary);
|
||||
sentenceRelationMap.put(strF, getPrimaryLocal);
|
||||
}
|
||||
}
|
||||
scoreRelationNewMsgToRecentMsg += getSecondary;
|
||||
System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
|
||||
@ -546,6 +571,7 @@ public class Datahandler {
|
||||
}
|
||||
ConcurrentMap<Integer, Entry<Double, SimilarityMatrix>> concurrentRelationsMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> preRelationUserCountersMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
System.out.println("futureAndCacheCombineMap size for: " + futureAndCacheCombineMap.values().size() + "\n");
|
||||
for (SimilarityMatrix SMX : futureAndCacheCombineMap.values()) {
|
||||
Double scoreRelation = 500.0;
|
||||
Double scoreRelationLastUserMsg = SMX.getDistance();
|
||||
@ -574,13 +600,17 @@ public class Datahandler {
|
||||
}
|
||||
}
|
||||
Double totalRelation = scoreRelation + scoreRelationLastUserMsg;
|
||||
if (totalRelation > preRelationCounters + preRelationUserCounters && scoreRelationLastUserMsg + (preRelationUserCounters / 10)
|
||||
> preRelationUserCounters) {
|
||||
if (totalRelation > preRelationCounters + preRelationUserCounters && (scoreRelationLastUserMsg + (preRelationUserCounters / 10)
|
||||
>= preRelationUserCounters) || preRelationUserCounters == -100.0) {
|
||||
Entry<Double, SimilarityMatrix> localEntry = new AbstractMap.SimpleEntry(totalRelation, SMX);
|
||||
concurrentRelationsMap.put(concurrentRelationsMap.size(), localEntry);
|
||||
preRelationUserCountersMap.put(preRelationUserCountersMap.size(), preRelationUserCounters);
|
||||
System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelationsMap.size() + "\n");
|
||||
preRelationCounters = scoreRelation;
|
||||
preRelationUserCounters = scoreRelationLastUserMsg;
|
||||
} else {
|
||||
System.out.println("FAILED totalRelation: " + totalRelation + "\npreRelationUserCounters: " + preRelationUserCounters + "\npreRelationCounters: "
|
||||
+ preRelationCounters + "\nscoreRelationLastUserMsg: " + scoreRelationLastUserMsg + "\n");
|
||||
}
|
||||
}
|
||||
StringBuilder SB = new StringBuilder();
|
||||
@ -601,7 +631,7 @@ public class Datahandler {
|
||||
System.out.println("Reached end: SB: " + SB.toString() + "\n: ");
|
||||
return SB.toString();
|
||||
}
|
||||
|
||||
|
||||
public void getSingularAnnotation(String str) {
|
||||
strAnno = new Annotation(str);
|
||||
pipeline.annotate(strAnno);
|
||||
@ -615,12 +645,12 @@ public class Datahandler {
|
||||
pipeline.annotate(coreDocument);
|
||||
coreDoc = coreDocument;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultipleJMWEAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str);
|
||||
return jmweAnnotation;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultiplePipelineAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String str1 : str) {
|
||||
@ -630,7 +660,7 @@ public class Datahandler {
|
||||
pipeline.annotate(pipelineAnnotationMap.values());
|
||||
return pipelineAnnotationMap;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultiplePipelineSentimentAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String str1 : str) {
|
||||
@ -640,14 +670,16 @@ public class Datahandler {
|
||||
pipelineSentiment.annotate(pipelineAnnotationMap.values());
|
||||
return pipelineAnnotationMap;
|
||||
}
|
||||
|
||||
|
||||
private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
|
||||
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
|
||||
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
|
||||
jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str),
|
||||
pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str),
|
||||
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str),
|
||||
coreDocumentAnnotationCache.get(mostRecentMsg));
|
||||
coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
|
||||
SimilarityMatrix callSMX = null;
|
||||
try {
|
||||
callSMX = worker.call();
|
||||
@ -661,13 +693,15 @@ public class Datahandler {
|
||||
}
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
|
||||
private Double getScoreRelationStrF(String str, String mostRecentMsg) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
|
||||
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
|
||||
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
|
||||
strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno,
|
||||
pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment,
|
||||
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg));
|
||||
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
|
||||
SimilarityMatrix callSMX = null;
|
||||
try {
|
||||
callSMX = worker.call();
|
||||
@ -681,7 +715,7 @@ public class Datahandler {
|
||||
}
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
|
||||
public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings, String MostRecent) {
|
||||
similar = "";
|
||||
minDistance = 6.0;
|
||||
@ -759,7 +793,7 @@ public class Datahandler {
|
||||
}
|
||||
return similar.isEmpty() ? null : similar;
|
||||
}
|
||||
|
||||
|
||||
public static ConcurrentMap<Integer, String> cutContent(ConcurrentMap<Integer, String> str, boolean hlStatsMsg) {
|
||||
ConcurrentMap<Integer, String> returnlist = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
@ -773,7 +807,7 @@ public class Datahandler {
|
||||
});
|
||||
return returnlist;
|
||||
}
|
||||
|
||||
|
||||
public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
|
||||
ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
@ -880,7 +914,7 @@ public class Datahandler {
|
||||
});
|
||||
return strlistreturn;
|
||||
}
|
||||
|
||||
|
||||
private ConcurrentMap<Integer, String> removeSlacks(ConcurrentMap<Integer, String> str) {
|
||||
ConcurrentMap<Integer, String> strreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
if (stringCache.isEmpty()) {
|
||||
@ -905,7 +939,7 @@ public class Datahandler {
|
||||
});
|
||||
return strreturn;
|
||||
}
|
||||
|
||||
|
||||
private ConcurrentMap<Integer, String> annotationCacheUpdate(ConcurrentMap<Integer, String> strmap) {
|
||||
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values());
|
||||
for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
|
||||
@ -943,18 +977,18 @@ public class Datahandler {
|
||||
});
|
||||
return strmap;
|
||||
}
|
||||
|
||||
|
||||
private static class AnnotationCollector<T> implements Consumer<T> {
|
||||
|
||||
|
||||
private static int i = 0;
|
||||
private final List<T> annotationsT = new ArrayList();
|
||||
|
||||
|
||||
@Override
|
||||
public final void accept(T ann) {
|
||||
annotationsT.add(ann);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
|
||||
AnnotationCollector<Annotation> annCollector = new AnnotationCollector();
|
||||
for (final String exampleString : str) {
|
||||
|
@ -5,22 +5,33 @@
|
||||
*/
|
||||
package FunctionLayer;
|
||||
|
||||
import FunctionLayer.StanfordParser.SentimentValueCache;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author install1
|
||||
*/
|
||||
public class SimilarityMatrix{
|
||||
public class SimilarityMatrix {
|
||||
|
||||
private String PrimaryString;
|
||||
private String SecondaryString;
|
||||
private double distance;
|
||||
private static String PrimaryString;
|
||||
private static String SecondaryString;
|
||||
private static double distance;
|
||||
private static SentimentValueCache cacheValue1;
|
||||
private static SentimentValueCache cacheValue2;
|
||||
|
||||
public double getDistance() {
|
||||
public final SentimentValueCache getCacheValue2() {
|
||||
return cacheValue2;
|
||||
}
|
||||
|
||||
public final void setCacheValue2(SentimentValueCache cacheValue2) {
|
||||
SimilarityMatrix.cacheValue2 = cacheValue2;
|
||||
}
|
||||
|
||||
public final double getDistance() {
|
||||
return distance;
|
||||
}
|
||||
|
||||
public void setDistance(double distance) {
|
||||
public final void setDistance(double distance) {
|
||||
this.distance = distance;
|
||||
}
|
||||
|
||||
@ -35,21 +46,28 @@ public class SimilarityMatrix{
|
||||
this.distance = result;
|
||||
}
|
||||
|
||||
public String getPrimaryString() {
|
||||
public final String getPrimaryString() {
|
||||
return PrimaryString;
|
||||
}
|
||||
|
||||
public void setPrimaryString(String PrimaryString) {
|
||||
public final void setPrimaryString(String PrimaryString) {
|
||||
this.PrimaryString = PrimaryString;
|
||||
}
|
||||
|
||||
public String getSecondaryString() {
|
||||
public final String getSecondaryString() {
|
||||
return SecondaryString;
|
||||
}
|
||||
|
||||
public void setSecondaryString(String SecondaryString) {
|
||||
public final void setSecondaryString(String SecondaryString) {
|
||||
this.SecondaryString = SecondaryString;
|
||||
}
|
||||
|
||||
public final SentimentValueCache getCacheValue1() {
|
||||
return cacheValue1;
|
||||
}
|
||||
|
||||
public final void setCacheValue1(SentimentValueCache cacheValue1) {
|
||||
this.cacheValue1 = cacheValue1;
|
||||
}
|
||||
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,334 @@
|
||||
/*
|
||||
* To change this license header, choose License Headers in Project Properties.
|
||||
* To change this template file, choose Tools | Templates
|
||||
* and open the template in the editor.
|
||||
*/
|
||||
package FunctionLayer.StanfordParser;
|
||||
|
||||
import com.google.common.collect.MapMaker;
|
||||
import edu.stanford.nlp.ling.TaggedWord;
|
||||
import edu.stanford.nlp.trees.GrammaticalStructure;
|
||||
import edu.stanford.nlp.trees.Tree;
|
||||
import edu.stanford.nlp.trees.TypedDependency;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import org.ejml.simple.SimpleMatrix;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author install1
|
||||
*/
|
||||
public final class SentimentValueCache {
|
||||
|
||||
private final String sentence;
|
||||
private static int counter;
|
||||
private static List<List<TaggedWord>> taggedwordlist = new ArrayList();
|
||||
private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final Collection<TypedDependency> allTypedDependencies = new ArrayList();
|
||||
private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
private static List classifyRaw;
|
||||
private static int mainSentiment = 0;
|
||||
private static int longest = 0;
|
||||
private static int tokensCounter = 0;
|
||||
private static int anotatorcounter = 0;
|
||||
private static int inflectedCounterPositive = 0;
|
||||
private static int inflectedCounterNegative = 0;
|
||||
private static int MarkedContinuousCounter = 0;
|
||||
private static int MarkedContiniousCounterEntries = 0;
|
||||
private static int UnmarkedPatternCounter = 0;
|
||||
private static int pairCounter = 0;
|
||||
private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
|
||||
public final int getPairCounter() {
|
||||
return pairCounter;
|
||||
}
|
||||
|
||||
public final void setPairCounter(int pairCounter) {
|
||||
SentimentValueCache.pairCounter = pairCounter;
|
||||
}
|
||||
|
||||
    /** Appends a stop-word lemma at the next dense index (current map size). */
    public final void addStopWordLemma(String str) {
        stopWordLemma.put(stopWordLemma.size(), str);
    }

    /** Appends a stop-word token at the next dense index (current map size). */
    public final void addstopwordTokens(String str) {
        stopwordTokens.put(stopwordTokens.size(), str);
    }

    /** @return live view of the stop-word tokens, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getStopwordTokens() {
        return stopwordTokens;
    }

    /** @return live view of the stop-word lemmas, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getStopWordLemma() {
        return stopWordLemma;
    }
|
||||
|
||||
    /** Appends a NER entity token tag at the next dense index (current map size). */
    public final void addnerEntityTokenTags(String str) {
        nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
    }

    /** @return live view of the NER entity token tags, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getnerEntityTokenTags() {
        return nerEntityTokenTags;
    }

    /** @return live view of the first NER entity map, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getnerEntities1() {
        return nerEntities1;
    }

    /** @return live view of the second NER entity map, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getnerEntities2() {
        return nerEntities2;
    }
|
||||
|
||||
    /** Appends an entity to the first NER map at the next dense index. */
    public final void addNEREntities1(String str) {
        nerEntities1.put(nerEntities1.size(), str);
    }

    /** Appends an entity to the second NER map at the next dense index. */
    public final void addNEREntities2(String str) {
        nerEntities2.put(nerEntities2.size(), str);
    }
|
||||
|
||||
    /** Replaces the cached tagged-word list wholesale (no defensive copy is taken). */
    public final void setTaggedwords(List<List<TaggedWord>> twlist) {
        taggedwordlist = twlist;
    }

    /** @return the cached tagged-word list (live reference, not a copy). */
    public final List<List<TaggedWord>> getTaggedwordlist() {
        return taggedwordlist;
    }
|
||||
|
||||
    /** Appends an entry count at the next dense index (current map size). */
    public final void addEntryCounts(int counts) {
        entryCounts.put(entryCounts.size(), counts);
    }

    /** @return live view of the entry counts, keyed by insertion index. */
    public final ConcurrentMap<Integer, Integer> getEntryCounts() {
        return entryCounts;
    }
|
||||
|
||||
    /** Appends a token-entry POS string at the next dense index (current map size). */
    public final void addstrTokenEntryPOS(String str) {
        strTokenEntryPOS.put(strTokenEntryPOS.size(), str);
    }

    /** @return live view of the token-entry POS strings, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getstrTokenEntryPOS() {
        return strTokenEntryPOS;
    }

    /** Appends a token iPart string at the next dense index (current map size). */
    public final void addstrTokenGetiPart(String str) {
        strTokenGetiPart.put(strTokenGetiPart.size(), str);
    }

    /** @return live view of the token iPart strings, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getstrTokenGetiPart() {
        return strTokenGetiPart;
    }
|
||||
|
||||
    /** @return live view of the token entries, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getstrTokenGetEntry() {
        return strTokenGetEntry;
    }

    /** Appends a token entry at the next dense index (current map size). */
    public final void addstrTokenGetEntry(String str) {
        strTokenGetEntry.put(strTokenGetEntry.size(), str);
    }

    /** @return live view of the token forms, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getstrTokenForm() {
        return strTokenForm;
    }

    /** Appends a token form at the next dense index (current map size). */
    public final void addstrTokenForm(String str) {
        strTokenForm.put(strTokenForm.size(), str);
    }
|
||||
|
||||
    /** @return live view of the token stems, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getstrTokenStems() {
        return strTokenStems;
    }

    /** Appends a token stem at the next dense index (current map size). */
    public final void addstrTokenStems(String str) {
        strTokenStems.put(strTokenStems.size(), str);
    }

    /** @return live view of the token map tags, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getITokenMapTag() {
        return ITokenMapTag;
    }

    /** Appends a token map tag at the next dense index (current map size). */
    public final void addITokenMapTag(String str) {
        ITokenMapTag.put(ITokenMapTag.size(), str);
    }
|
||||
|
||||
    /** @return the unmarked-pattern counter (static — shared across all instances). */
    public final int getUnmarkedPatternCounter() {
        return UnmarkedPatternCounter;
    }

    /**
     * Sets the unmarked-pattern counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setUnmarkedPatternCounter(int UnmarkedPatternCounter) {
        SentimentValueCache.UnmarkedPatternCounter = UnmarkedPatternCounter;
    }

    // NOTE(review): "Continious" below vs. "Continuous" above is an inconsistent
    // spelling pair baked into the public API; renaming would break callers.
    /** @return the marked-continuous counter entries (static — shared). */
    public final int getMarkedContiniousCounterEntries() {
        return MarkedContiniousCounterEntries;
    }

    /**
     * Sets the marked-continuous counter entries.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) {
        SentimentValueCache.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries;
    }

    /** @return the marked-continuous counter (static — shared). */
    public final int getMarkedContinuousCounter() {
        return MarkedContinuousCounter;
    }

    /**
     * Sets the marked-continuous counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setMarkedContinuousCounter(int MarkedContinuousCounter) {
        SentimentValueCache.MarkedContinuousCounter = MarkedContinuousCounter;
    }
|
||||
|
||||
    /** @return the negative inflected-form counter (static — shared). */
    public final int getInflectedCounterNegative() {
        return inflectedCounterNegative;
    }

    /**
     * Sets the negative inflected-form counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setInflectedCounterNegative(int inflectedCounterNegative) {
        SentimentValueCache.inflectedCounterNegative = inflectedCounterNegative;
    }

    /** @return the positive inflected-form counter (static — shared). */
    public final int getInflectedCounterPositive() {
        return inflectedCounterPositive;
    }

    /**
     * Sets the positive inflected-form counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setInflectedCounterPositive(int inflectedCounterPositive) {
        SentimentValueCache.inflectedCounterPositive = inflectedCounterPositive;
    }
|
||||
|
||||
    /** @return the annotator counter (static — shared). */
    public final int getAnotatorcounter() {
        return anotatorcounter;
    }

    /**
     * Sets the annotator counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setAnotatorcounter(int anotatorcounter) {
        SentimentValueCache.anotatorcounter = anotatorcounter;
    }

    /** @return the tokens counter (static — shared). */
    public final int getTokensCounter() {
        return tokensCounter;
    }

    /**
     * Sets the tokens counter.
     * NOTE(review): assigns the static field, affecting every instance.
     */
    public final void setTokensCounter(int tokensCounter) {
        SentimentValueCache.tokensCounter = tokensCounter;
    }
|
||||
|
||||
    /** @return the main sentiment score. */
    public final int getMainSentiment() {
        return mainSentiment;
    }

    /**
     * Sets the main sentiment score.
     * NOTE(review): assigned via the class name, which implies mainSentiment is a
     * static field shared across instances — confirm against its declaration.
     */
    public final void setMainSentiment(int mainSentiment) {
        SentimentValueCache.mainSentiment = mainSentiment;
    }

    /** @return the "longest" value. */
    public final int getLongest() {
        return longest;
    }

    /**
     * Sets the "longest" value.
     * NOTE(review): assigned via the class name — see setMainSentiment.
     */
    public final void setLongest(int longest) {
        SentimentValueCache.longest = longest;
    }
|
||||
|
||||
    // NOTE(review): raw List type in this getter/setter pair — parameterizing it
    // would change the public signature, so it is only flagged here.
    /** @return the raw classification result list. */
    public final List getClassifyRaw() {
        return classifyRaw;
    }

    /**
     * Sets the raw classification result list.
     * NOTE(review): assigned via the class name, implying a static field shared
     * across instances — confirm against its declaration.
     */
    public final void setClassifyRaw(List classifyRaw) {
        SentimentValueCache.classifyRaw = classifyRaw;
    }
|
||||
|
||||
public final ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
|
||||
return rnnPredictClassMap;
|
||||
}
|
||||
|
||||
public final void addRNNPredictClass(int rnnPrediction) {
|
||||
rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
|
||||
}
|
||||
|
||||
    /** Appends a SimpleMatrix at the next dense index (current map size). */
    public final void addSimpleMatrix(SimpleMatrix SMX) {
        simpleSMXlist.put(simpleSMXlist.size(), SMX);
    }

    /** Appends a SimpleMatrix vector at the next dense index (current map size). */
    public final void addSimpleMatrixVector(SimpleMatrix SMX) {
        simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
    }
|
||||
|
||||
    /** @return live view of the grammatical structures, keyed by insertion index. */
    public final ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
        return gsMap;
    }

    /** @return live view of the SimpleMatrix list, keyed by insertion index. */
    public final ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
        return simpleSMXlist;
    }

    /** @return live view of the SimpleMatrix vectors, keyed by insertion index. */
    public final ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
        return simpleSMXlistVector;
    }

    /**
     * NOTE(review): exact duplicate of {@link #getGsMap()} — both return gsMap.
     * Kept because callers may use either name; consider deprecating one.
     */
    public final ConcurrentMap<Integer, GrammaticalStructure> getGs() {
        return gsMap;
    }
|
||||
|
||||
    /** @return the cache's counter value. */
    public final int getCounter() {
        return counter;
    }

    /** Appends a grammatical structure at the next dense index (current map size). */
    public final void addGS(GrammaticalStructure gs) {
        gsMap.put(gsMap.size(), gs);
    }
|
||||
|
||||
public final Collection<TypedDependency> getAllTypedDependencies() {
|
||||
return allTypedDependencies;
|
||||
}
|
||||
|
||||
public final void addTypedDependencies(Collection<TypedDependency> TDPlist) {
|
||||
for (TypedDependency TDP : TDPlist) {
|
||||
allTypedDependencies.add(TDP);
|
||||
}
|
||||
}
|
||||
|
||||
    /** @return live view of the constituency parse trees, keyed by insertion index. */
    public final ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
        return sentenceConstituencyParseList;
    }

    /** Appends a constituency parse tree at the next dense index (current map size). */
    public final void addSentenceConstituencyParse(Tree tree) {
        sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
    }
|
||||
|
||||
    /**
     * Sets the counter.
     * NOTE(review): assigned via the class name, implying counter is static and
     * shared across instances, yet the constructor assigns it via {@code this} —
     * confirm the field's declaration and intent.
     */
    public final void setCounter(int counter) {
        SentimentValueCache.counter = counter;
    }

    /** @return the sentence this cache was built for. */
    public final String getSentence() {
        return sentence;
    }
|
||||
|
||||
    /**
     * Creates a cache for the given sentence with an initial counter value.
     *
     * @param str     the sentence this cache belongs to
     * @param counter initial counter value
     */
    public SentimentValueCache(String str, int counter) {
        this.sentence = str;
        this.counter = counter;
    }
|
||||
|
||||
    /** @return live view of the tagged-word list index, keyed by insertion index. */
    public final ConcurrentMap<Integer, String> getTgwlistIndex() {
        return tgwlistIndex;
    }

    /** Appends a tagged-word index entry at the next dense index (current map size). */
    public final void addTgwlistIndex(String str) {
        tgwlistIndex.put(tgwlistIndex.size(), str);
    }
|
||||
|
||||
    /**
     * Creates a cache for the given sentence; the counter is left at whatever
     * its declaration initializes it to (declaration not visible here).
     *
     * @param str the sentence this cache belongs to
     */
    public SentimentValueCache(String str) {
        this.sentence = str;
    }
|
||||
}
|
Loading…
Reference in New Issue
Block a user