WHY THE FUCK CANT YOU JUST TRANSFER A SimilarityMatrix OBJECT LIST LIKE ANY OTHER NORMAL COLLECTION
parent a4139e4ae4
commit f54c447eb6
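The commit title is a complaint about having to pass comparison results around as strings instead of as SimilarityMatrix objects. As a rough, hedged sketch of the underlying idea (this is not code from the commit; the SimilarityMatrix class below is a minimal stand-in with only the fields used here), the results of a collection of Future<SimilarityMatrix> tasks can be gathered into a plain List<SimilarityMatrix> and consumed as objects:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class FutureCollectionSketch {

        // Minimal stand-in for the project's SimilarityMatrix; only the fields used below.
        static class SimilarityMatrix {
            final String primaryString;
            final String secondaryString;
            final double distance;

            SimilarityMatrix(String primaryString, String secondaryString, double distance) {
                this.primaryString = primaryString;
                this.secondaryString = secondaryString;
                this.distance = distance;
            }
        }

        // Waits on each future (with a timeout) and keeps the completed results as objects,
        // so callers can sort or filter on distance instead of re-parsing strings.
        static List<SimilarityMatrix> collect(List<Future<SimilarityMatrix>> futures) {
            List<SimilarityMatrix> results = new ArrayList<>();
            for (Future<SimilarityMatrix> future : futures) {
                try {
                    SimilarityMatrix smx = future.get(5, TimeUnit.SECONDS);
                    if (smx != null) {
                        results.add(smx);
                    }
                } catch (InterruptedException | ExecutionException | TimeoutException ex) {
                    // Skip comparisons that failed or timed out; keep the rest.
                }
            }
            return results;
        }

        public static void main(String[] args) {
            ExecutorService pool = Executors.newFixedThreadPool(2);
            List<Future<SimilarityMatrix>> futures = new ArrayList<>();
            futures.add(pool.submit(() -> new SimilarityMatrix("hello", "hi there", 312.5)));
            futures.add(pool.submit(() -> new SimilarityMatrix("hello", "unrelated", -42.0)));
            for (SimilarityMatrix smx : collect(futures)) {
                System.out.println(smx.secondaryString + " -> " + smx.distance);
            }
            pool.shutdown();
        }
    }

The diff below takes a different route and threads a ConcurrentMap<Integer, String> through futuresReturnOverallEvaluation instead of returning the matrices themselves.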
@@ -21,10 +21,12 @@ import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.GrammaticalStructureFactory;
import edu.stanford.nlp.trees.TreebankLanguagePack;
import java.io.IOException;
import static java.lang.Math.random;
import java.sql.SQLException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -36,6 +38,7 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@@ -49,7 +52,7 @@ import java.util.stream.Collectors;
* @author install1
*/
public class Datahandler {


public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES);
public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
public static Datahandler instance = new Datahandler();
@@ -58,9 +61,6 @@ public class Datahandler {
private static Annotation strAnnoSentiment;
private static Annotation strAnnoJMWE;
private static CoreDocument coreDoc;
private static final ConcurrentMap<String, ConcurrentMap<String, Double>> sentenceRelationMap = new MapMaker().concurrencyLevel(4).makeMap();
private static volatile Double preRelationCounters = 0.0;
private static volatile Double preRelationUserCounters = 0.0;
private static final ConcurrentMap<Integer, String> stringCache = new MapMaker().concurrencyLevel(2).makeMap();
private static int positiveRelationCounter = 0;
private static int negativeRelationCounter = 0;
@@ -68,12 +68,10 @@ public class Datahandler {
private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
private static ConcurrentMap<String, Annotation> jmweAnnotationCache;
private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
private static ConcurrentMap<String, Integer> conversationMatchMap;
private static ConcurrentMap<Integer, String> conversationMatchMap;
private static ConcurrentMap<String, Integer> conversationUserMatchMap;
private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
private static final ConcurrentMap<String, Integer> locateFaultySentences = new MapMaker().concurrencyLevel(6).makeMap();
private static final ConcurrentMap<String, Double> mapUdate = new MapMaker().concurrencyLevel(4).makeMap();
private final static ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(4).makeMap();
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
private final Stopwatch stopwatch;
private static final ForkJoinPool executor = instantiateExecutor();
@@ -97,7 +95,7 @@ public class Datahandler {
// set up Stanford CoreNLP pipeline
private static final StanfordCoreNLP pipeline = getPipeLineSetUp();
private static StanfordCoreNLP pipelineSentiment;


public Datahandler() {
this.stopwatch = Stopwatch.createUnstarted();
this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap();
@@ -107,7 +105,11 @@ public class Datahandler {
this.conversationMatchMap = new MapMaker().concurrencyLevel(4).makeMap();
this.conversationUserMatchMap = new MapMaker().concurrencyLevel(4).makeMap();
}


public static StanfordCoreNLP getPipeline() {
return pipeline;
}

private static StanfordCoreNLP getPipeLineSetUp() {
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse");
props.setProperty("parse.model", shiftReduceParserPath);
@@ -125,7 +127,7 @@ public class Datahandler {
props.setProperty("tokenize.options", "untokenizable=firstDelete");
return new StanfordCoreNLP(props);
}


public void shiftReduceParserInitiate() {
//got 8 cores
CountDownLatch cdl = new CountDownLatch(2);
@@ -163,15 +165,15 @@ public class Datahandler {
}
System.out.println("finished shiftReduceParserInitiate\n");
}


public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
return classifier;
}


public static void setClassifier(AbstractSequenceClassifier<CoreLabel> classifier) {
Datahandler.classifier = classifier;
}


public void updateStringCache() {
try {
checkIfUpdateStrings(true);
@@ -179,42 +181,34 @@ public class Datahandler {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
}


private static ForkJoinPool instantiateExecutor() {
//Runtime.getRuntime().availableProcessors() or static value like 25
return new ForkJoinPool(Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null, false);
}

public static GrammaticalStructureFactory getGsf() {
return gsf;
}

public static StanfordCoreNLP getPipeline() {
return pipeline;
}

public static StanfordCoreNLP getPipelineSentiment() {
return pipelineSentiment;
}


public static MaxentTagger getTagger() {
return tagger;
}


private Map<Integer, String> getCache() throws SQLException, IOException, CustomError {
return DataMapper.getAllStrings();
}


public int getlHMSMXSize() {
return lHMSMX.size();
}


public int getstringCacheSize() {
return stringCache.size();
}


public void initiateMYSQL() throws SQLException, IOException {
try {
DataMapper.createTables();
@@ -225,7 +219,7 @@ public class Datahandler {
.getName()).log(Level.SEVERE, null, ex);
}
}

public void addHLstatsMessages() {
ConcurrentMap<String, Integer> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strCacheLocal = stringCache;
@@ -235,7 +229,7 @@ public class Datahandler {
hlStatsMessages.put(str, hlStatsMessages.size());
}
}
int capacity = 5550;
int capacity = 9550;
hlStatsMessages.keySet().forEach(str -> {
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
@@ -245,7 +239,7 @@ public class Datahandler {
}
});
}


public void instantiateAnnotationMapJMWE() {
if (!stringCache.isEmpty()) {
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values());
@@ -254,7 +248,7 @@ public class Datahandler {
}
}
}

public void instantiateAnnotationMap() {
if (!stringCache.isEmpty()) {
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
@@ -279,15 +273,21 @@ public class Datahandler {
});
}
}

private static void futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str) {

private ConcurrentMap<Integer, String> futuresReturnOverallEvaluation(ConcurrentMap<String, Future<SimilarityMatrix>> entries, String str,
ConcurrentMap<Integer, String> strmapreturn) {
for (Entry<String, Future<SimilarityMatrix>> entrySet : entries.entrySet()) {
String transmittedStr = entrySet.getKey();
SimilarityMatrix getSMX = retrieveFutureSMX(entrySet.getValue());
if (handleRetrievedSMX(getSMX, str, transmittedStr)) {
int handleRetrievedSMX = handleRetrievedSMX(getSMX, transmittedStr);
System.out.println("handleRetrievedSMX: " + handleRetrievedSMX + "\n");
if (handleRetrievedSMX == 2) {
strmapreturn.put(strmapreturn.size(), str);
break;
} else if (handleRetrievedSMX == 1) {
break;
}
try {
if (getSMX != null) {
SentimentValueCache cacheValue1 = getSMX.getCacheValue1();
SentimentValueCache cacheValue2 = getSMX.getCacheValue2();
if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(str)) {
@@ -296,26 +296,21 @@ public class Datahandler {
if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) {
sentimentCachingMap.put(transmittedStr, getSMX.getCacheValue2());
}
} catch (Exception ex) {
//System.out.println("FAILING futures return. EX: " + ex.getLocalizedMessage() + "\n");
}
}
sentenceRelationMap.put(str, mapUdate);
return strmapreturn;
}

private static boolean handleRetrievedSMX(SimilarityMatrix getSMX, String str, String transmittedStr) {

private int handleRetrievedSMX(SimilarityMatrix getSMX, String transmittedStr) {
final int relationCap = 20;
if (getSMX != null) {
//System.out.println("getSMX primary: " + getSMX.getPrimaryString() + "\ngetSMX secondary: " + getSMX.getSecondaryString() + "\n");
final Double scoreRelationNewMsgToRecentMsg = getSMX.getDistance();
//System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
mapUdate.put(getSMX.getSecondaryString(), scoreRelationNewMsgToRecentMsg);
if (scoreRelationNewMsgToRecentMsg >= 200.0) {
positiveRelationCounter++;
if (positiveRelationCounter >= relationCap) {
//System.out.println("added to strmapreturn str: " + str + "\n");
strmapreturn.put(strmapreturn.size() + 1, str);
return true;
return 2;
}
} else if (scoreRelationNewMsgToRecentMsg <= -6000.0) {
negativeRelationCounter += 1;
@@ -329,12 +324,12 @@ public class Datahandler {
}
}
if (negativeRelationCounter >= relationCap) {
return true;
return 1;
}
return false;
return 0;
}

private static Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(

private Entry<ConcurrentMap<String, Future<SimilarityMatrix>>, ConcurrentMap<Integer, String>> StrComparringNoSentenceRelationMap(
ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap,
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
@@ -371,12 +366,13 @@ public class Datahandler {
= new AbstractMap.SimpleEntry(futures, strCacheLocal);
return mapreturn;
}

private static ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,

private ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,
ConcurrentMap<Integer, String> strCacheLocal, ConcurrentMap<String, Annotation> localJMWEMap,
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
int i = 0;
ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(4).makeMap();
for (String str : strmap.values()) {
Integer getFaultyOccurences = locateFaultySentences.getOrDefault(str, null);
if (getFaultyOccurences == null || getFaultyOccurences <= 20) {
@@ -387,16 +383,16 @@ public class Datahandler {
localCoreDocumentMap);
strCacheLocal = strRelationMap.getValue();
final ConcurrentMap<String, Future<SimilarityMatrix>> futures = strRelationMap.getKey();
futuresReturnOverallEvaluation(futures, str);
System.out.println("overall advancement of future iterator: " + i + "\nstrmap size: " + strmap.values().size() + "\nstrmapreturn size: " + strmapreturn.size()
+ "\nlocateFaultySentences size: " + locateFaultySentences.size() + "\n");
strmapreturn = futuresReturnOverallEvaluation(futures, str, strmapreturn);
System.out.println("overall advancement of future iterator: " + i + "\nFullMapsize: " + strmap.values().size()
+ "\nstrmapreturn size: " + strmapreturn.size() + "\nlocateFaultySentences size: " + locateFaultySentences.size() + "\n");
i++;
}
}
return strmapreturn;
}

private static final ConcurrentMap<Integer, String> removeNonSensicalStrings(ConcurrentMap<Integer, String> strmap) {

private final ConcurrentMap<Integer, String> removeNonSensicalStrings(ConcurrentMap<Integer, String> strmap) {
final ConcurrentMap<Integer, String> strCacheLocal = stringCache.size() < 150 ? strmap : stringCache;
final ConcurrentMap<String, Annotation> localJMWEMap = getMultipleJMWEAnnotation(strmap.values());
final ConcurrentMap<String, Annotation> localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values());
@@ -405,7 +401,7 @@ public class Datahandler {
System.out.println("finished removeNonSensicalStrings annotations \n");
return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
}


private static SimilarityMatrix retrieveFutureSMX(Future<SimilarityMatrix> future) {
try {
// SimilarityMatrix SMX = future.get();
@@ -417,7 +413,7 @@ public class Datahandler {
}
return null;
}


public synchronized void checkIfUpdateStrings(boolean hlStatsMsg) throws CustomError {
if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) {
ConcurrentMap<Integer, String> str = MessageResponseHandler.getStr();
@@ -456,182 +452,97 @@ public class Datahandler {
}
}
}

public synchronized String getResponseMsg(String str, String MostRecent) throws CustomError {
|
||||
|
||||
private String trimString(String str) {
|
||||
str = str.trim();
|
||||
if (str.startsWith("<@")) {
|
||||
str = str.substring(str.indexOf("> ") + 2);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
private String getResponseFutures(String strF) {
|
||||
ConcurrentMap<Integer, String> strCache = stringCache;
|
||||
ConcurrentMap<Integer, Future<SimilarityMatrix>> futureslocal = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, SimilarityMatrix> futureAndCacheCombineMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
String strF = str;
|
||||
getSingularAnnotation(strF);
|
||||
if (!conversationMatchMap.keySet().contains(MostRecent) && !MostRecent.isEmpty()) {
|
||||
conversationMatchMap.put(MostRecent, conversationMatchMap.size());
|
||||
}
|
||||
if (!conversationUserMatchMap.keySet().contains(strF)) {
|
||||
conversationUserMatchMap.put(strF, conversationUserMatchMap.size());
|
||||
}
|
||||
ConcurrentMap<String, Double> getPrimary = sentenceRelationMap.get(strF);
|
||||
strCache.values().parallelStream().forEach((str1) -> {
|
||||
double preRelationUserCounters = -6500.0;
|
||||
//WHY THE FUCK CANT YOU JUST TRANSFER A SimilarityMatrix OBJECT LIST LIKE ANY OTHER NORMAL COLLECTION, WHY DOES IT HAVE TO BE A FUCKING STRING LIST
|
||||
List<String> concurrentRelations = new ArrayList();
|
||||
for (String str1 : strCache.values()) {
|
||||
if (!strF.equals(str1)) {
|
||||
boolean present = false;
|
||||
if (getPrimary != null) {
|
||||
Double getSecondary = getPrimary.get(str1);
|
||||
if (getSecondary != null) {
|
||||
present = true;
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1),
|
||||
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
|
||||
pipelineAnnotationCache.get(str1), strAnnoSentiment,
|
||||
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
|
||||
try {
|
||||
SimilarityMatrix getSMX = worker.call();
|
||||
//SimilarityMatrix getSMX = executor.submit(worker).get(5, TimeUnit.SECONDS);
|
||||
if (getSMX != null) {
|
||||
Double scoreRelationLastUserMsg = getSMX.getDistance();
|
||||
if (scoreRelationLastUserMsg > preRelationUserCounters) {
|
||||
preRelationUserCounters = scoreRelationLastUserMsg;
|
||||
concurrentRelations.add(getSMX.getSecondaryString());
|
||||
System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n");
|
||||
System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n");
|
||||
}
|
||||
}
|
||||
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
|
||||
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
|
||||
} catch (Exception ex) {
|
||||
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
if (!present) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(strF, str1);
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, SMX,
|
||||
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
|
||||
pipelineAnnotationCache.get(str1), strAnnoSentiment,
|
||||
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
|
||||
futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
|
||||
}
|
||||
}
|
||||
});
|
||||
futureslocal.values().forEach((future) -> {
|
||||
SimilarityMatrix SMX;
|
||||
try {
|
||||
SMX = future.get(5, TimeUnit.SECONDS);
|
||||
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
|
||||
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
|
||||
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
|
||||
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
|
||||
}
|
||||
});
|
||||
if (getPrimary != null) {
|
||||
for (Entry<String, Double> cacheResults : getPrimary.entrySet()) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(strF, cacheResults.getKey());
|
||||
SMX.setDistance(cacheResults.getValue());
|
||||
futureAndCacheCombineMap.put(futureAndCacheCombineMap.size(), SMX);
|
||||
}
|
||||
}
|
||||
//System.out.println("futureAndCacheCombineMap size: " + futureAndCacheCombineMap.size() + "\n");
|
||||
futureAndCacheCombineMap.values().parallelStream().forEach((SMX) -> {
|
||||
if (sentenceRelationMap.get(strF) == null) {
|
||||
ConcurrentMap<String, Double> localMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
localMap.put(SMX.getSecondaryString(), SMX.getDistance());
|
||||
sentenceRelationMap.put(SMX.getPrimaryString(), localMap);
|
||||
} else {
|
||||
ConcurrentMap<String, Double> getPrimaryLocal = sentenceRelationMap.get(strF);
|
||||
Double doubleValue = getPrimaryLocal.get(SMX.getSecondaryString());
|
||||
if (doubleValue == null) {
|
||||
getPrimaryLocal.put(SMX.getSecondaryString(), SMX.getDistance());
|
||||
sentenceRelationMap.put(SMX.getPrimaryString(), getPrimaryLocal);
|
||||
}
|
||||
StringBuilder SB = new StringBuilder();
|
||||
double randomLenghtPermit = strF.length() * (Math.random() * 5.5);
|
||||
Collections.reverse(concurrentRelations);
|
||||
for (String secondaryRelation : concurrentRelations) {
|
||||
if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) {
|
||||
break;
|
||||
}
|
||||
});
|
||||
preRelationCounters = -100.0;
|
||||
preRelationUserCounters = -100.0;
|
||||
Double scoreRelationNewMsgToRecentMsg = 0.0;
|
||||
Double scoreRelationOldUserMsg = 0.0;
|
||||
ConcurrentMap<String, Double> getPrimaryLocal = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String conversationStr : conversationMatchMap.keySet()) {
|
||||
System.out.println("relation secondary: " + secondaryRelation + "\n");
|
||||
SB.append(secondaryRelation).append(" ");
|
||||
}
|
||||
System.out.println("Reached end SB: " + SB.toString() + "\n");
|
||||
return SB.toString();
|
||||
}
|
||||
|
||||
private double setupScoreRelationtoRecent(String strF, ConcurrentMap<String, Double> getPrimaryLocal) {
|
||||
double scoreRelationNewMsgToRecentMsg = 0.0;
|
||||
for (String conversationStr : conversationMatchMap.values()) {
|
||||
Double getSecondary = 0.0;
|
||||
getPrimaryLocal = sentenceRelationMap.getOrDefault(strF, null);
|
||||
if (getPrimaryLocal == null) {
|
||||
getSecondary = getPrimaryLocal.get(conversationStr);
|
||||
if (getSecondary == null) {
|
||||
getSecondary = getScoreRelationStrF(strF, conversationStr);
|
||||
getPrimaryLocal.put(conversationStr, getSecondary);
|
||||
sentenceRelationMap.put(strF, getPrimaryLocal);
|
||||
} else {
|
||||
getSecondary = getPrimaryLocal.get(conversationStr);
|
||||
if (getSecondary == null) {
|
||||
getSecondary = getScoreRelationStrF(strF, conversationStr);
|
||||
getPrimaryLocal.put(conversationStr, getSecondary);
|
||||
sentenceRelationMap.put(strF, getPrimaryLocal);
|
||||
}
|
||||
}
|
||||
scoreRelationNewMsgToRecentMsg += getSecondary;
|
||||
System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\n");
|
||||
}
|
||||
return scoreRelationNewMsgToRecentMsg;
|
||||
}
|
||||
|
||||
private double setupScoreRelationOld(String strF, ConcurrentMap<String, Double> getPrimaryLocal) {
|
||||
double scoreRelationOldUserMsg = 0.0;
|
||||
for (String conversationUserStr : conversationUserMatchMap.keySet()) {
|
||||
if (!strF.equals(conversationUserStr)) {
|
||||
getPrimaryLocal = sentenceRelationMap.get(strF);
|
||||
Double getSecondary = getPrimaryLocal.get(conversationUserStr);
|
||||
if (getSecondary == null) {
|
||||
getSecondary = getScoreRelationStrF(strF, conversationUserStr);
|
||||
getPrimaryLocal.put(conversationUserStr, getSecondary);
|
||||
sentenceRelationMap.put(strF, getPrimaryLocal);
|
||||
}
|
||||
double getSecondary = getScoreRelationStrF(strF, conversationUserStr);
|
||||
scoreRelationOldUserMsg += getSecondary;
|
||||
}
|
||||
}
|
||||
boolean userReponseRelated = scoreRelationOldUserMsg >= 250;
|
||||
boolean relatedReponse = scoreRelationNewMsgToRecentMsg >= 250;
|
||||
if (!userReponseRelated) {
|
||||
conversationUserMatchMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
}
|
||||
if (!relatedReponse) {
|
||||
conversationMatchMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
}
|
||||
ConcurrentMap<Integer, Entry<Double, SimilarityMatrix>> concurrentRelationsMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> preRelationUserCountersMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
System.out.println("futureAndCacheCombineMap size for: " + futureAndCacheCombineMap.values().size() + "\n");
|
||||
for (SimilarityMatrix SMX : futureAndCacheCombineMap.values()) {
|
||||
Double scoreRelation = 500.0;
|
||||
Double scoreRelationLastUserMsg = SMX.getDistance();
|
||||
if (relatedReponse) {
|
||||
for (String conversationStr : conversationMatchMap.keySet()) {
|
||||
Double relationNewMsg = 0.0;
|
||||
ConcurrentMap<String, Double> getMap = sentenceRelationMap.get(conversationStr);
|
||||
if (getMap != null) {
|
||||
Double getdoubleValue = getMap.get(conversationStr);
|
||||
if (getdoubleValue == null) {
|
||||
getdoubleValue = getScoreRelationNewMsgToRecentMsg(SMX.getSecondaryString(), conversationStr);
|
||||
getMap.put(conversationStr, getdoubleValue);
|
||||
sentenceRelationMap.put(SMX.getSecondaryString(), getMap);
|
||||
}
|
||||
relationNewMsg += getdoubleValue;
|
||||
} else {
|
||||
relationNewMsg = getScoreRelationNewMsgToRecentMsg(SMX.getSecondaryString(), conversationStr);
|
||||
ConcurrentMap<String, Double> localInnerMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
localInnerMap.put(conversationStr, relationNewMsg);
|
||||
sentenceRelationMap.put(SMX.getSecondaryString(), localInnerMap);
|
||||
}
|
||||
if (userReponseRelated) {
|
||||
relationNewMsg += scoreRelationOldUserMsg / conversationUserMatchMap.size();
|
||||
}
|
||||
scoreRelation += relationNewMsg;
|
||||
}
|
||||
}
|
||||
Double totalRelation = scoreRelation + scoreRelationLastUserMsg;
|
||||
if (totalRelation > preRelationCounters + preRelationUserCounters && (scoreRelationLastUserMsg + (preRelationUserCounters / 10)
|
||||
>= preRelationUserCounters) || preRelationUserCounters == -100.0) {
|
||||
Entry<Double, SimilarityMatrix> localEntry = new AbstractMap.SimpleEntry(totalRelation, SMX);
|
||||
concurrentRelationsMap.put(concurrentRelationsMap.size(), localEntry);
|
||||
preRelationUserCountersMap.put(preRelationUserCountersMap.size(), preRelationUserCounters);
|
||||
System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelationsMap.size() + "\n");
|
||||
preRelationCounters = scoreRelation;
|
||||
preRelationUserCounters = scoreRelationLastUserMsg;
|
||||
} else {
|
||||
System.out.println("FAILED totalRelation: " + totalRelation + "\npreRelationUserCounters: " + preRelationUserCounters + "\npreRelationCounters: "
|
||||
+ preRelationCounters + "\nscoreRelationLastUserMsg: " + scoreRelationLastUserMsg + "\n");
|
||||
}
|
||||
}
|
||||
StringBuilder SB = new StringBuilder();
|
||||
int iterator = concurrentRelationsMap.size() - 1;
|
||||
System.out.println("iterator: " + iterator + "\n");
|
||||
while (iterator > -1) {
|
||||
Double preRelationUserCounterDouble = preRelationUserCountersMap.get(iterator);
|
||||
Entry<Double, SimilarityMatrix> getRelation = concurrentRelationsMap.get(iterator);
|
||||
Double result = preRelationUserCounterDouble + preRelationUserCounters;
|
||||
SB.append(getRelation.getValue().getSecondaryString()).append(" ");
|
||||
iterator--;
|
||||
System.out.println("result: " + result + "\ngetRelation.getKey(): " + getRelation.getKey() + "\npreRelationUserCounters: "
|
||||
+ preRelationUserCounters + "\npreRelationUserCounterDouble: " + preRelationUserCounterDouble + "\n");
|
||||
if (getRelation.getKey() < result) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
System.out.println("Reached end: SB: " + SB.toString() + "\n: ");
|
||||
return SB.toString();
|
||||
return scoreRelationOldUserMsg;
|
||||
}
|
||||
|
||||
|
||||
public synchronized String getResponseMsg(String str, String MostRecent) throws CustomError {
|
||||
String strF = trimString(str);
|
||||
getSingularAnnotation(strF);
|
||||
if (!conversationMatchMap.values().contains(MostRecent) && !MostRecent.isEmpty()) {
|
||||
conversationMatchMap.put(conversationMatchMap.size(), MostRecent);
|
||||
}
|
||||
if (!conversationUserMatchMap.keySet().contains(strF)) {
|
||||
conversationUserMatchMap.put(strF, conversationUserMatchMap.size());
|
||||
}
|
||||
return getResponseFutures(strF);
|
||||
}
|
||||
|
||||
public void getSingularAnnotation(String str) {
|
||||
strAnno = new Annotation(str);
|
||||
pipeline.annotate(strAnno);
|
||||
@ -645,12 +556,12 @@ public class Datahandler {
|
||||
pipeline.annotate(coreDocument);
|
||||
coreDoc = coreDocument;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultipleJMWEAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str);
|
||||
return jmweAnnotation;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultiplePipelineAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String str1 : str) {
|
||||
@ -660,7 +571,7 @@ public class Datahandler {
|
||||
pipeline.annotate(pipelineAnnotationMap.values());
|
||||
return pipelineAnnotationMap;
|
||||
}
|
||||
|
||||
|
||||
private static ConcurrentMap<String, Annotation> getMultiplePipelineSentimentAnnotation(Collection<String> str) {
|
||||
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
for (String str1 : str) {
|
||||
@ -670,7 +581,7 @@ public class Datahandler {
|
||||
pipelineSentiment.annotate(pipelineAnnotationMap.values());
|
||||
return pipelineAnnotationMap;
|
||||
}
|
||||
|
||||
|
||||
private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
|
||||
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
|
||||
@ -693,7 +604,7 @@ public class Datahandler {
|
||||
}
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
|
||||
private Double getScoreRelationStrF(String str, String mostRecentMsg) {
|
||||
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
|
||||
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
|
||||
@ -715,85 +626,7 @@ public class Datahandler {
|
||||
}
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings, String MostRecent) {
|
||||
similar = "";
|
||||
minDistance = 6.0;
|
||||
preRelationCounters = 0.0;
|
||||
preRelationUserCounters = 0.0;
|
||||
getSingularAnnotation(toBeCompared);
|
||||
ConcurrentMap<Integer, String> similardistances = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
concurrentStrings.values().parallelStream().forEach((str) -> {
|
||||
LevenshteinDistance leven = new LevenshteinDistance(toBeCompared, str);
|
||||
double distance = leven.computeLevenshteinDistance();
|
||||
if (distance <= minDistance) {
|
||||
System.out.println("distance: " + distance + "\n");
|
||||
similardistances.put(similardistances.size() + 1, str);
|
||||
}
|
||||
});
|
||||
if (similardistances.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
Double maxDistance = 0.0;
|
||||
for (String str : similardistances.values()) {
|
||||
LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(str, null);
|
||||
if (orDefault != null) {
|
||||
for (Entry<String, Double> defaultEntry : orDefault.entrySet()) {
|
||||
Double value = defaultEntry.getValue();
|
||||
String key = defaultEntry.getKey();
|
||||
if (value > maxDistance) {
|
||||
Double RelationScoreLastMsg = 500.0;
|
||||
Double RelationScoreLastUserMsg = 500.0;
|
||||
boolean foundmatch = false;
|
||||
if (!MostRecent.isEmpty()) {
|
||||
LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(MostRecent, null);
|
||||
if (orDefault1 != null) {
|
||||
Collection<String> orDefaultstrs = orDefault1.keySet();
|
||||
for (String strs : orDefaultstrs) {
|
||||
if (strs.equals(key)) {
|
||||
RelationScoreLastMsg = orDefault1.get(key);
|
||||
foundmatch = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!foundmatch) {
|
||||
orDefault1 = lHMSMX.getOrDefault(key, null);
|
||||
if (orDefault1 != null) {
|
||||
Collection<String> orDefaultstrs = orDefault1.keySet();
|
||||
for (String strs : orDefaultstrs) {
|
||||
if (strs.equals(MostRecent)) {
|
||||
RelationScoreLastMsg = orDefault1.get(MostRecent);
|
||||
foundmatch = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!foundmatch) {
|
||||
RelationScoreLastMsg = getScoreRelationNewMsgToRecentMsg(key, MostRecent);
|
||||
}
|
||||
RelationScoreLastUserMsg = getScoreRelationNewMsgToRecentMsg(key, toBeCompared);
|
||||
Double totalRelation = RelationScoreLastMsg + RelationScoreLastUserMsg;
|
||||
if (totalRelation > preRelationCounters + preRelationUserCounters) {
|
||||
if (RelationScoreLastMsg + 500 > preRelationUserCounters && RelationScoreLastUserMsg > preRelationCounters
|
||||
|| RelationScoreLastUserMsg + 500 > preRelationCounters && RelationScoreLastMsg > preRelationUserCounters) {
|
||||
if (RelationScoreLastMsg > preRelationCounters && RelationScoreLastUserMsg > preRelationUserCounters) {
|
||||
maxDistance = value;
|
||||
similar = defaultEntry.getKey();
|
||||
preRelationCounters = RelationScoreLastMsg;
|
||||
preRelationUserCounters = RelationScoreLastUserMsg;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return similar.isEmpty() ? null : similar;
|
||||
}
|
||||
|
||||
|
||||
public static ConcurrentMap<Integer, String> cutContent(ConcurrentMap<Integer, String> str, boolean hlStatsMsg) {
|
||||
ConcurrentMap<Integer, String> returnlist = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
@ -807,7 +640,7 @@ public class Datahandler {
|
||||
});
|
||||
return returnlist;
|
||||
}
|
||||
|
||||
|
||||
public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
|
||||
ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
@ -914,7 +747,7 @@ public class Datahandler {
|
||||
});
|
||||
return strlistreturn;
|
||||
}
|
||||
|
||||
|
||||
private ConcurrentMap<Integer, String> removeSlacks(ConcurrentMap<Integer, String> str) {
|
||||
ConcurrentMap<Integer, String> strreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
if (stringCache.isEmpty()) {
|
||||
@ -939,7 +772,7 @@ public class Datahandler {
|
||||
});
|
||||
return strreturn;
|
||||
}
|
||||
|
||||
|
||||
private ConcurrentMap<Integer, String> annotationCacheUpdate(ConcurrentMap<Integer, String> strmap) {
|
||||
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values());
|
||||
for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
|
||||
@ -977,25 +810,26 @@ public class Datahandler {
|
||||
});
|
||||
return strmap;
|
||||
}
|
||||
|
||||
|
||||
private static class AnnotationCollector<T> implements Consumer<T> {
|
||||
|
||||
|
||||
private static int i = 0;
|
||||
private final List<T> annotationsT = new ArrayList();
|
||||
|
||||
|
||||
@Override
|
||||
public final void accept(T ann) {
|
||||
annotationsT.add(ann);
|
||||
}
|
||||
}
|
||||
|
||||
public final static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
|
||||
|
||||
public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
|
||||
AnnotationCollector<Annotation> annCollector = new AnnotationCollector();
|
||||
System.out.println("processing multiple coreDocuments Annotation: \n");
|
||||
for (final String exampleString : str) {
|
||||
System.out.println("exampleString: " + exampleString + "\n");
|
||||
//System.out.println("exampleString: " + exampleString + "\n");
|
||||
localNLP.annotate(new Annotation(exampleString), annCollector);
|
||||
annCollector.i++;
|
||||
System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
|
||||
//System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
|
||||
}
|
||||
try {
|
||||
Thread.sleep(10000);
|
||||
|
@ -68,8 +68,6 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
private final String str1;
|
||||
private final MaxentTagger tagger;
|
||||
private final GrammaticalStructureFactory gsf;
|
||||
private final StanfordCoreNLP pipeline;
|
||||
private final StanfordCoreNLP pipelineSentiment;
|
||||
private final AbstractSequenceClassifier classifier;
|
||||
private final Annotation jmweStrAnnotation1;
|
||||
private final Annotation jmweStrAnnotation2;
|
||||
@ -97,8 +95,6 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
this.str1 = str1;
|
||||
this.smxParam = smxParam;
|
||||
this.tagger = Datahandler.getTagger();
|
||||
this.pipeline = Datahandler.getPipeline();
|
||||
this.pipelineSentiment = Datahandler.getPipelineSentiment();
|
||||
this.gsf = Datahandler.getGsf();
|
||||
this.classifier = Datahandler.getClassifier();
|
||||
this.jmweStrAnnotation1 = str1Annotation;
|
||||
@ -190,13 +186,14 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) {
|
||||
if (grammaticalRelation.isApplicable(sentenceConstituencyParse2)) {
|
||||
score += 700;
|
||||
//System.out.println("grammaticalRelation applicable score: " + score + "\n");
|
||||
// System.out.println("grammaticalRelation applicable score: " + score + "\n");
|
||||
grammaticalRelation1++;
|
||||
}
|
||||
GrammaticalRelation reln = TDY1.reln();
|
||||
//sentenceConstituencyParse1
|
||||
if (reln.isApplicable(sentenceConstituencyParse2)) {
|
||||
score += 525;
|
||||
// System.out.println("reln1 applicable score: " + score + "\n");
|
||||
relationApplicable1++;
|
||||
}
|
||||
}
|
||||
@ -210,22 +207,23 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) {
|
||||
if (grammaticalRelation.isApplicable(sentenceConstituencyParse1)) {
|
||||
score += 700;
|
||||
//System.out.println("grammaticalRelation applicable score: " + score + "\n");
|
||||
// System.out.println("grammaticalRelation applicable score: " + score + "\n");
|
||||
grammaticalRelation2++;
|
||||
}
|
||||
GrammaticalRelation reln = TDY.reln();
|
||||
//sentenceConstituencyParse1
|
||||
if (reln.isApplicable(sentenceConstituencyParse1)) {
|
||||
score += 525;
|
||||
// System.out.println("reln2 applicable score: " + score + "\n");
|
||||
relationApplicable2++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// System.out.println("\ngrammaticalRelation1: " + grammaticalRelation1 + "\ngrammaticalRelation2: " + grammaticalRelation2 + "\n");
|
||||
if ((grammaticalRelation1 == 0 && grammaticalRelation2 > 4) || (grammaticalRelation2 == 0 && grammaticalRelation1 > 4)) {
|
||||
score -= 3450;
|
||||
//System.out.println("grammaticalRelation1 score trim: " + score + "\ngrammaticalRelation1: " + grammaticalRelation1
|
||||
// + "\ngrammaticalRelation2: " + grammaticalRelation2 + "\n");
|
||||
// System.out.println("grammaticalRelation score trim: " + score + "\n");
|
||||
}
|
||||
if (!allTypedDependencies1.isEmpty() || !allTypedDependencies2.isEmpty()) {
|
||||
int allTypeDep1 = allTypedDependencies1.size();
|
||||
@ -233,13 +231,13 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
if (allTypeDep1 <= allTypeDep2 * 5 && allTypeDep2 <= allTypeDep1 * 5) {
|
||||
if (allTypeDep1 > 0 && allTypeDep2 > 0) {
|
||||
if (allTypeDep1 * 2 <= allTypeDep2 || allTypeDep2 * 2 <= allTypeDep1) {
|
||||
score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 160 : (allTypeDep2 - allTypeDep1) * 160;
|
||||
//System.out.println(" allTypeDep score: " + score + "\nallTypeDep1: " + allTypeDep1 + "\nallTypeDep2: "
|
||||
// + allTypeDep2 + "\n");
|
||||
score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 160 : (allTypeDep2 - allTypeDep1) * 160;
|
||||
// System.out.println(" allTypeDep plus score: " + score + "\nallTypeDep1: " + allTypeDep1 + "\nallTypeDep2: "
|
||||
// + allTypeDep2 + "\n");
|
||||
} else {
|
||||
score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600;
|
||||
//System.out.println(" allTypeDep score: " + score + "\nallTypeDep1: " + allTypeDep1 + "\nallTypeDep2: "
|
||||
// + allTypeDep2 + "\n");
|
||||
score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600;
|
||||
// System.out.println(" allTypeDep minus score: " + score + "\nallTypeDep1: " + allTypeDep1 + "\nallTypeDep2: "
|
||||
// + allTypeDep2 + "\n");
|
||||
}
|
||||
alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep1);
|
||||
alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep2);
|
||||
@ -253,44 +251,43 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
&& !summationMap.values().contains(summation)) {
|
||||
score += summation * 80;
|
||||
summationMap.put(summationMap.size() + 1, summation);
|
||||
//System.out.println("score post summation: " + score + "\nsummation: " + summation + "\n");
|
||||
// System.out.println("score post summation: " + score + "\nsummation: " + summation + "\n");
|
||||
} else if (largerTypeDep == smallerTypeDep) {
|
||||
score += 2500;
|
||||
//System.out.println("score largerTypeDep equals smallerTypeDep: " + score + "\nlargerTypeDep: " + largerTypeDep + "\n");
|
||||
// System.out.println("score largerTypeDep equals smallerTypeDep: " + score + "\nlargerTypeDep: " + largerTypeDep + "\n");
|
||||
}
|
||||
}
|
||||
if (relationApplicable1 > 0 && relationApplicable2 > 0 && relationApplicable1 == relationApplicable2
|
||||
&& grammaticalRelation1 > 0 && grammaticalRelation2 > 0 && grammaticalRelation1 == grammaticalRelation2) {
|
||||
score += 3500;
|
||||
//System.out.println("score relationApplicable equal: " + score + "\n");
|
||||
if (relationApplicable1 > 0 && relationApplicable2 > 0 && relationApplicable1 != relationApplicable2) {
|
||||
score += 1500;
|
||||
// System.out.println("score relationApplicable equal: " + score + "\n");
|
||||
} else if (allTypeDep1 * 5 < allTypeDep2 || allTypeDep2 * 5 < allTypeDep1) {
|
||||
score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * (allTypeDep2 * 450)
|
||||
: (allTypeDep2 - allTypeDep1) * (allTypeDep1 * 450);
|
||||
//System.out.println("score minus grammaticalRelation equal: " + score + "\n");
|
||||
// System.out.println("score minus grammaticalRelation equal: " + score + "\n");
|
||||
}
|
||||
if (relationApplicable1 > 1 && relationApplicable2 > 1 && relationApplicable1 * 3 > relationApplicable2
|
||||
&& relationApplicable2 * 3 > relationApplicable1) {
|
||||
score += relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 1500
|
||||
: (relationApplicable2 - relationApplicable1) * 1500;
|
||||
//System.out.println("score relationApplicable plus: " + score + "\n");
|
||||
// System.out.println("score relationApplicable plus: " + score + "\n");
|
||||
} else if (relationApplicable1 * 5 < relationApplicable2 || relationApplicable2 * 5 < relationApplicable1) {
|
||||
score -= relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 500
|
||||
: (relationApplicable2 - relationApplicable1) * 500;
|
||||
//System.out.println("score relationApplicable minus: " + score + "\n");
|
||||
// System.out.println("score relationApplicable minus: " + score + "\n");
|
||||
}
|
||||
if (grammaticalRelation1 > 0 && grammaticalRelation2 > 0 && grammaticalRelation1 * 3 > grammaticalRelation2
|
||||
&& grammaticalRelation2 * 3 > grammaticalRelation1) {
|
||||
score += grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 1500
|
||||
: (grammaticalRelation2 - grammaticalRelation1) * 1500;
|
||||
//System.out.println("score grammaticalRelation plus: " + score + "\n");
|
||||
// System.out.println("score grammaticalRelation plus: " + score + "\n");
|
||||
} else if (grammaticalRelation1 * 5 < grammaticalRelation2 || grammaticalRelation2 * 5 < grammaticalRelation1) {
|
||||
score -= grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 500
|
||||
: (grammaticalRelation2 - grammaticalRelation1) * 500;
|
||||
//System.out.println("score grammaticalRelation minus: " + score + "\n");
|
||||
// System.out.println("score grammaticalRelation minus: " + score + "\n");
|
||||
}
|
||||
//System.out.println("score post relationApplicable1 veri: " + score + "\nrelationApplicable1: " + relationApplicable1
|
||||
// + "\nrelationApplicable2: " + relationApplicable2 + "\ngrammaticalRelation1: " + grammaticalRelation1 + "\n"
|
||||
// + "grammaticalRelation2: " + grammaticalRelation2 + "\n");
|
||||
// System.out.println("score post relationApplicable1 veri: " + score + "\nrelationApplicable1: " + relationApplicable1
|
||||
// + "\nrelationApplicable2: " + relationApplicable2 + "\ngrammaticalRelation1: " + grammaticalRelation1 + "\n"
|
||||
// + "grammaticalRelation2: " + grammaticalRelation2 + "\n");
|
||||
}
|
||||
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
AtomicInteger runCount1 = new AtomicInteger(0);
|
||||
@ -338,13 +335,12 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
return score;
|
||||
}
|
||||
|
||||
private final Double simpleRNNMatrixCalculations(Double score, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist2) {
|
||||
private Double simpleRNNMatrixCalculations(Double score, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist2) {
|
||||
for (SimpleMatrix simpleSMX2 : simpleSMXlist2.values()) {
|
||||
ConcurrentMap<Integer, Double> AccumulateDotMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> subtractorMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotPredictions = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> DotOverTransfer = dotPredictions;
|
||||
dotPredictions = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> DotOverTransfer = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
Double totalSubtraction = 0.0;
|
||||
Double largest = 10.0;
|
||||
Double shortest = 100.0;
|
||||
@ -357,26 +353,38 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
double subtracter2 = dotPrediction2 > 50 ? dotPrediction2 - 100 : dotPrediction2 > 0 ? 100 - dotPrediction2 : 0;
|
||||
subtractorMap.put(subtractorMap.size() + 1, subtracter1);
|
||||
subtractorMap.put(subtractorMap.size() + 1, subtracter2);
|
||||
//System.out.println("dotPrediction: " + dotPrediction + "\nsubtracter: " + subtracter + "\n");
|
||||
// System.out.println("dotPrediction: " + dotPrediction1 + "\n");
|
||||
if (!dotPredictions.values().contains(dotPrediction1)) {
|
||||
for (Double transferDots : DotOverTransfer.values()) {
|
||||
if (transferDots == dotPrediction1) {
|
||||
totalSubtraction += transferDots;
|
||||
} else {
|
||||
score -= subtracter1 * 25;
|
||||
//System.out.println("score minus subtracter: " + score + "\nsubtracter: " + subtracter + "\n");
|
||||
// System.out.println("score minus subtracter: " + score + "\n");
|
||||
}
|
||||
//System.out.println("transferDots: " + transferDots + "\n");
|
||||
// System.out.println("transferDots: " + transferDots + "\n");
|
||||
}
|
||||
DotOverTransfer.put(DotOverTransfer.size(), dotPrediction1);
|
||||
} else {
|
||||
// System.out.println("subtracter1 pre: " + subtracter1 + "\n");
|
||||
subtracter1 -= 100;
|
||||
subtracter1 *= 25;
|
||||
score -= subtracter1 * dotPrediction1;
|
||||
//System.out.println("score minus subtracter * dotPrediction 2: " + score + "\ndotPrediction: "
|
||||
// + dotPrediction + "\n");
|
||||
score += subtracter1 * dotPrediction1;
|
||||
// System.out.println("subtracter1 post: " + subtracter1 + "\n");
|
||||
// System.out.println("score minus subtracter * dotPrediction 2: " + score + "\ndotPrediction1: " + dotPrediction1 + "\n");
|
||||
}
|
||||
dotPredictions.put(dotPredictions.size() + 1, dotPrediction1);
|
||||
if (!dotPredictions.values().contains(dotPrediction2)) {
|
||||
for (Double transferDots : DotOverTransfer.values()) {
|
||||
if (transferDots == dotPrediction2) {
|
||||
totalSubtraction += transferDots;
|
||||
} else {
|
||||
score -= subtracter1 * 25;
|
||||
// System.out.println("score minus subtracter: " + score + "\n");
|
||||
}
|
||||
// System.out.println("transferDots: " + transferDots + "\n");
|
||||
}
|
||||
DotOverTransfer.put(DotOverTransfer.size(), dotPrediction2);
|
||||
if (dotPrediction2 > largest) {
|
||||
largest = dotPrediction2;
|
||||
}
|
||||
@ -392,42 +400,51 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
score -= subtracter2;
|
||||
} else {
|
||||
score += subtracter2;
|
||||
//System.out.println("score + subtracter: " + score + "\nsubtracter: " + subtracter + "\n");
|
||||
// System.out.println("score + subtracter: " + score + "\nsubtracter2: " + subtracter2 + "\n");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
score -= subtracter2 / 10;
|
||||
}
|
||||
} else {
|
||||
subtracter2 -= 100;
|
||||
subtracter2 *= 25;
|
||||
score += subtracter2 * dotPrediction2;
|
||||
//System.out.println("score + subtracter * dotPrediction: " + score + "\nsubtracter: " + subtracter + "\ndotPrediction: "
|
||||
//+ dotPrediction + "\n");
|
||||
} else if (dotPrediction2 < 22.0 || dotPrediction2 > 40.0) {
|
||||
//System.out.println("subtracter2: " + subtracter2 + "\n");
|
||||
if (subtracter2 > 55.0 && subtracter2 < 82.0) {
|
||||
score += subtracter2 * dotPrediction2;
|
||||
// System.out.println("score: " + score + "\ndotPrediction2: " + dotPrediction2 + "\n");
|
||||
}
|
||||
}
|
||||
dotPredictions.put(dotPredictions.size() + 1, dotPrediction2);
|
||||
}
|
||||
//System.out.println("score post subtracter1: " + score + "\n");
|
||||
// System.out.println("score post subtracter1: " + score + "\n");
|
||||
Double subTracPre = 0.0;
|
||||
for (Double subtractors : subtractorMap.values()) {
|
||||
if (Objects.equals(subTracPre, subtractors)) {
|
||||
score -= 1500;
|
||||
//System.out.println("score minus subTracPre equals: " + score + "\nsubTracPre: " + subTracPre + "\n");
|
||||
if (Objects.equals(subTracPre, subtractors) && subTracPre < 70.0 && subTracPre > 20.0) {
|
||||
score += (subTracPre * 10) / subtractorMap.values().size();
|
||||
// System.out.println("score subTracPre plus equals: " + score + "\nsubTracPre: " + subTracPre + "\n");
|
||||
} else {
|
||||
if (subTracPre > 0.0) {
|
||||
score -= (subTracPre * 50) / subtractorMap.values().size();
|
||||
} else {
|
||||
score += (subTracPre * 50) / subtractorMap.values().size();
|
||||
}
|
||||
// System.out.println("score subTracPre minus equals: " + score + "\nsubTracPre: " + subTracPre + "\n");
|
||||
}
|
||||
subTracPre = subtractors;
|
||||
}
|
||||
|
||||
if (totalSubtraction > 45.0) {
|
||||
score -= totalSubtraction * 25;
|
||||
} else {
|
||||
score += totalSubtraction * 25;
|
||||
}
|
||||
//System.out.println("score post totalSubtraction: " + score + "\ntotalSubtraction: " + totalSubtraction + "\n");
|
||||
// System.out.println("score post totalSubtraction: " + score + "\ntotalSubtraction: " + totalSubtraction + "\n");
|
||||
Double preAccumulatorDot = 0.0;
|
||||
Double postAccumulatorDot = 0.0;
|
||||
for (Double accumulators : AccumulateDotMap.values()) {
|
||||
if (Objects.equals(preAccumulatorDot, accumulators)) {
|
||||
if (Objects.equals(postAccumulatorDot, accumulators)) {
|
||||
score -= 1400;
|
||||
// System.out.println("score minus postAccumulatorDot: " + score + "\n");
|
||||
}
|
||||
postAccumulatorDot = accumulators;
|
||||
}
|
||||
@ -436,7 +453,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
subTracPre = 0.0;
|
||||
for (Double subtractors : subtractorMap.values()) {
|
||||
if (Objects.equals(subTracPre, subtractors)) {
|
||||
score -= 500;
|
||||
score += 500;
|
||||
// System.out.println("score minus subTracPre: " + score + "\n");
|
||||
}
|
||||
subTracPre = subtractors;
|
||||
}
|
||||
@ -674,15 +692,13 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
private SentimentValueCache sentimentCoreAnnotationSetup(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) {
|
||||
for (CoreMap sentence : pipelineAnnotationSentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
|
||||
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
|
||||
int predictedClass = RNNCoreAnnotations.getPredictedClass(tree);
|
||||
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
||||
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
||||
try {
|
||||
if (tree != null) {
|
||||
int predictedClass = RNNCoreAnnotations.getPredictedClass(tree);
|
||||
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
||||
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
||||
cacheSentimentLocal.addRNNPredictClass(predictedClass);
|
||||
cacheSentimentLocal.addSimpleMatrix(predictions);
|
||||
cacheSentimentLocal.addSimpleMatrixVector(nodeVector);
|
||||
} catch (Exception ex) {
|
||||
System.out.println("ex: " + ex.getLocalizedMessage() + "\n");
|
||||
}
|
||||
}
|
||||
return cacheSentimentLocal;
|
||||
@ -704,12 +720,15 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
|
||||
private SentimentValueCache jmweAnnotationSetup(Annotation jmweStrAnnotation, SentimentValueCache cacheSentimentLocal) {
|
||||
List<CoreMap> sentences = jmweStrAnnotation.get(CoreAnnotations.SentencesAnnotation.class);
|
||||
Collection<IMWE<IToken>> tokeninflectionMap = new ArrayList();
|
||||
int tokenadder = 0;
|
||||
for (CoreMap sentence : sentences) {
|
||||
for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
|
||||
if (token.isInflected()) {
|
||||
cacheSentimentLocal.setInflectedCounterPositive(cacheSentimentLocal.getInflectedCounterPositive() + 1);
|
||||
} else {
|
||||
} else if (!tokeninflectionMap.contains(token)) {
|
||||
cacheSentimentLocal.setInflectedCounterNegative(cacheSentimentLocal.getInflectedCounterNegative() + 1);
|
||||
tokeninflectionMap.add(token);
|
||||
}
|
||||
cacheSentimentLocal.addstrTokenForm(token.getForm());
|
||||
cacheSentimentLocal.addstrTokenGetEntry(token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
|
||||
@ -733,21 +752,29 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
cacheSentimentLocal.setMarkedContiniousCounterEntries(cacheSentimentLocal.getMarkedContiniousCounterEntries() + 1);
|
||||
}
|
||||
}
|
||||
cacheSentimentLocal.setTokensCounter(cacheSentimentLocal.getTokensCounter() + 1);
|
||||
tokenadder += 1;
|
||||
}
|
||||
cacheSentimentLocal.setAnotatorcounter(cacheSentimentLocal.getAnotatorcounter() + 1);
|
||||
}
|
||||
cacheSentimentLocal.setTokensCounter(tokenadder);
|
||||
return cacheSentimentLocal;
|
||||
}
|
||||
|
||||
private Double entryCountsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
ConcurrentMap<Integer, Integer> countsMap = new MapMaker().concurrencyLevel(2).makeMap();
int totalsize = cacheSentimentLocal1.getEntryCounts().values().size() + cacheSentimentLocal2.getEntryCounts().values().size();
for (int counts : cacheSentimentLocal1.getEntryCounts().values()) {
for (int counts1 : cacheSentimentLocal2.getEntryCounts().values()) {
if (counts == counts1 && counts > 0 && !countsMap.values().contains(counts)) {
score += counts * 250;
//System.out.println("score post counts: " + score + "\nCounts: " + counts + "\n");
countsMap.put(countsMap.size() + 1, counts);
if (counts > 0 && counts1 > 0) {
//System.out.println("counts1: " + counts + "\ncounts2: " + counts1 + "\n");
if (counts == counts1 && !countsMap.values().contains(counts)) {
score += (counts * 250) / totalsize;
// System.out.println("score post counts plus: " + score + "\ntotalsize: " + totalsize + "\n");
countsMap.put(countsMap.size() + 1, counts);
} else if (counts * 3 < counts1 || counts1 * 3 < counts) {
score -= 600;
// System.out.println("score post counts minus: " + score + "\n");
}
}
}
}
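
The change above normalizes the per-match bonus by the combined number of entry counts instead of adding a flat 250 per shared count, and penalizes counts that differ by more than a factor of three. A compact, hypothetical restatement of that idea (names are invented for illustration and not part of this codebase):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    final class EntryOverlapSketch {
        // Reward identical entry counts, scaled by the combined number of entries; penalize large mismatches.
        static double entryOverlapScore(Collection<Integer> counts1, Collection<Integer> counts2) {
            double score = 0.0;
            int totalSize = counts1.size() + counts2.size();
            Set<Integer> seen = new HashSet<>();
            for (int a : counts1) {
                for (int b : counts2) {
                    if (a <= 0 || b <= 0) {
                        continue;
                    }
                    if (a == b && seen.add(a)) {
                        score += (a * 250.0) / totalSize;  // normalized bonus for a shared count
                    } else if (a * 3 < b || b * 3 < a) {
                        score -= 600;                      // penalty when counts differ by more than 3x
                    }
                }
            }
            return score;
        }

        public static void main(String[] args) {
            System.out.println(entryOverlapScore(Arrays.asList(2, 3, 5), Arrays.asList(3, 9)));
        }
    }
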
@@ -760,12 +787,17 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
for (String strTokenPos2 : cacheSentimentLocal2.getstrTokenEntryPOS().values()) {
if (strTokenPos1.equals(strTokenPos2)) {
score += 500;
} else {
score -= 650;
//System.out.println("strTokenEntryPOS score: " + score + "\n");
// System.out.println("strTokenEntryPOS score: " + score + "\n");
}
}
}
int posEntrySize1 = cacheSentimentLocal1.getstrTokenEntryPOS().values().size();
int posEntrySize2 = cacheSentimentLocal2.getstrTokenEntryPOS().values().size();
if (posEntrySize1 * 3 > posEntrySize2 && posEntrySize2 * 3 > posEntrySize1) {
score += posEntrySize1 > posEntrySize2 ? (posEntrySize1 - posEntrySize2) * 700 : (posEntrySize2 - posEntrySize1) * 700;
//System.out.println("posEntrySize plus score: " + score + "\n");
}
// System.out.println("posEntrySize1: " + posEntrySize1 + "\nposEntrySize2: " + posEntrySize2 + "\n");
}
return score;
}
@@ -773,11 +805,19 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private Double unmarkedPatternCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
int UnmarkedPatternCounter1 = cacheSentimentLocal1.getUnmarkedPatternCounter();
int UnmarkedPatternCounter2 = cacheSentimentLocal2.getUnmarkedPatternCounter();
// System.out.println("UnmarkedPatternCounter1: " + UnmarkedPatternCounter1 + "\nUnmarkedPatternCounter2: " + UnmarkedPatternCounter2 + "\n");
if (UnmarkedPatternCounter1 > 0 && UnmarkedPatternCounter2 > 0) {
if (UnmarkedPatternCounter1 * 2 > UnmarkedPatternCounter2 && UnmarkedPatternCounter2 * 2 > UnmarkedPatternCounter1) {
score += 2500;
} else if (UnmarkedPatternCounter1 * 5 < UnmarkedPatternCounter2 || UnmarkedPatternCounter2 * 5 < UnmarkedPatternCounter1) {
score -= 4000;
if (UnmarkedPatternCounter1 < 100 && UnmarkedPatternCounter2 < 100) {
if (UnmarkedPatternCounter1 * 2 > UnmarkedPatternCounter2 && UnmarkedPatternCounter2 * 2 > UnmarkedPatternCounter1) {
score += 2500;
// System.out.println("score plus UnmarkedPattern: " + score + "\n");
} else if (UnmarkedPatternCounter1 * 5 < UnmarkedPatternCounter2 || UnmarkedPatternCounter2 * 5 < UnmarkedPatternCounter1) {
score -= 4000;
// System.out.println("score minus UnmarkedPattern: " + score + "\n");
}
} else {
score -= 2500;
// System.out.println("score minus UnmarkedPattern10>: " + score + "\n");
}
}
return score;
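
The rewritten branch keeps the same ratio-band idea: counters within a factor of two of each other earn a bonus, counters more than five times apart take a penalty, and anything outside the new < 100 guard is penalized outright. A generic sketch of that banding (method name and constants are illustrative only, not the project's API):

    final class RatioBandSketch {
        // Score two counters by how close their ratio is: within 2x -> bonus, beyond 5x -> penalty.
        static double ratioBandScore(double score, int c1, int c2) {
            if (c1 <= 0 || c2 <= 0) {
                return score;             // nothing to compare
            }
            if (c1 >= 100 || c2 >= 100) {
                return score - 2500;      // outsized counters are treated as noise
            }
            if (c1 * 2 > c2 && c2 * 2 > c1) {
                return score + 2500;      // roughly similar magnitude
            }
            if (c1 * 5 < c2 || c2 * 5 < c1) {
                return score - 4000;      // wildly different magnitude
            }
            return score;                 // between 2x and 5x apart: leave the score alone
        }

        public static void main(String[] args) {
            System.out.println(ratioBandScore(0.0, 12, 15));  // +2500
            System.out.println(ratioBandScore(0.0, 3, 40));   // -4000
        }
    }
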
@@ -823,13 +863,24 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
for (String strTokeniPart2 : cacheSentimentLocal2.getstrTokenGetiPart().values()) {
if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) {
strtokensMap.put(strtokensMap.size() + 1, strTokeniPart2);
score += 400;
} else {
score -= 200;
//System.out.println("score minus strTokenGetiPart: " + score + "\n");
score += 800;
// System.out.println("score minus strTokenGetiPart: " + score + "\n");
}
}
}
int tokenIPartSize1 = cacheSentimentLocal1.getstrTokenGetiPart().values().size();
int tokenIPartSize2 = cacheSentimentLocal2.getstrTokenGetiPart().values().size();
int strTokenMapSize = strtokensMap.values().size();
if (tokenIPartSize1 * 2 > tokenIPartSize2 && tokenIPartSize2 * 2 > tokenIPartSize1) {
score += tokenIPartSize1 > tokenIPartSize2 ? (tokenIPartSize1 - tokenIPartSize2) * 700 : (tokenIPartSize2 - tokenIPartSize1) * 700;
score += strTokenMapSize * 600;
//System.out.println("tokenIPartSize plus score: " + score + "\ntokenIPartSize1: " + tokenIPartSize1 + "\ntokenIPartSize2: "
// + tokenIPartSize2 + "\nstrTokenMapSize: " + strTokenMapSize + "\n");
} else if (tokenIPartSize1 > 0 && tokenIPartSize2 > 0) {
score -= tokenIPartSize1 > tokenIPartSize2 ? (tokenIPartSize1 - tokenIPartSize2) * 700 : (tokenIPartSize2 - tokenIPartSize1) * 700;
// System.out.println("tokenIPartSize minus score: " + score + "\ntokenIPartSize1: " + tokenIPartSize1 + "\ntokenIPartSize2: "
// + tokenIPartSize2 + "\n");
}
return score;
}

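
A side note on the strtokensMap.put(strtokensMap.size() + 1, ...) plus values().contains(...) pattern used above and in the other scoring helpers: the map is only ever used as a "have I already counted this?" check, and values().contains is a linear scan. A plain Set expresses the same intent more directly; a hypothetical equivalent of just that idiom (the scores here are placeholders, not the project's constants):

    import java.util.HashSet;
    import java.util.Set;

    final class SeenSetSketch {
        public static void main(String[] args) {
            Set<String> seen = new HashSet<>();
            double score = 0.0;
            for (String part : new String[]{"house", "boat", "house"}) {
                // add() returns false when the element was already present, replacing the
                // size()+1 key bookkeeping and the O(n) values().contains() lookup.
                if (seen.add(part)) {
                    score += 400;
                } else {
                    score += 800;
                }
            }
            System.out.println(score);  // 400 + 400 + 800
        }
    }
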
@@ -866,14 +917,24 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
ConcurrentMap<Integer, String> iTokenMapTagsMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String strmapTag : cacheSentimentLocal1.getITokenMapTag().values()) {
for (String strmapTag1 : cacheSentimentLocal2.getITokenMapTag().values()) {
if (strmapTag.equals(strmapTag1)) {
score -= 1450;
} else if (!iTokenMapTagsMap.values().contains(strmapTag)) {
score += 725;
if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.values().contains(strmapTag1)) {
score += 1450;
iTokenMapTagsMap.put(iTokenMapTagsMap.size() + 1, strmapTag);
}
}
}
int mapTagsize1 = cacheSentimentLocal1.getITokenMapTag().values().size();
int mapTagsize2 = cacheSentimentLocal2.getITokenMapTag().values().size();
int tokenTagMapSize = iTokenMapTagsMap.values().size();
if (mapTagsize1 * 2 > mapTagsize2 && mapTagsize2 * 2 > mapTagsize1) {
score += mapTagsize1 > mapTagsize2 ? (mapTagsize1 - mapTagsize2) * 700 : (mapTagsize2 - mapTagsize1) * 700;
score += tokenTagMapSize * 600;
// System.out.println("tokenIPartSize 2 plus score: " + score + "\n");
//System.out.println("mapTagsize1: " + mapTagsize1 + "\nmapTagsize2: " + mapTagsize2 + "\ntokenTagMapSize: "
// + tokenTagMapSize + "\n");
} else {
score -= mapTagsize1 > mapTagsize2 ? (mapTagsize1 - mapTagsize2) * 700 : (mapTagsize2 - mapTagsize1) * 700;
}
return score;
}

@@ -910,10 +971,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
for (String strTokenStem : cacheSentimentLocal1.getstrTokenStems().values()) {
for (String strTokenStem1 : cacheSentimentLocal2.getstrTokenStems().values()) {
if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.values().contains(strTokenStem)) {
score += 1500;
score -= 4500;
tokenStemmingMap.put(tokenStemmingMap.size() + 1, strTokenStem);
// System.out.println("score minus strTokenStem: " + score + "\n");
}
//System.out.println("score strTokenStem: " + score + "\n");
}
}
return score;
@@ -922,21 +983,24 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private Double inflectedCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
int inflectedCounterPositive1 = cacheSentimentLocal1.getInflectedCounterPositive();
int inflectedCounterPositive2 = cacheSentimentLocal2.getInflectedCounterPositive();
int inflectedCounterNegative = cacheSentimentLocal1.getInflectedCounterNegative() + cacheSentimentLocal2.getInflectedCounterNegative();
int inflectedCounterNegative = cacheSentimentLocal1.getInflectedCounterNegative() > cacheSentimentLocal2.getInflectedCounterNegative()
? cacheSentimentLocal1.getInflectedCounterNegative() - cacheSentimentLocal2.getInflectedCounterNegative()
: cacheSentimentLocal2.getInflectedCounterNegative() - cacheSentimentLocal1.getInflectedCounterNegative();
//System.out.println("inflectedCounterPositive1: " + inflectedCounterPositive1 + "\ninflectedCounterPositive2: "
// + inflectedCounterPositive2 + "\ninflectedCounterNegative: " + inflectedCounterNegative + "\n");
if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
//+inflectedCounterPositive2 + "\ninflectedCounterNegative: " + inflectedCounterNegative + "\n" );
if ((inflectedCounterPositive1 + inflectedCounterPositive2) > inflectedCounterNegative && inflectedCounterNegative > 0) {
score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 650;
//System.out.println("score inflectedCounterPositive plus: " + score + "\n");
}
if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
if (inflectedCounterPositive1 * 2 > inflectedCounterPositive2 && inflectedCounterPositive2 * 2 > inflectedCounterPositive1) {
score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
//System.out.println("score plus inflectedCounterPositive * 2: " + score + "\n");
if (inflectedCounterPositive1 * 2 > inflectedCounterPositive2 && inflectedCounterPositive2 * 2 > inflectedCounterPositive1
&& inflectedCounterNegative > 0) {
score += ((inflectedCounterPositive1 + inflectedCounterPositive2) * 150) - (inflectedCounterNegative * 10);
// System.out.println("score plus inflectedCounterPositive * 2: " + score + "\n");
} else if (inflectedCounterPositive1 * 5 < inflectedCounterPositive2 || inflectedCounterPositive2 * 5 < inflectedCounterPositive1) {
score -= inflectedCounterPositive1 > inflectedCounterPositive2 ? (inflectedCounterPositive1 - inflectedCounterPositive2) * 400
: (inflectedCounterPositive2 - inflectedCounterPositive1) * 400;
//System.out.println("score minus inflectedCounterPositive * 2: " + score + "\n");
// System.out.println("score minus inflectedCounterPositive * 2: " + score + "\n");
}
}
return score;
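
The new inflectedCounterNegative expression above is simply the absolute difference of the two negative counters, spelled out with a ternary. Math.abs says the same thing in one call; a tiny runnable check of the equivalence (the two ints stand in for the getInflectedCounterNegative() values):

    final class AbsDiffSketch {
        public static void main(String[] args) {
            int negative1 = 7;  // stand-in for cacheSentimentLocal1.getInflectedCounterNegative()
            int negative2 = 3;  // stand-in for cacheSentimentLocal2.getInflectedCounterNegative()
            // Same value the ternary in the diff computes: |negative1 - negative2|
            int inflectedCounterNegative = Math.abs(negative1 - negative2);
            System.out.println(inflectedCounterNegative);  // 4
        }
    }
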
@@ -962,22 +1026,23 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private Double tokensCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
int tokensCounter1 = cacheSentimentLocal1.getTokensCounter();
int tokensCounter2 = cacheSentimentLocal2.getTokensCounter();
//System.out.println("tokensCounter1: " + tokensCounter1 + "\ntokensCounter2: " + tokensCounter2 + "\n");
// System.out.println("tokensCounter1: " + tokensCounter1 + "\ntokensCounter2: " + tokensCounter2 + "\n");
if ((tokensCounter1 > 1 && tokensCounter2 > 1) && tokensCounter1 < tokensCounter2 * 5 && tokensCounter2 < tokensCounter1 * 5) {
if (tokensCounter1 > tokensCounter2 / 2 && tokensCounter2 > tokensCounter1 / 2) {
score += (tokensCounter1 + tokensCounter2) * 1400;
//System.out.println("score plus tokensCounter: " + score + "\n");
if (tokensCounter1 > tokensCounter2 / 2 && tokensCounter2 > tokensCounter1 / 2 && tokensCounter1 < 4 && tokensCounter2 < 4) {
//8000 score hardcap
score += (tokensCounter1 + tokensCounter2) * 400;
// System.out.println("score plus tokensCounter: " + score + "\n");
} else {
score -= 3500;
//System.out.println("score minus tokensCounter: " + score + "\n");
}
} else {
int elseint = tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
//System.out.println("elseint: " + elseint + "<n");
// System.out.println("elseint: " + elseint + "<n");
if ((tokensCounter1 > tokensCounter2 * 5 || tokensCounter2 > tokensCounter1 * 5)
&& tokensCounter1 > 0 && tokensCounter2 > 0) {
score -= tokensCounter1 > tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
//System.out.println("score post tokensCounter: " + score + "\n");
// System.out.println("score post tokensCounter: " + score + "\n");
} else if (elseint > 0 && tokensCounter1 > 0 && tokensCounter2 > 0) {
score -= elseint * 2;
//System.out.println("score post elseint: " + elseint + "\n");
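
On the "//8000 score hardcap" comment above: the change only lowers the multiplier (both counters are between 2 and 3 in that branch, so the bonus tops out at 2400), and the cap itself is documented rather than enforced. If an explicit ceiling were wanted, Math.min would make it enforceable; a hypothetical illustration, not code from this project:

    final class ScoreCapSketch {
        // Clamp a running score to an upper bound instead of relying on a comment.
        static double capped(double score, double hardCap) {
            return Math.min(score, hardCap);
        }

        public static void main(String[] args) {
            System.out.println(capped(9300.0, 8000.0));  // 8000.0
        }
    }
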
@@ -996,14 +1061,12 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
List<CoreLabel> tokens = em.tokens();
for (CoreLabel token : tokens) {
try {
if (token != null) {
if (!cacheSentimentLocal.getnerEntityTokenTags().values().contains(token.tag())) {
if (entityType.equals("PERSON") && EntityConfidences > 0.80) {
cacheSentimentLocal.addnerEntityTokenTags(token.tag());
}
}
} catch (Exception ex) {
//System.out.println("failed corelabel ex: " + ex.getLocalizedMessage() + "\n" + ex.getCause() + "\n");
}
}
if (!cacheSentimentLocal.getnerEntities1().values().contains(em.text())) {
@@ -1157,7 +1220,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
score = tgwListScoreIncrementer(score, cacheSentiment1 == null
? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2);
// System.out.println("score post runCountGet: " + score + "\n");
//System.out.println("score post tgwListScoreIncrementer: " + score + "\n");
if (cacheSentiment1 == null) {
cacheSentimentLocal1 = GrammaticStructureSetup(cacheSentimentLocal1, pipelineAnnotation1);
}
@@ -1169,6 +1232,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1 = cacheSentiment1 == null
? cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList();
score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score);
//System.out.println("score post iterateTrees: " + score + "\n");
Collection<TypedDependency> allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies()
: cacheSentiment2.getAllTypedDependencies();
Collection<TypedDependency> allTypedDependencies1 = cacheSentiment1 == null ? cacheSentimentLocal1.getAllTypedDependencies()
@@ -1177,6 +1241,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2 = cacheSentiment2 == null ? cacheSentimentLocal2.getGs() : cacheSentiment2.getGs();
score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2,
sentenceConstituencyParseList1, sentenceConstituencyParseList2);
// System.out.println("score post typeDependenciesGrammaticalRelation: " + score + "\n");
if (cacheSentiment1 == null) {
cacheSentimentLocal1 = sentimentCoreAnnotationSetup(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
}
@@ -1191,12 +1256,13 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
? cacheSentimentLocal1.getSimpleSMXlistVector() : cacheSentiment1.getSimpleSMXlistVector();
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector2 = cacheSentiment2 == null
? cacheSentimentLocal2.getSimpleSMXlistVector() : cacheSentiment2.getSimpleSMXlistVector();
//System.out.println("score pre pipelineAnnotation2Sentiment: " + score + "\n");
//System.out.println("score pre simpleRNNMatrixCalculations: " + score + "\n");
score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2);
//System.out.println("score pre simpleRNNMaxtrixVectors: " + score + "\n");
score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2);
int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size();
int sentiment2 = cacheSentiment2 == null ? cacheSentimentLocal2.getRnnPrediectClassMap().size() : cacheSentiment2.getRnnPrediectClassMap().size();
//System.out.println("score post elementsVariance: " + score + "\n");
//System.out.println("score pre sentiment trim: " + score + "\n");
score -= (sentiment1 > sentiment2 ? sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500;
Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> classifyRawEvaluationEntry = classifyRawEvaluation(score, cacheSentimentLocal1,
cacheSentimentLocal2);
@@ -1227,13 +1293,19 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1;
SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2;
score = entryCountsRelation(score, scoringCache1, scoringCache2);
//System.out.println("score post entryCountsRelation: " + score + "\n");
score = entryCountsScoring(score, scoringCache1, scoringCache2);
// System.out.println("score post entryCountsScoring: " + score + "\n");
score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
//System.out.println("score post tokenEntryPosScoring: " + score + "\n");
score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
//System.out.println("score post UnmarkedPatternCounter: " + score + "\n");
// System.out.println("score post UnmarkedPatternCounter: " + score + "\n");
score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
// System.out.println("score post markedContiniousCounterScoring: " + score + "\n");
score = strTokensMapScoring(score, scoringCache1, scoringCache2);
// System.out.println("score post strTokensMapScoring: " + score + "\n");
score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
//System.out.println("score post strTokenEntryScoring: " + score + "\n");
score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
//System.out.println("score post strmapTag: " + score + "\n");
score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
@@ -1241,7 +1313,9 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
//System.out.println("Score pre inflected: " + score + "\n");
score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
//System.out.println("Score pre annotatorCountScoring: " + score + "\n");
score = annotatorCountScoring(score, scoringCache1, scoringCache2);
//System.out.println("Score pre tokensCounterScoring: " + score + "\n");
score = tokensCounterScoring(score, scoringCache1, scoringCache2);
//System.out.println("Score Pre levenhstein: " + score + "\n");
LevenshteinDistance leven = new LevenshteinDistance(str, str1);
@@ -1254,10 +1328,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
if (cacheSentiment2 == null) {
cacheSentimentLocal2 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument2, cacheSentimentLocal2);
}
//System.out.println("score post PERSON trim: " + score + "\n");
//System.out.println("score post SentenceScoreDiff trim: " + score + "\n");
score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
? cacheSentimentLocal2 : cacheSentiment2);
//System.out.println("score pre stopwordTokens: " + score + "\n");
//System.out.println("score post nerEntitiesAndTokenScoring: " + score + "\n");
if (cacheSentiment1 == null) {
cacheSentimentLocal1 = setupStoWordTokensLemma(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
}
@@ -1266,9 +1340,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
? cacheSentimentLocal2 : cacheSentiment2);
//System.out.println("score post stopWordTokenLemmaScoring: " + score + "\n");
score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
? cacheSentimentLocal2 : cacheSentiment2);
// System.out.println("Final current score: " + score + "\nSentence 1: " + str + "\nSentence 2: " + str1 + "\n");
// System.out.println("Final current score: " + score + "\nSentence 1: " + str + "\nSentence 2: " + str1 + "\n");
smxParam.setDistance(score);
if (cacheSentiment1 == null) {
smxParam.setCacheValue1(cacheSentimentLocal1);

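
The block above shows the overall shape of the Callable: the running score is threaded through a chain of (score, cache1, cache2) -> score helpers and the final value is handed to smxParam.setDistance(score). A hedged, simplified sketch of that fold pattern; every name here (Features, ScoringStep, the constants) is invented for illustration and is not the project's API:

    import java.util.ArrayList;
    import java.util.List;

    final class ScoringChainSketch {
        // Stand-in for SentimentValueCache; the real class carries many more counters.
        static final class Features {
            final int tokens;
            final int stems;
            Features(int tokens, int stems) { this.tokens = tokens; this.stems = stems; }
        }

        // Same shape as the helpers in the diff: (score, cache1, cache2) -> updated score.
        interface ScoringStep {
            double apply(double score, Features a, Features b);
        }

        public static void main(String[] args) {
            Features a = new Features(3, 5);
            Features b = new Features(2, 5);
            List<ScoringStep> steps = new ArrayList<>();
            steps.add((s, x, y) -> s + (x.tokens + y.tokens) * 400);          // token-count bonus
            steps.add((s, x, y) -> x.stems == y.stems ? s + 1500 : s - 4500); // stem agreement
            double score = 0.0;
            for (ScoringStep step : steps) {
                score = step.apply(score, a, b);  // thread the score through each step
            }
            System.out.println(score);  // the value that would go to setDistance(...)
        }
    }
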
@@ -36,9 +36,10 @@ public class DiscordHandler {
private static String MostRecentMsg = "";

public static void main(String[] args) {
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "25");
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
try {
Datahandler.instance.initiateMYSQL();
//uncomment db fetch when ready, just keep the comment for future reference
System.out.println("finished initiating MYSQL");
} catch (SQLException | IOException ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);

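
One note on the parallelism change above: java.util.concurrent.ForkJoinPool.common.parallelism is only read when the common pool is first initialized, so the property must be set before anything in the JVM touches the common pool (parallel streams, default CompletableFuture executors, and so on), otherwise the default of roughly one less than the available processor count is kept. A small, self-contained check:

    import java.util.concurrent.ForkJoinPool;

    final class CommonPoolCheck {
        public static void main(String[] args) {
            // Must run before the common pool is created anywhere in this JVM.
            System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
            System.out.println("common pool parallelism: " + ForkJoinPool.commonPool().getParallelism());
        }
    }
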