Should reduce idleness of calculations. Once the string cache is large enough and stored calculations are spread widely enough across the string cache, it should effectively return response messages instead of performing manual calculations each time. Also replaced ArrayLists with ConcurrentMaps.
This commit is contained in:
parent
aff2ddae5b
commit
d8d415cbe0
@ -27,6 +27,7 @@ import java.io.IOException;
|
|||||||
import java.io.StringReader;
|
import java.io.StringReader;
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.LinkedHashMap;
|
import java.util.LinkedHashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
@ -58,7 +59,7 @@ public class MYSQLDatahandler {
|
|||||||
private volatile boolean refreshMatrixFromDB;
|
private volatile boolean refreshMatrixFromDB;
|
||||||
private final ConcurrentMap<Integer, String> stringCache;
|
private final ConcurrentMap<Integer, String> stringCache;
|
||||||
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
|
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
|
||||||
private List<String> multiprocessCalculations = new ArrayList();
|
private ConcurrentMap<Integer, String> multiprocessCalculations;
|
||||||
private List<Integer> updatedRows = new ArrayList();
|
private List<Integer> updatedRows = new ArrayList();
|
||||||
private final Stopwatch stopwatch;
|
private final Stopwatch stopwatch;
|
||||||
private final Stopwatch stopwatch1;
|
private final Stopwatch stopwatch1;
|
||||||
@ -78,6 +79,7 @@ public class MYSQLDatahandler {
|
|||||||
private static AbstractSequenceClassifier<CoreLabel> classifier;
|
private static AbstractSequenceClassifier<CoreLabel> classifier;
|
||||||
private static StanfordCoreNLP pipeline;
|
private static StanfordCoreNLP pipeline;
|
||||||
private static StanfordCoreNLP pipelineSentiment;
|
private static StanfordCoreNLP pipelineSentiment;
|
||||||
|
private static volatile boolean permitted = false;
|
||||||
|
|
||||||
public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
|
public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
|
||||||
return classifier;
|
return classifier;
|
||||||
@ -91,6 +93,7 @@ public class MYSQLDatahandler {
|
|||||||
this.stopwatch = Stopwatch.createUnstarted();
|
this.stopwatch = Stopwatch.createUnstarted();
|
||||||
this.stopwatch1 = Stopwatch.createStarted();
|
this.stopwatch1 = Stopwatch.createStarted();
|
||||||
this.stringCache = new MapMaker().concurrencyLevel(2).makeMap();
|
this.stringCache = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
|
this.multiprocessCalculations = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void shiftReduceParserInitiate() {
|
public static void shiftReduceParserInitiate() {
|
||||||
@ -108,11 +111,17 @@ public class MYSQLDatahandler {
|
|||||||
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,parse");
|
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,parse");
|
||||||
props.setProperty("parse.model", modelPath);
|
props.setProperty("parse.model", modelPath);
|
||||||
props.setProperty("parse.maxlen", "100");
|
props.setProperty("parse.maxlen", "100");
|
||||||
|
props.setProperty("tokenize.maxlen", "100");
|
||||||
|
props.setProperty("ssplit.maxlen", "100");
|
||||||
|
props.setProperty("lemma.maxlen", "100");
|
||||||
props.setProperty("parse.binaryTrees", "true");
|
props.setProperty("parse.binaryTrees", "true");
|
||||||
propsSentiment.setProperty("annotators", "tokenize, ssplit, parse, sentiment");
|
propsSentiment.setProperty("annotators", "tokenize, ssplit, parse, sentiment");
|
||||||
propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
|
propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
|
||||||
propsSentiment.setProperty("sentiment.model", sentimentModel);
|
propsSentiment.setProperty("sentiment.model", sentimentModel);
|
||||||
|
propsSentiment.setProperty("sentiment.maxlen", "100");
|
||||||
propsSentiment.setProperty("parse.maxlen", "100");
|
propsSentiment.setProperty("parse.maxlen", "100");
|
||||||
|
propsSentiment.setProperty("tokenize.maxlen", "100");
|
||||||
|
propsSentiment.setProperty("ssplit.maxlen", "100");
|
||||||
// set up pipeline
|
// set up pipeline
|
||||||
pipeline = new StanfordCoreNLP(props);
|
pipeline = new StanfordCoreNLP(props);
|
||||||
pipelineSentiment = new StanfordCoreNLP(propsSentiment);
|
pipelineSentiment = new StanfordCoreNLP(propsSentiment);
|
||||||
@ -164,7 +173,7 @@ public class MYSQLDatahandler {
|
|||||||
public synchronized void checkIfUpdateMatrixes() {
|
public synchronized void checkIfUpdateMatrixes() {
|
||||||
refreshMatrixFromDB = false;
|
refreshMatrixFromDB = false;
|
||||||
int calculationBoundaries = 6;
|
int calculationBoundaries = 6;
|
||||||
int updateBadgesInteger = 65;
|
int updateBadgesInteger = 80;
|
||||||
while (lHMSMX.size() < (stringCache.values().size() * stringCache.values().size()) - stringCache.values().size()) {
|
while (lHMSMX.size() < (stringCache.values().size() * stringCache.values().size()) - stringCache.values().size()) {
|
||||||
if (stopwatch1.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS1) {
|
if (stopwatch1.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS1) {
|
||||||
refreshMatrixFromDB = true;
|
refreshMatrixFromDB = true;
|
||||||
@ -174,7 +183,7 @@ public class MYSQLDatahandler {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (stringCache.values().size() > 10 && !refreshMatrixFromDB) {
|
if (stringCache.values().size() > 10 && !refreshMatrixFromDB) {
|
||||||
if (multiprocessCalculations.size() <= (calculationBoundaries * calculationBoundaries)) {
|
if (threadCounter < 25) {
|
||||||
threadCounter++;
|
threadCounter++;
|
||||||
ConcurrentMap<Integer, String> stringCachelocal = stringCache;
|
ConcurrentMap<Integer, String> stringCachelocal = stringCache;
|
||||||
List<Integer> updateLocal = updatedRows;
|
List<Integer> updateLocal = updatedRows;
|
||||||
@ -205,7 +214,7 @@ public class MYSQLDatahandler {
|
|||||||
while (beginindex + ij < temp) {
|
while (beginindex + ij < temp) {
|
||||||
String get = stringCachelocal.get(beginindex + ij);
|
String get = stringCachelocal.get(beginindex + ij);
|
||||||
strIndexNavigator.put(ij, get);
|
strIndexNavigator.put(ij, get);
|
||||||
multiprocessCalculations.add(get);
|
multiprocessCalculations.put(multiprocessCalculations.size() + 1, get);
|
||||||
ij++;
|
ij++;
|
||||||
}
|
}
|
||||||
new Thread(() -> {
|
new Thread(() -> {
|
||||||
@ -230,7 +239,7 @@ public class MYSQLDatahandler {
|
|||||||
strIndexNavigatorL.values().forEach((str) -> {
|
strIndexNavigatorL.values().forEach((str) -> {
|
||||||
randomIndexesToUpdate.values().stream().filter((str1) -> (!str.equals(str1))).forEachOrdered((str1) -> {
|
randomIndexesToUpdate.values().stream().filter((str1) -> (!str.equals(str1))).forEachOrdered((str1) -> {
|
||||||
boolean present = false;
|
boolean present = false;
|
||||||
if (multiprocessCalculations.contains(str1)) {
|
if (multiprocessCalculations.values().contains(str1)) {
|
||||||
present = true;
|
present = true;
|
||||||
} else if (LHMSMXLocal.containsKey(str)) {
|
} else if (LHMSMXLocal.containsKey(str)) {
|
||||||
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str);
|
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str);
|
||||||
@ -281,77 +290,68 @@ public class MYSQLDatahandler {
|
|||||||
}).start();
|
}).start();
|
||||||
}).
|
}).
|
||||||
start();
|
start();
|
||||||
} else {
|
} else if (!permitted) {
|
||||||
if (threadCounter == 0) {
|
permitted = true;
|
||||||
threadCounter++;
|
new Thread(() -> {
|
||||||
new Thread(() -> {
|
ConcurrentMap<Integer, String> stringCachelocal = stringCache;
|
||||||
LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
|
ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
ConcurrentMap<Integer, String> strList = stringCache;
|
ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
|
ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
||||||
ConcurrentMap<Integer, String> randomStrList = new MapMaker().concurrencyLevel(2).makeMap();
|
Collection<String> values = multiprocessCalculations.values();
|
||||||
int indexes = updateBadgesInteger;
|
LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
|
||||||
if (indexes >= strList.size()) {
|
values.forEach((str) -> {
|
||||||
indexes = strList.size() - 1;
|
stringCachelocal.values().stream().filter((str1) -> (!str.equals(str1))).forEachOrdered((str1) -> {
|
||||||
}
|
boolean present = false;
|
||||||
int beginindexes = new Random().nextInt((strList.size()) - indexes);
|
if (LHMSMXLocal.containsKey(str)) {
|
||||||
int ij1 = 0;
|
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str);
|
||||||
while (beginindexes + ij1 < beginindexes + indexes) {
|
if (orDefault.containsKey(str1)) {
|
||||||
String get1 = strList.get(beginindexes + ij1);
|
present = true;
|
||||||
randomStrList.put(ij1, get1);
|
|
||||||
ij1++;
|
|
||||||
}
|
|
||||||
ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
|
|
||||||
ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
|
||||||
multiprocessCalculations.forEach((str) -> {
|
|
||||||
randomStrList.values().forEach((str1) -> {
|
|
||||||
boolean present = false;
|
|
||||||
if (LHMSMXLocal.containsKey(str)) {
|
|
||||||
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str);
|
|
||||||
if (orDefault.containsKey(str1)) {
|
|
||||||
present = true;
|
|
||||||
}
|
|
||||||
} else if (LHMSMXLocal.containsKey(str1)) {
|
|
||||||
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str1);
|
|
||||||
if (orDefault.containsKey(str)) {
|
|
||||||
present = true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if (!present) {
|
} else if (LHMSMXLocal.containsKey(str1)) {
|
||||||
SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
|
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.get(str1);
|
||||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, SMX);
|
if (orDefault.containsKey(str)) {
|
||||||
futures.put(futures.size() + 1, executor.submit(worker));
|
present = true;
|
||||||
}
|
}
|
||||||
});
|
|
||||||
});
|
|
||||||
executor.shutdown();
|
|
||||||
try {
|
|
||||||
for (Future<SimilarityMatrix> future : futures.values()) {
|
|
||||||
SimilarityMatrix SMX = future.get();
|
|
||||||
LinkedHashMap<String, Double> get = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
|
|
||||||
if (get == null) {
|
|
||||||
get = new LinkedHashMap();
|
|
||||||
}
|
|
||||||
get.put(SMX.getSecondaryString(), SMX.getDistance());
|
|
||||||
lHMSMX.put(SMX.getPrimaryString(), get);
|
|
||||||
matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
|
|
||||||
}
|
}
|
||||||
} catch (InterruptedException | ExecutionException ex) {
|
if (!present) {
|
||||||
Logger.getLogger(MYSQLDatahandler.class.getName()).log(Level.SEVERE, null, ex);
|
SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
|
||||||
|
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, SMX);
|
||||||
|
futures.put(futures.size() + 1, executor.submit(worker));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
executor.shutdown();
|
||||||
|
try {
|
||||||
|
System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
|
||||||
|
for (Future<SimilarityMatrix> future : futures.values()) {
|
||||||
|
SimilarityMatrix SMX = future.get();
|
||||||
|
LinkedHashMap<String, Double> get = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
|
||||||
|
if (get == null) {
|
||||||
|
get = new LinkedHashMap();
|
||||||
|
}
|
||||||
|
get.put(SMX.getSecondaryString(), SMX.getDistance());
|
||||||
|
lHMSMX.put(SMX.getPrimaryString(), get);
|
||||||
|
matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
|
||||||
}
|
}
|
||||||
|
} catch (InterruptedException | ExecutionException ex) {
|
||||||
|
Logger.getLogger(MYSQLDatahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||||
|
}
|
||||||
|
new Thread(() -> {
|
||||||
try {
|
try {
|
||||||
|
multiprocessCalculations = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
|
permitted = false;
|
||||||
if (!matrixUpdateList.isEmpty()) {
|
if (!matrixUpdateList.isEmpty()) {
|
||||||
DataMapper.insertSementicMatrixes(matrixUpdateList);
|
DataMapper.insertSementicMatrixes(matrixUpdateList);
|
||||||
System.out.println("finished datamapper semetic insert");
|
System.out.println("finished datamapper semetic insert");
|
||||||
}
|
}
|
||||||
|
threadCounter--;
|
||||||
|
System.out.println("\nthreadCounter: " + threadCounter + "\n");
|
||||||
} catch (CustomError ex) {
|
} catch (CustomError ex) {
|
||||||
Logger.getLogger(MYSQLDatahandler.class
|
Logger.getLogger(MYSQLDatahandler.class
|
||||||
.getName()).log(Level.SEVERE, null, ex);
|
.getName()).log(Level.SEVERE, null, ex);
|
||||||
}
|
}
|
||||||
multiprocessCalculations = new ArrayList();
|
|
||||||
updatedRows = new ArrayList();
|
|
||||||
threadCounter--;
|
|
||||||
}).start();
|
}).start();
|
||||||
}
|
}).start();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -464,14 +464,14 @@ public class MYSQLDatahandler {
|
|||||||
public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings) {
|
public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings) {
|
||||||
int minDistance = 8;
|
int minDistance = 8;
|
||||||
String similar = "";
|
String similar = "";
|
||||||
List<Future<DistanceObject>> futures = new ArrayList();
|
ConcurrentMap<Integer, Future<DistanceObject>> futures = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
||||||
concurrentStrings.values().stream().map((str) -> new LevenshteinDistance(str, toBeCompared, new DistanceObject())).forEachOrdered((worker) -> {
|
concurrentStrings.values().stream().map((str) -> new LevenshteinDistance(str, toBeCompared, new DistanceObject())).forEachOrdered((worker) -> {
|
||||||
futures.add(executor.submit(worker));
|
futures.put(futures.size() + 1, executor.submit(worker));
|
||||||
});
|
});
|
||||||
executor.shutdown();
|
executor.shutdown();
|
||||||
try {
|
try {
|
||||||
for (Future<DistanceObject> future : futures) {
|
for (Future<DistanceObject> future : futures.values()) {
|
||||||
DistanceObject d = future.get();
|
DistanceObject d = future.get();
|
||||||
try {
|
try {
|
||||||
if (d.getSentence() != null && d.getDistance() != null) {
|
if (d.getSentence() != null && d.getDistance() != null) {
|
||||||
@ -598,42 +598,42 @@ public class MYSQLDatahandler {
|
|||||||
ConcurrentMap<Integer, String> strreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
ConcurrentMap<Integer, String> strreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (String str1 : str.values()) {
|
for (String str1 : str.values()) {
|
||||||
int counter = 0;
|
int counter = 0;
|
||||||
List<String> TGWList = new ArrayList();
|
ConcurrentMap<Integer, String> TGWList = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
|
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
|
||||||
for (List<HasWord> sentence : tokenizer) {
|
for (List<HasWord> sentence : tokenizer) {
|
||||||
List<TaggedWord> tagged1 = tagger.tagSentence(sentence);
|
List<TaggedWord> tagged1 = tagger.tagSentence(sentence);
|
||||||
Tree tree = model.apply(tagged1);
|
Tree tree = model.apply(tagged1);
|
||||||
taggedWords = tree.taggedYield();
|
taggedWords = tree.taggedYield();
|
||||||
for (TaggedWord TGW : taggedWords) {
|
for (TaggedWord TGW : taggedWords) {
|
||||||
if (!TGWList.contains(TGW.tag()) && !TGW.tag().equals(":") && !TGW.word().equals(TGW.tag())) {
|
if (!TGWList.values().contains(TGW.tag()) && !TGW.tag().equals(":") && !TGW.word().equals(TGW.tag())) {
|
||||||
TGWList.add(TGW.tag());
|
TGWList.put(TGWList.size() + 1, TGW.tag());
|
||||||
counter++;
|
counter++;
|
||||||
}
|
}
|
||||||
if (counter > 3) {
|
if (counter > 3) {
|
||||||
int addCounter = 0;
|
int addCounter = 0;
|
||||||
List<Word> wordList = new ArrayList();
|
ConcurrentMap<Integer, Word> wordList = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (Word lab : tree.yieldWords()) {
|
for (Word lab : tree.yieldWords()) {
|
||||||
if (lab != null && lab.word() != null) {
|
if (lab != null && lab.word() != null) {
|
||||||
//System.out.println("lab: " + lab + " \n");
|
//System.out.println("lab: " + lab + " \n");
|
||||||
if (!wordList.contains(lab) && !lab.equals(":")) {
|
if (!wordList.values().contains(lab) && lab.value() != null && !lab.value().equals(":")) {
|
||||||
wordList.add(lab);
|
wordList.put(wordList.size() + 1, lab);
|
||||||
addCounter++;
|
addCounter++;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (addCounter > 3) {
|
if (addCounter > 3) {
|
||||||
addCounter = 0;
|
addCounter = 0;
|
||||||
List<HasWord> HWlist = new ArrayList();
|
ConcurrentMap<Integer, HasWord> HWlist = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (HasWord HW : tree.yieldHasWord()) {
|
for (HasWord HW : tree.yieldHasWord()) {
|
||||||
if (HW != null && HW.word() != null && !HWlist.contains(HW)) {
|
if (HW != null && HW.word() != null && !HWlist.values().contains(HW)) {
|
||||||
//System.out.println("HasWord: " + HW + "\n");
|
|
||||||
addCounter++;
|
addCounter++;
|
||||||
HWlist.add(HW);
|
HWlist.put(HWlist.size() + 1, HW);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (addCounter > 3) {
|
if (addCounter > 3) {
|
||||||
boolean tooclosematch = false;
|
boolean tooclosematch = false;
|
||||||
for (String strVals : stringCache.values()) {
|
Collection<String> values = stringCache.values();
|
||||||
|
for (String strVals : values) {
|
||||||
LevenshteinDistance leven = new LevenshteinDistance(strVals, str1);
|
LevenshteinDistance leven = new LevenshteinDistance(strVals, str1);
|
||||||
double Distance = leven.computeLevenshteinDistance();
|
double Distance = leven.computeLevenshteinDistance();
|
||||||
int maxpermittedDistance = 2;
|
int maxpermittedDistance = 2;
|
||||||
|
@ -3,6 +3,7 @@ package FunctionLayer.StanfordParser;
|
|||||||
import FunctionLayer.LevenshteinDistance;
|
import FunctionLayer.LevenshteinDistance;
|
||||||
import FunctionLayer.MYSQLDatahandler;
|
import FunctionLayer.MYSQLDatahandler;
|
||||||
import FunctionLayer.SimilarityMatrix;
|
import FunctionLayer.SimilarityMatrix;
|
||||||
|
import com.google.common.collect.MapMaker;
|
||||||
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
|
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
|
||||||
import edu.stanford.nlp.ling.CoreAnnotations;
|
import edu.stanford.nlp.ling.CoreAnnotations;
|
||||||
import edu.stanford.nlp.ling.CoreLabel;
|
import edu.stanford.nlp.ling.CoreLabel;
|
||||||
@ -32,6 +33,7 @@ import java.util.Collection;
|
|||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.Callable;
|
import java.util.concurrent.Callable;
|
||||||
|
import java.util.concurrent.ConcurrentMap;
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
import org.ejml.simple.SimpleMatrix;
|
import org.ejml.simple.SimpleMatrix;
|
||||||
|
|
||||||
@ -89,48 +91,50 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
|||||||
int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
|
int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
|
||||||
overValue *= 16;
|
overValue *= 16;
|
||||||
score -= overValue;
|
score -= overValue;
|
||||||
List<String> tgwlistIndex = new ArrayList();
|
ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
taggedwordlist1.forEach((TGWList) -> {
|
taggedwordlist1.forEach((TGWList) -> {
|
||||||
TGWList.forEach((TaggedWord) -> {
|
TGWList.forEach((TaggedWord) -> {
|
||||||
if (!tgwlistIndex.contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
|
if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
|
||||||
tgwlistIndex.add(TaggedWord.tag());
|
tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
taggedwordlist1.clear();
|
||||||
AtomicInteger runCount = new AtomicInteger(0);
|
AtomicInteger runCount = new AtomicInteger(0);
|
||||||
taggedwordlist2.forEach((TGWList) -> {
|
taggedwordlist2.forEach((TGWList) -> {
|
||||||
TGWList.forEach((TaggedWord) -> {
|
TGWList.forEach((TaggedWord) -> {
|
||||||
if (tgwlistIndex.contains(TaggedWord.tag())) {
|
if (tgwlistIndex.values().contains(TaggedWord.tag())) {
|
||||||
tgwlistIndex.remove(TaggedWord.tag());
|
tgwlistIndex.values().remove(TaggedWord.tag());
|
||||||
runCount.getAndIncrement();
|
runCount.getAndIncrement();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
tgwlistIndex.clear();
|
||||||
|
taggedwordlist2.clear();
|
||||||
score += runCount.get() * 64;
|
score += runCount.get() * 64;
|
||||||
Annotation annotation = new Annotation(str1);
|
Annotation annotation = new Annotation(str1);
|
||||||
pipeline.annotate(annotation);
|
pipeline.annotate(annotation);
|
||||||
List<Tree> sentenceConstituencyParseList = new ArrayList();
|
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (CoreMap sentence : annotation.get(CoreAnnotations.SentencesAnnotation.class)) {
|
for (CoreMap sentence : annotation.get(CoreAnnotations.SentencesAnnotation.class)) {
|
||||||
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
|
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
|
||||||
sentenceConstituencyParseList.add(sentenceConstituencyParse);
|
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
|
||||||
}
|
}
|
||||||
Annotation annotation1 = new Annotation(str);
|
Annotation annotation1 = new Annotation(str);
|
||||||
pipeline.annotate(annotation1);
|
pipeline.annotate(annotation1);
|
||||||
List<String> nerList = new ArrayList();
|
|
||||||
for (CoreMap sentence : annotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
for (CoreMap sentence : annotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
||||||
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
|
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
|
||||||
GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
|
GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
|
||||||
Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
|
Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
|
||||||
List<String> filerTreeContent = new ArrayList();
|
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList) {
|
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
|
||||||
Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
|
Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
|
||||||
Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
|
Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
|
||||||
List<String> constiLabels = new ArrayList();
|
ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (Constituent consti : inT1notT2) {
|
for (Constituent consti : inT1notT2) {
|
||||||
for (Constituent consti1 : inT2notT1) {
|
for (Constituent consti1 : inT2notT1) {
|
||||||
if (consti.value().equals(consti1.value()) && !constiLabels.contains(consti.value())) {
|
if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
|
||||||
score += 64; //256
|
score += 64;
|
||||||
constiLabels.add(consti.value());
|
constiLabels.put(constiLabels.size(), consti.value());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -163,8 +167,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
|||||||
AtomicInteger runCount1 = new AtomicInteger(0);
|
AtomicInteger runCount1 = new AtomicInteger(0);
|
||||||
sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
|
sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
|
||||||
sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
|
sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
|
||||||
&& !filerTreeContent.contains(LBW.lemma()))).map((_item) -> {
|
&& !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
|
||||||
filerTreeContent.add(LBW.lemma());
|
filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
|
||||||
return _item;
|
return _item;
|
||||||
}).forEachOrdered((_item) -> {
|
}).forEachOrdered((_item) -> {
|
||||||
runCount1.getAndIncrement();
|
runCount1.getAndIncrement();
|
||||||
@ -173,30 +177,31 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
|||||||
score += runCount1.get() * 1500;
|
score += runCount1.get() * 1500;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
sentenceConstituencyParseList.clear();
|
||||||
Annotation annotationSentiment1 = pipelineSentiment.process(str);
|
Annotation annotationSentiment1 = pipelineSentiment.process(str);
|
||||||
List<SimpleMatrix> simpleSMXlist = new ArrayList();
|
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
List<SimpleMatrix> simpleSMXlistVector = new ArrayList();
|
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
List<Integer> sentiment1 = new ArrayList();
|
ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
List<Integer> sentiment2 = new ArrayList();
|
ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
|
||||||
for (CoreMap sentence : annotationSentiment1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
for (CoreMap sentence : annotationSentiment1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
||||||
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
|
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
|
||||||
sentiment1.add(RNNCoreAnnotations.getPredictedClass(tree));
|
sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
|
||||||
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
||||||
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
||||||
simpleSMXlist.add(predictions);
|
simpleSMXlist.put(simpleSMXlist.size(), predictions);
|
||||||
simpleSMXlistVector.add(nodeVector);
|
simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
|
||||||
}
|
}
|
||||||
annotationSentiment1 = pipelineSentiment.process(str1);
|
annotationSentiment1 = pipelineSentiment.process(str1);
|
||||||
for (CoreMap sentence : annotationSentiment1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
for (CoreMap sentence : annotationSentiment1.get(CoreAnnotations.SentencesAnnotation.class)) {
|
||||||
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
|
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
|
||||||
sentiment2.add(RNNCoreAnnotations.getPredictedClass(tree));
|
sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
|
||||||
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
|
||||||
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
|
||||||
score = simpleSMXlist.stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
|
score = simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
|
||||||
subtracter *= 25;
|
subtracter *= 25;
|
||||||
return subtracter;
|
return subtracter;
|
||||||
}).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
|
}).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
|
||||||
for (SimpleMatrix simpleSMX : simpleSMXlistVector) {
|
for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
|
||||||
double dot = nodeVector.dot(simpleSMX);
|
double dot = nodeVector.dot(simpleSMX);
|
||||||
double elementSum = nodeVector.kron(simpleSMX).elementSum();
|
double elementSum = nodeVector.kron(simpleSMX).elementSum();
|
||||||
elementSum = Math.round(elementSum * 100.0) / 100.0;
|
elementSum = Math.round(elementSum * 100.0) / 100.0;
|
||||||
|
@ -82,7 +82,7 @@ public class DiscordHandler {
|
|||||||
MessageResponseHandler.getMessage(strresult);
|
MessageResponseHandler.getMessage(strresult);
|
||||||
try {
|
try {
|
||||||
MYSQLDatahandler.instance.checkIfUpdateStrings();
|
MYSQLDatahandler.instance.checkIfUpdateStrings();
|
||||||
//MYSQLDatahandler.instance.checkIfUpdateMatrixes();
|
MYSQLDatahandler.instance.checkIfUpdateMatrixes();
|
||||||
} catch (CustomError ex) {
|
} catch (CustomError ex) {
|
||||||
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
|
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user