annotation caching with instance safety for JMWE, changed entry slightly, multithreaded model loading at startup, parallel streaming of the annotations; the ForkJoinPool should handle futures better now with try/catch blocks in SentimentAnalyzerTest

This commit is contained in:
jenzur 2019-03-20 22:38:28 +01:00
parent b3631ab1a0
commit c5c1a31a14
6 changed files with 1190 additions and 341 deletions
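For context, a minimal self-contained sketch of the future-handling pattern this commit converges on. This is not code from the diff: the FuturePatternSketch class and the squaring worker are illustrative stand-ins for the SentimentAnalyzerTest callables, but the ForkJoinPool parallelism, the bounded 20-second get, and the per-future try/catch that skips failures instead of aborting everything mirror the changes below.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class FuturePatternSketch {
    public static void main(String[] args) {
        //same parallelism as Datahandler.instantiateExecutor()
        ForkJoinPool executor = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
        List<Future<Integer>> futures = new ArrayList<>();
        for (int i = 0; i < 8; i++) {
            final int n = i;
            //stand-in for a new SentimentAnalyzerTest(...) worker
            Callable<Integer> worker = () -> n * n;
            futures.add(executor.submit(worker));
        }
        for (Future<Integer> future : futures) {
            try {
                //bounded wait, as in the diff: a slow or failed worker is skipped, not fatal
                Integer result = future.get(20, TimeUnit.SECONDS);
                System.out.println("score: " + result);
            } catch (InterruptedException | ExecutionException | TimeoutException ex) {
                System.out.println("skipped failed future: " + ex.getMessage());
            }
        }
        executor.shutdown();
    }
}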

View File

@@ -0,0 +1,773 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
import DataLayer.DataMapper;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import com.google.common.base.Stopwatch;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.ling.Word;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.parser.shiftreduce.ShiftReduceParser;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.process.DocumentPreprocessor;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.GrammaticalStructureFactory;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreebankLanguagePack;
import java.io.IOException;
import java.io.StringReader;
import java.sql.SQLException;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
*
* @author install1
*/
public class Datahandler {
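//cache refresh windows: the string cache is rebuilt every 6 minutes, the relation matrix every 10 hours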
public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(6, TimeUnit.MINUTES);
public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
public static Datahandler instance = new Datahandler();
private volatile boolean refreshMatrixFromDB;
private static int secondaryIterator = 0;
private final ConcurrentMap<Integer, String> stringCache;
private static ConcurrentMap<String, Annotation> pipelineAnnotationCache;
private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
private static ConcurrentMap<String, Annotation> jmweAnnotationCache;
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap<>();
private final Stopwatch stopwatch;
private final Stopwatch stopwatch1;
private ForkJoinPool executor;
private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz";
private static String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger";
private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
private static MaxentTagger tagger;
private static ShiftReduceParser model;
private static String[] options = {"-maxLength", "100"};
private static Properties props = new Properties();
private static Properties propsSentiment = new Properties();
private static GrammaticalStructureFactory gsf;
private static LexicalizedParser lp;
private static TreebankLanguagePack tlp;
private static AbstractSequenceClassifier<CoreLabel> classifier;
// set up Stanford CoreNLP pipeline
private static StanfordCoreNLP pipeline;
private static StanfordCoreNLP pipelineSentiment;
public Datahandler() {
this.stopwatch = Stopwatch.createUnstarted();
this.stopwatch1 = Stopwatch.createStarted();
this.stringCache = new MapMaker().concurrencyLevel(2).makeMap();
//sadly can't share one pipeline across every annotation type; one pipeline per annotation type is required
this.jmweAnnotationCache = new MapMaker().concurrencyLevel(2).makeMap();
this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(2).makeMap();
this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(2).makeMap();
}
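//loads the four heavy NLP models on separate threads; the CountDownLatch blocks until every loader finishes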
public void shiftReduceParserInitiate() {
//got 8 cores
CountDownLatch cdl = new CountDownLatch(4);
new Thread(() -> {
model = ShiftReduceParser.loadModel(shiftReduceParserPath, options);
cdl.countDown();
}).start();
new Thread(() -> {
try {
classifier = CRFClassifier.getClassifierNoExceptions(nerModel);
} catch (ClassCastException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
cdl.countDown();
}).start();
new Thread(() -> {
propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
propsSentiment.setProperty("ner.model", nerModel);
propsSentiment.setProperty("sentiment.model", sentimentModel);
propsSentiment.setProperty("parse.maxlen", "100");
propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,depparse,sentiment"); //coref too expensive memorywise
pipelineSentiment = new StanfordCoreNLP(propsSentiment);
tagger = new MaxentTagger(taggerPath);
cdl.countDown();
}).start();
new Thread(() -> {
props.setProperty("parse.model", shiftReduceParserPath);
props.setProperty("parse.maxlen", "100");
props.setProperty("parse.binaryTrees", "true");
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,parse");
pipeline = new StanfordCoreNLP(props);
cdl.countDown();
}).start();
lp = LexicalizedParser.loadModel(lexParserEnglishRNN, options);
tlp = lp.getOp().langpack();
gsf = tlp.grammaticalStructureFactory();
try {
cdl.await();
} catch (InterruptedException ex) {
System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
}
System.out.println("finished shiftReduceParserInitiate\n");
}
public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
return classifier;
}
public static void setClassifier(AbstractSequenceClassifier<CoreLabel> classifier) {
Datahandler.classifier = classifier;
}
public void updateStringCache() {
try {
checkIfUpdateStrings(true);
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
}
public void instantiateExecutor() {
this.executor = new ForkJoinPool(Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null, false); //true
}
public static GrammaticalStructureFactory getGsf() {
return gsf;
}
public static StanfordCoreNLP getPipeline() {
return pipeline;
}
public static StanfordCoreNLP getPipelineSentiment() {
return pipelineSentiment;
}
public static MaxentTagger getTagger() {
return tagger;
}
public static ShiftReduceParser getModel() {
return model;
}
private Map<Integer, String> getCache() throws SQLException, IOException, CustomError {
return DataMapper.getAllStrings();
}
public int getlHMSMXSize() {
return lHMSMX.size();
}
public int getstringCacheSize() {
return stringCache.size();
}
public void initiateMYSQL() throws SQLException, IOException {
try {
DataMapper.createTables();
stringCache.putAll(getCache());
lHMSMX = DataMapper.getAllRelationScores();
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
}
public void addHLstatsMessages() {
ConcurrentMap<Integer, String> hlStatsMessages = DataMapper.getHLstatsMessages();
ConcurrentMap<Integer, String> strCacheLocal = stringCache;
int hardcap = 7500;
int counter = 0;
for (String str : hlStatsMessages.values()) {
if (!str.startsWith("!")) {
boolean present = false;
for (String str1 : strCacheLocal.values()) {
if (str.equals(str1)) {
present = true;
break;
}
}
if (!present) {
//System.out.println("addHLstatsMessages adding str: " + str + "\n");
MessageResponseHandler.getMessage(str);
}
}
if (counter >= hardcap) {
break;
}
counter++;
}
}
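//annotates every cached string once, in parallel, filling all three annotation caches up front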
public void instantiateAnnotationMap() {
if (!stringCache.isEmpty()) {
stringCache.values().parallelStream().forEach(str -> {
System.out.println("str annotation pipeline pipelinesentiment: " + str + "\n");
Annotation strAnno = new Annotation(str);
pipeline.annotate(strAnno);
pipelineAnnotationCache.put(str, strAnno);
jmweAnnotationCache.put(str, PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str));
Annotation strAnno2 = new Annotation(str);
pipelineSentiment.annotate(strAnno2);
pipelineSentimentAnnotationCache.put(str, strAnno2);
});
}
System.out.println("FINISHED ALL ANNOTATIONS");
System.out.println("FINISHED ALL ANNOTATIONS");
System.out.println("FINISHED ALL ANNOTATIONS");
/*
int poolsize = Runtime.getRuntime().availableProcessors();
CountDownLatch cdl = new CountDownLatch(poolsize + 1);
int rangeAdder = (stringCache.values().size() / poolsize);
for (int i = 0; i < poolsize; i++) {
final int ij = i;
new Thread(() -> {
int counter = rangeAdder * ij;
for (int j = 0; j < rangeAdder; j++) {
String str = stringCache.getOrDefault(counter + j, null);
if (str != null) {
System.out.println("adding str jmwe annotation: " + str + "\n");
Annotation strAnno = new Annotation(str);
pipelineJMWE.annotate(strAnno);
jmweAnnotationCache.put(str, strAnno);
}
}
cdl.countDown();
}).start();
}
try {
cdl.await();
} catch (InterruptedException ex) {
System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
}*/
}
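//selects one cached string with no relation scores yet (rotating through the cache once all are scored), scores it against every other string via futures, and persists the new matrix rows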
public synchronized void checkIfUpdateMatrixes() {
refreshMatrixFromDB = false;
if (stopwatch1.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS1) {
refreshMatrixFromDB = true;
lHMSMX = DataMapper.getAllRelationScores();
stopwatch1.reset();
}
//requiring at least 10 entries avoids issues when the stringCache is empty
if (stringCache.values().size() > 10 && !refreshMatrixFromDB) {
ConcurrentMap<Integer, String> stringCachelocal = stringCache;
int selectUpdate = -1;
LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
int ij2 = 0;
for (String str : stringCachelocal.values()) {
boolean updatepresent = false;
for (String strlocal : LHMSMXLocal.keySet()) {
if (strlocal.equals(str)) {
updatepresent = true;
break;
}
}
if (!updatepresent) {
selectUpdate = ij2;
break;
}
ij2++;
}
if (selectUpdate == -1 || selectUpdate + 1 == stringCachelocal.size()) {
int valueSize = stringCachelocal.size();
if (secondaryIterator + 1 >= valueSize) {
secondaryIterator = 0;
}
selectUpdate = secondaryIterator;
secondaryIterator++;
}
ConcurrentMap<Integer, String> strIndexNavigator = new MapMaker().concurrencyLevel(2).makeMap();
String get = stringCachelocal.getOrDefault(selectUpdate, null);
if (get == null) {
get = stringCachelocal.get(new Random().nextInt(stringCachelocal.size() - 1));
}
strIndexNavigator.put(0, get);
ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
strIndexNavigator.values().forEach((str) -> {
stringCachelocal.values().stream().filter((str1) -> (!str.equals(str1))).forEachOrdered((str1) -> {
boolean present = false;
LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(str, null);
if (orDefault != null) {
for (String strkey : orDefault.keySet()) {
if (strkey.equals(str1)) {
present = true;
break;
}
}
}
if (!present) {
orDefault = lHMSMX.getOrDefault(str1, null);
if (orDefault != null) {
for (String strkey : orDefault.keySet()) {
if (strkey.equals(str)) {
present = true;
break;
}
}
}
}
if (!present) {
LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(str, null);
if (orDefault1 == null) {
orDefault1 = new LinkedHashMap<String, Double>();
}
orDefault1.put(str1, 0.0);
lHMSMX.put(str, orDefault1);
SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, SMX, jmweAnnotationCache.get(str),
jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(str), pipelineAnnotationCache.get(str1),
pipelineSentimentAnnotationCache.get(str), pipelineSentimentAnnotationCache.get(str1));
futures.put(futures.size() + 1, executor.submit(worker));
}
});
});
System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
for (Future<SimilarityMatrix> future : futures.values()) {
SimilarityMatrix SMX = new SimilarityMatrix("", "");
try {
SMX = future.get(20, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
SMX = null;
}
if (SMX != null) {
LinkedHashMap<String, Double> getFuture = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
getFuture.put(SMX.getSecondaryString(), SMX.getDistance());
lHMSMX.put(SMX.getPrimaryString(), getFuture);
matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
}
}
try {
if (!matrixUpdateList.isEmpty()) {
DataMapper.insertSementicMatrixes(matrixUpdateList);
System.out.println("finished datamapper semetic insert");
}
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
}
}
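//once the 6-minute window elapses, cleans the gathered messages, drops unfit ones, persists the survivors, and resets the window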
public synchronized void checkIfUpdateStrings(boolean hlStatsMsg) throws CustomError {
if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) {
ConcurrentMap<Integer, String> str = MessageResponseHandler.getStr();
str = cutContent(str, hlStatsMsg);
str = filterContent(str);
str = removeSlacks(str);
str = verifyCalculationFitness(str);
System.out.println("Check if updateString str size POST: " + str.size() + "\n");
try {
DataMapper.InsertMYSQLStrings(str);
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap());
if (!stopwatch.isRunning()) {
stopwatch.start();
} else {
stopwatch.reset();
}
}
}
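//picks a reply: first via precomputed matrix scores for the closest cached string, otherwise by scoring the input against the whole cache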
public synchronized String getResponseMsg(String str) throws CustomError {
str = str.trim();
if (str.startsWith("<@")) {
str = str.substring(str.indexOf("> ") + 2);
}
final LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
ConcurrentMap<Integer, String> strArrs = stringCache;
double Score = -10000;
SimilarityMatrix SMXreturn = new SimilarityMatrix("", "");
System.out.println("pre mostSimilarSTR \n");
String mostSimilarSTR = mostSimilar(str, strArrs);
if (mostSimilarSTR != null) {
System.out.println("mostSimilarSTR; " + mostSimilarSTR + "\n");
LinkedHashMap<String, Double> orDefault = LHMSMXLocal.getOrDefault(mostSimilarSTR, null);
if (orDefault != null) {
for (Entry<String, Double> entrySet : orDefault.entrySet()) {
double smxDistance = entrySet.getValue();
if (smxDistance > Score) {
Score = smxDistance;
SMXreturn = new SimilarityMatrix(mostSimilarSTR, entrySet.getKey(), smxDistance);
}
}
}
for (Entry<String, LinkedHashMap<String, Double>> values1 : LHMSMXLocal.entrySet()) {
LinkedHashMap<String, Double> value = values1.getValue();
for (Entry<String, Double> keystr : value.entrySet()) {
if (keystr.getKey().equals(mostSimilarSTR)) {
double smxDistance = keystr.getValue();
if (smxDistance > Score) {
Score = smxDistance;
SMXreturn = new SimilarityMatrix(values1.getKey(), keystr.getKey(), smxDistance);
}
}
}
}
if (!SMXreturn.getPrimaryString().isEmpty()) {
if (SMXreturn.getPrimaryString().equals(mostSimilarSTR)) {
return SMXreturn.getSecondaryString();
} else {
return SMXreturn.getPrimaryString();
}
}
}
System.out.println("none within 8 range");
ConcurrentMap<Integer, String> strCache = stringCache;
ConcurrentMap<Integer, Future<SimilarityMatrix>> futureslocal = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, SimilarityMatrix> futurereturn = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : strCache.values()) {
if (!str.equals(str1)) {
SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, SMX,
jmweAnnotationCache.get(str), jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(str),
pipelineAnnotationCache.get(str1), pipelineSentimentAnnotationCache.get(str),
pipelineSentimentAnnotationCache.get(str1));
futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
}
}
int index = 0;
futureslocal.values().parallelStream().forEach((future) -> {
SimilarityMatrix SMX = new SimilarityMatrix("", "");
try {
SMX = future.get(20, TimeUnit.SECONDS);
futurereturn.put(futurereturn.size() + 1, SMX);
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
}
});
for (SimilarityMatrix SMX : futurereturn.values()) {
double distance = SMX.getDistance();
System.out.println("index: " + index + "\nfutures size: " + futureslocal.values().size() + "\nScore: " + SMX.getDistance() + "\nSecondary: "
+ SMX.getSecondaryString() + "\nPrimary: " + SMX.getPrimaryString() + "\n");
if (distance > Score) {
Score = distance;
SMXreturn = SMX;
}
index++;
}
System.out.println("Reached end: secondary: " + SMXreturn.getSecondaryString() + "\nPrimarY: " + SMXreturn.getPrimaryString()
+ "\nScore: " + SMXreturn.getDistance());
return SMXreturn.getSecondaryString();
}
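//parallel Levenshtein search; returns the cached string closest to toBeCompared, or an empty string if nothing is closer than 8 edits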
public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings) {
int minDistance = 8;
String similar = "";
ConcurrentMap<Integer, Future<Entry<String, Integer>>> futures = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Integer> futuresreturnvalues = new MapMaker().concurrencyLevel(2).makeMap();
concurrentStrings.values().parallelStream().forEach((str) -> {
Callable<Entry<String, Integer>> worker = new LevenshteinDistance(toBeCompared, str);
futures.put(futures.size() + 1, executor.submit(worker));
});
futures.values().parallelStream().forEach((future) -> {
try {
Entry<String, Integer> get = future.get();
futuresreturnvalues.put(get.getKey(), get.getValue());
} catch (NullPointerException | InterruptedException | ExecutionException ex) {
System.out.println("failed future\nex: " + ex.getMessage() + "\n");
}
});
for (Entry<String, Integer> entritr : futuresreturnvalues.entrySet()) {
int distance = entritr.getValue();
if (distance < minDistance) {
System.out.println("distance: " + distance + "\n");
minDistance = distance;
similar = entritr.getKey();
}
}
return similar;
}
public static ConcurrentMap<Integer, String> cutContent(ConcurrentMap<Integer, String> str, boolean hlStatsMsg) {
ConcurrentMap<Integer, String> returnlist = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : str.values()) {
int iend = str1.indexOf("content: ");
if (iend != -1) {
String trs = str1.substring(iend + 9);
returnlist.put(returnlist.size() + 1, trs.substring(0, trs.length() - 1));
} else if (hlStatsMsg) {
returnlist.put(returnlist.size() + 1, str1);
}
}
return returnlist;
}
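//strips game-chat artifacts (PM*/AM* markers, player-state tags, {color} codes); keeps trimmed strings longer than two chars that do not start with !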
public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : str.values()) {
if (str1.isEmpty() || str1.length() < 3) {
continue;
}
str1 = str1.trim();
if (str1.contains("PM*")) {
str1 = str1.substring(str1.indexOf("PM*") + 5);
}
if (str1.contains("AM*")) {
str1 = str1.substring(str1.indexOf("AM*") + 5);
}
for (Character c : str1.toCharArray()) {
if (c == '?' || c == '°') {
str1 = str1.replace("?", " <:wlenny:514861023002624001> ");
str1 = str1.replace("°", " <:wlenny:514861023002624001> ");
}
}
if (str1.contains("(Counter-Terrorist)")) {
str1 = str1.replace("(Counter-Terrorist)", " ");
}
if (str1.contains("(Terrorist)")) {
str1 = str1.replace("(Terrorist)", " ");
}
if (str1.contains("(Spectator)")) {
str1 = str1.replace("(Spectator)", " ");
}
if (str1.contains("*DEAD*")) {
str1 = str1.replace("*DEAD*", " ");
}
if (str1.contains("{red}")) {
str1 = str1.replace("{red}", " ");
}
if (str1.contains("{orange}")) {
str1 = str1.replace("{orange}", " ");
}
if (str1.contains("{yellow}")) {
str1 = str1.replace("{yellow}", " ");
}
if (str1.contains("{green}")) {
str1 = str1.replace("{green}", " ");
}
if (str1.contains("{lightblue}")) {
str1 = str1.replace("{lightblue}", " ");
}
if (str1.contains("{blue}")) {
str1 = str1.replace("{blue}", " ");
}
if (str1.contains("{purple}")) {
str1 = str1.replace("{purple}", " ");
}
if (str1.contains("{white}")) {
str1 = str1.replace("{white}", " ");
}
if (str1.contains("{fullblue}")) {
str1 = str1.replace("{fullblue}", " ");
}
if (str1.contains("{cyan}")) {
str1 = str1.replace("{cyan}", " ");
}
if (str1.contains("{lime}")) {
str1 = str1.replace("{lime}", " ");
}
if (str1.contains("{deeppink}")) {
str1 = str1.replace("{deeppink}", " ");
}
if (str1.contains("{slategray}")) {
str1 = str1.replace("{slategray}", " ");
}
if (str1.contains("{dodgerblue}")) {
str1 = str1.replace("{dodgerblue}", " ");
}
if (str1.contains("{black}")) {
str1 = str1.replace("{black}", " ");
}
if (str1.contains("{orangered}")) {
str1 = str1.replace("{orangered}", " ");
}
if (str1.contains("{darkorchid}")) {
str1 = str1.replace("{darkorchid}", " ");
}
if (str1.contains("{pink}")) {
str1 = str1.replace("{pink}", " ");
}
if (str1.contains("{lightyellow}")) {
str1 = str1.replace("{lightyellow}", " ");
}
if (str1.contains("{chocolate}")) {
str1 = str1.replace("{chocolate}", " ");
}
if (str1.contains("{beige}")) {
str1 = str1.replace("{beige}", " ");
}
if (str1.contains("{azure}")) {
str1 = str1.replace("{azure}", " ");
}
if (str1.contains("{yellowgreen}")) {
str1 = str1.replace("{yellowgreen}", " ");
}
str1 = str1.trim();
if (str1.length() > 2 && (!str1.startsWith("!"))) {
strlistreturn.put(strlistreturn.size() + 1, str1);
}
}
return strlistreturn;
}
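//keeps only strings with enough distinct POS tags and words, and drops anything closer than 2 edits to a string already cached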
private ConcurrentMap<Integer, String> removeSlacks(ConcurrentMap<Integer, String> str) {
ShiftReduceParser model = getModel();
MaxentTagger tagger = getTagger();
List<TaggedWord> taggedWords;
ConcurrentMap<Integer, String> strreturn = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : str.values()) {
int counter = 0;
ConcurrentMap<Integer, String> TGWList = new MapMaker().concurrencyLevel(2).makeMap();
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
for (List<HasWord> sentence : tokenizer) {
List<TaggedWord> tagged1 = tagger.tagSentence(sentence);
Tree tree = model.apply(tagged1);
taggedWords = tree.taggedYield();
for (TaggedWord TGW : taggedWords) {
if (!TGWList.values().contains(TGW.tag()) && !TGW.tag().equals(":") && !TGW.word().equals(TGW.tag())) {
TGWList.put(TGWList.size() + 1, TGW.tag());
counter++;
}
if (counter > 3) {
int addCounter = 0;
ConcurrentMap<Integer, Word> wordList = new MapMaker().concurrencyLevel(2).makeMap();
for (Word lab : tree.yieldWords()) {
if (lab != null && lab.word() != null) {
//System.out.println("lab: " + lab + " \n");
if (!wordList.values().contains(lab) && lab.value() != null && !lab.value().equals(":")) {
wordList.put(wordList.size() + 1, lab);
addCounter++;
}
}
}
if (addCounter > 3) {
addCounter = 0;
ConcurrentMap<Integer, HasWord> HWlist = new MapMaker().concurrencyLevel(2).makeMap();
for (HasWord HW : tree.yieldHasWord()) {
if (HW != null && HW.word() != null && !HWlist.values().contains(HW)) {
addCounter++;
HWlist.put(HWlist.size() + 1, HW);
}
}
if (addCounter > 3) {
boolean tooclosematch = false;
Collection<String> values = stringCache.values();
for (String strVals : values) {
LevenshteinDistance leven = new LevenshteinDistance(strVals, str1);
double Distance = leven.computeLevenshteinDistance();
int maxpermittedDistance = 2;
if (Distance < maxpermittedDistance) {
tooclosematch = true;
break;
}
}
if (!tooclosematch) {
strreturn.put(strreturn.size() + 1, str1);
}
}
}
break;
}
}
if (counter > 3) {
break;
}
}
}
return strreturn;
}
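//annotates the candidates into local caches, scores each against every known string, and admits only candidates whose futures complete; admitted strings are then annotated into the global caches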
private ConcurrentMap<Integer, String> verifyCalculationFitness(ConcurrentMap<Integer, String> strmap) {
ConcurrentMap<Integer, String> returnmap = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Annotation> pipelineAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Annotation> pipelineSentimentAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Annotation> jmweAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
strmap.values().parallelStream().forEach(strCache -> {
Annotation strAnno = new Annotation(strCache);
pipeline.annotate(strAnno);
pipelineAnnotateCachelcl.put(strCache, strAnno);
jmweAnnotateCachelcl.put(strCache, PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strCache));
Annotation strAnno2 = new Annotation(strCache);
pipelineSentiment.annotate(strAnno2);
pipelineSentimentAnnotateCachelcl.put(strCache, strAnno2);
});
final ConcurrentMap<Integer, String> allStrings;
if (!stringCache.isEmpty()) {
allStrings = stringCache;
} else {
allStrings = strmap;
}
ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
strmap.values().parallelStream().forEach((str) -> {
allStrings.values().parallelStream().forEach((str1) -> {
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, new SimilarityMatrix(str, str1),
jmweAnnotateCachelcl.get(str), jmweAnnotateCachelcl.get(str1), pipelineAnnotateCachelcl.get(str),
pipelineAnnotateCachelcl.get(str1), pipelineSentimentAnnotateCachelcl.get(str),
pipelineSentimentAnnotateCachelcl.get(str1));
futures.put(futures.size() + 1, executor.submit(worker));
});
});
System.out.println("verifycalc futures size: " + futures.size() + "\n");
futures.values().parallelStream().forEach((future) -> {
SimilarityMatrix get;
try {
get = future.get(20, TimeUnit.SECONDS);
String addStr = get.getPrimaryString();
boolean presentstr = false;
for (String returnstr : returnmap.values()) {
if (returnstr.equals(addStr)) {
presentstr = true;
break;
}
}
if (!presentstr) {
returnmap.put(returnmap.size() + 1, addStr);
System.out.println("adding addStr: " + addStr + "\n");
}
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
System.out.println("failed verification: " + ex.getMessage() + "\n");
}
});
returnmap.values().parallelStream().forEach(strCache -> {
stringCache.put(stringCache.size() + 1, strCache);
System.out.println("str annotation pipeline pipelinesentiment: " + strCache + "\n");
Annotation strAnno = new Annotation(strCache);
pipeline.annotate(strAnno);
pipelineAnnotationCache.put(strCache, strAnno);
jmweAnnotationCache.put(strCache, PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strCache));
Annotation strAnno2 = new Annotation(strCache);
pipelineSentiment.annotate(strAnno2);
pipelineSentimentAnnotationCache.put(strCache, strAnno2);
});
return returnmap;
}
}

View File

@@ -6,6 +6,7 @@
package FunctionLayer;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
/**
@@ -16,7 +17,7 @@ public class LevenshteinDistance implements Callable<Map.Entry<String, Integer>>
private CharSequence lhs;
private CharSequence rhs;
private Map.Entry<String, Integer> distanceEntry;
private Entry<String, Integer> distanceEntry;
private static int minimum(int a, int b, int c) {
return Math.min(Math.min(a, b), c);
@@ -47,7 +48,7 @@ public class LevenshteinDistance implements Callable<Map.Entry<String, Integer>>
}
@Override
public Map.Entry<String, Integer> call() {
public Entry<String, Integer> call() {
try {
int[][] distance = new int[lhs.length() + 1][rhs.length() + 1];

View File

@@ -42,14 +42,14 @@ public class MessageResponseHandler {
public static String selectReponseMessage(String toString) throws CustomError {
ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(2).makeMap();
str1.put(str1.size() + 1, toString);
str1 = MYSQLDatahandler.cutContent(str1);
str1 = Datahandler.cutContent(str1, false);
String strreturn = "";
for (String str : str1.values()) {
if (!str.isEmpty()) {
strreturn = str;
}
}
String getResponseMsg = MYSQLDatahandler.instance.getResponseMsg(strreturn);
String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn);
return getResponseMsg;
}
}

View File

@@ -0,0 +1,59 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
import edu.mit.jmwe.data.IMWEDescID;
import edu.mit.jmwe.data.IRootMWEDesc;
import edu.mit.jmwe.index.IMWEIndex;
import edu.mit.jmwe.index.InMemoryMWEIndex;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.JMWEAnnotator;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author install1
*/
//maybe not public?
public class PipelineJMWESingleton {
public final static PipelineJMWESingleton INSTANCE = new PipelineJMWESingleton();
private static Properties propsJMWE;
private static volatile StanfordCoreNLP pipelineJMWE = initializeJMWE();
//super important synchronization lock: guards the shared JMWE pipeline against concurrent annotation
public static final synchronized Annotation getJMWEAnnotation(String str) {
Annotation annoStr = new Annotation(str);
pipelineJMWE.annotate(annoStr);
return annoStr;
}
public final static StanfordCoreNLP initializeJMWE() {
String jmweIndexData = "/home/javatests/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data";
propsJMWE = new Properties();
propsJMWE.setProperty("customAnnotatorClass.jmwe", "edu.stanford.nlp.pipeline.JMWEAnnotator");
propsJMWE.setProperty("customAnnotatorClass.jmwe.verbose", "false");
propsJMWE.setProperty("customAnnotatorClass.jmwe.underscoreReplacement", "-");
propsJMWE.setProperty("customAnnotatorClass.jmwe.indexData", jmweIndexData); //jmweIndexDataLocalTest jmweIndexData
propsJMWE.setProperty("customAnnotatorClass.jmwe.detector", "Exhaustive");
//"Consecutive", "Exhaustive", "ProperNouns", "Complex" and "CompositeConsecutiveProperNouns"
propsJMWE.setProperty("annotators", "tokenize, ssplit, pos, lemma, jmwe");
System.out.println("finished singleton constructor \n");
return new StanfordCoreNLP(propsJMWE);
}
private PipelineJMWESingleton() {
}
public final static PipelineJMWESingleton getINSTANCE() {
return INSTANCE;
}
}
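The call sites in Datahandler consume the singleton in one line; as a reminder of the shape (the string literal here is illustrative):

Annotation jmweAnno = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation("some chat message");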

View File

@@ -1,7 +1,7 @@
package FunctionLayer.StanfordParser;
import FunctionLayer.LevenshteinDistance;
import FunctionLayer.MYSQLDatahandler;
import FunctionLayer.Datahandler;
import FunctionLayer.SimilarityMatrix;
import com.google.common.collect.MapMaker;
import edu.mit.jmwe.data.IMWE;
@@ -73,12 +73,12 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
this.str = str;
this.str1 = str1;
this.smxParam = smxParam;
this.model = MYSQLDatahandler.getModel();
this.tagger = MYSQLDatahandler.getTagger();
this.pipeline = MYSQLDatahandler.getPipeline();
this.pipelineSentiment = MYSQLDatahandler.getPipelineSentiment();
this.gsf = MYSQLDatahandler.getGsf();
this.classifier = MYSQLDatahandler.getClassifier();
this.model = Datahandler.getModel();
this.tagger = Datahandler.getTagger();
this.pipeline = Datahandler.getPipeline();
this.pipelineSentiment = Datahandler.getPipelineSentiment();
this.gsf = Datahandler.getGsf();
this.classifier = Datahandler.getClassifier();
this.jmweStrAnnotation1 = str1Annotation;
this.jmweStrAnnotation2 = str2Annotation;
this.pipelineAnnotation1 = strPipeline1;
@@ -90,332 +90,344 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
@Override
public SimilarityMatrix call() {
Double score = -100.0;
List<List<TaggedWord>> taggedwordlist1 = new ArrayList();
List<List<TaggedWord>> taggedwordlist2 = new ArrayList();
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
for (List<HasWord> sentence : tokenizer) {
taggedwordlist1.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
}
tokenizer = new DocumentPreprocessor(new StringReader(str));
for (List<HasWord> sentence : tokenizer) {
taggedwordlist2.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
}
int counter = 0;
int counter1 = 0;
counter = taggedwordlist2.stream().map((taggedlist2) -> taggedlist2.size()).reduce(counter, Integer::sum);
counter1 = taggedwordlist1.stream().map((taggedlist1) -> taggedlist1.size()).reduce(counter1, Integer::sum);
int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
overValue *= 16;
score -= overValue;
ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
taggedwordlist1.forEach((TGWList) -> {
TGWList.forEach((TaggedWord) -> {
if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
}
});
});
taggedwordlist1.clear();
AtomicInteger runCount = new AtomicInteger(0);
taggedwordlist2.forEach((TGWList) -> {
TGWList.forEach((TaggedWord) -> {
if (tgwlistIndex.values().contains(TaggedWord.tag())) {
tgwlistIndex.values().remove(TaggedWord.tag());
runCount.getAndIncrement();
}
});
});
tgwlistIndex.clear();
taggedwordlist2.clear();
score += runCount.get() * 64;
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
}
for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
for (Constituent consti : inT1notT2) {
for (Constituent consti1 : inT2notT1) {
if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
score += 64;
constiLabels.put(constiLabels.size(), consti.value());
}
try {
List<List<TaggedWord>> taggedwordlist1 = new ArrayList();
List<List<TaggedWord>> taggedwordlist2 = new ArrayList();
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
for (List<HasWord> sentence : tokenizer) {
taggedwordlist1.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
}
tokenizer = new DocumentPreprocessor(new StringReader(str));
for (List<HasWord> sentence : tokenizer) {
taggedwordlist2.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
}
int counter = 0;
int counter1 = 0;
counter = taggedwordlist2.stream().map((taggedlist2) -> taggedlist2.size()).reduce(counter, Integer::sum);
counter1 = taggedwordlist1.stream().map((taggedlist1) -> taggedlist1.size()).reduce(counter1, Integer::sum);
int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
overValue *= 16;
score -= overValue;
ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
taggedwordlist1.forEach((TGWList) -> {
TGWList.forEach((TaggedWord) -> {
if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
}
}
GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
for (TypedDependency TDY1 : allTypedDependencies1) {
IndexedWord dep = TDY1.dep();
IndexedWord gov = TDY1.gov();
GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 900;
}
GrammaticalRelation reln = TDY1.reln();
if (reln.isApplicable(sentenceConstituencyParse)) {
score += 256;
}
}
for (TypedDependency TDY : allTypedDependencies) {
IndexedWord dep = TDY.dep();
IndexedWord gov = TDY.gov();
GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 900;
}
GrammaticalRelation reln = TDY.reln();
if (reln.isApplicable(sentenceConstituencyParse1)) {
score += 256;
}
}
AtomicInteger runCount1 = new AtomicInteger(0);
sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
&& !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
return _item;
}).forEachOrdered((_item) -> {
runCount1.getAndIncrement();
});
});
score += runCount1.get() * 1500;
}
}
sentenceConstituencyParseList.clear();
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
simpleSMXlist.put(simpleSMXlist.size(), predictions);
simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
}
for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
score = simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
subtracter *= 25;
return subtracter;
}).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
double dot = nodeVector.dot(simpleSMX);
double elementSum = nodeVector.kron(simpleSMX).elementSum();
elementSum = Math.round(elementSum * 100.0) / 100.0;
if (dot < 0.1) {
score += 256;
});
taggedwordlist1.clear();
AtomicInteger runCount = new AtomicInteger(0);
taggedwordlist2.forEach((TGWList) -> {
TGWList.forEach((TaggedWord) -> {
if (tgwlistIndex.values().contains(TaggedWord.tag())) {
tgwlistIndex.values().remove(TaggedWord.tag());
runCount.getAndIncrement();
}
});
});
tgwlistIndex.clear();
taggedwordlist2.clear();
score += runCount.get() * 64;
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
try {
for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
}
if (elementSum < 0.1 && elementSum > 0.0) {
score += 1300;
} else if (elementSum > 0.1 && elementSum < 1.0) {
score -= 1100;
for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
for (Constituent consti : inT1notT2) {
for (Constituent consti1 : inT2notT1) {
if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
score += 64;
constiLabels.put(constiLabels.size(), consti.value());
}
}
}
GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
for (TypedDependency TDY1 : allTypedDependencies1) {
IndexedWord dep = TDY1.dep();
IndexedWord gov = TDY1.gov();
GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 900;
}
GrammaticalRelation reln = TDY1.reln();
if (reln.isApplicable(sentenceConstituencyParse)) {
score += 256;
}
}
for (TypedDependency TDY : allTypedDependencies) {
IndexedWord dep = TDY.dep();
IndexedWord gov = TDY.gov();
GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 900;
}
GrammaticalRelation reln = TDY.reln();
if (reln.isApplicable(sentenceConstituencyParse1)) {
score += 256;
}
}
AtomicInteger runCount1 = new AtomicInteger(0);
sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
&& !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
return _item;
}).forEachOrdered((_item) -> {
runCount1.getAndIncrement();
});
});
score += runCount1.get() * 1500;
}
}
} catch (Exception ex) {
System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage()+ "\n");
}
sentenceConstituencyParseList.clear();
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
simpleSMXlist.put(simpleSMXlist.size(), predictions);
simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
}
for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
score = simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
subtracter *= 25;
return subtracter;
}).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
double dot = nodeVector.dot(simpleSMX);
double elementSum = nodeVector.kron(simpleSMX).elementSum();
elementSum = Math.round(elementSum * 100.0) / 100.0;
if (dot < 0.1) {
score += 256;
}
if (elementSum < 0.1 && elementSum > 0.0) {
score += 1300;
} else if (elementSum > 0.1 && elementSum < 1.0) {
score -= 1100;
} else {
score -= 1424;
}
}
}
score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
List classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
List classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
int mainSentiment1 = 0;
int longest1 = 0;
int mainSentiment2 = 0;
int longest2 = 0;
for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
String partText = sentence.toString();
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
if (partText.length() > longest1) {
mainSentiment1 = sentiment;
longest1 = partText.length();
}
}
for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
String partText = sentence.toString();
if (partText.length() > longest2) {
mainSentiment2 = sentiment;
longest2 = partText.length();
}
}
if (longest1 != longest2) {
long deffLongest = longest1 > longest2 ? longest1 : longest2;
long deffshorter = longest1 < longest2 ? longest1 : longest2;
if (deffLongest >= (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
score += (deffLongest - deffshorter) * 200;
} else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
score += (deffLongest - deffshorter) * 200;
} else {
score -= 1424;
score -= (deffLongest - deffshorter) * 50;
}
}
}
score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
List classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
List classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
int mainSentiment1 = 0;
int longest1 = 0;
int mainSentiment2 = 0;
int longest2 = 0;
for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
String partText = sentence.toString();
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
if (partText.length() > longest1) {
mainSentiment1 = sentiment;
longest1 = partText.length();
int tokensCounter1 = 0;
int tokensCounter2 = 0;
int anotatorcounter1 = 0;
int anotatorcounter2 = 0;
int inflectedCounterPositive1 = 0;
int inflectedCounterPositive2 = 0;
int inflectedCounterNegative = 0;
int MarkedContinuousCounter1 = 0;
int MarkedContinuousCounter2 = 0;
int UnmarkedPatternCounter = 0;
ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
try {
List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
for (CoreMap sentence : sentences) {
for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
if (token.isInflected()) {
inflectedCounterPositive1++;
} else {
inflectedCounterNegative++;
}
strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
Collection<IMWEDesc.IPart> values = token.getPartMap().values();
IMWEDesc entry = token.getEntry();
MarkedContinuousCounter1 += entry.getMarkedContinuous();
UnmarkedPatternCounter += entry.getUnmarkedPattern();
for (IMWEDesc.IPart iPart : values) {
strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
}
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
}
}
tokensCounter1++;
}
anotatorcounter1++;
}
sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
for (CoreMap sentence : sentences) {
for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
if (token.isInflected()) {
inflectedCounterPositive2++;
} else {
inflectedCounterNegative--;
}
strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
Collection<IMWEDesc.IPart> values = token.getPartMap().values();
IMWEDesc entry = token.getEntry();
MarkedContinuousCounter2 += entry.getMarkedContinuous();
UnmarkedPatternCounter += entry.getUnmarkedPattern();
for (IMWEDesc.IPart iPart : values) {
strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
}
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
}
}
tokensCounter2++;
}
anotatorcounter2++;
}
} catch (Exception ex) {
System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
}
}
for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
String partText = sentence.toString();
if (partText.length() > longest2) {
mainSentiment2 = sentiment;
longest2 = partText.length();
for (String strTokenPos1 : strTokenEntryPOS1.values()) {
for (String strTokenPos2 : strTokenEntryPOS2.values()) {
if (strTokenPos1.equals(strTokenPos2)) {
score += 500;
}
}
}
}
if (longest1 != longest2) {
long deffLongest = longest1 > longest2 ? longest1 : longest2;
long deffshorter = longest1 < longest2 ? longest1 : longest2;
if (deffLongest >= (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
score += (deffLongest - deffshorter) * 200;
} else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
score += (deffLongest - deffshorter) * 200;
score += UnmarkedPatternCounter * 1600;
if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
: (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
}
for (String strTokeniPart1 : strTokenGetiPart1.values()) {
for (String strTokeniPart2 : strTokenGetiPart2.values()) {
if (strTokeniPart1.equals(strTokeniPart2)) {
score += 400;
}
}
}
for (String strTokenEntry1 : strTokenGetEntry1.values()) {
for (String strTokenEntry2 : strTokenGetEntry2.values()) {
if (strTokenEntry1.equals(strTokenEntry2)) {
score += 2500;
}
}
}
for (String strmapTag : ITokenMapTag1.values()) {
for (String strmapTag1 : ITokenMapTag2.values()) {
if (strmapTag.equals(strmapTag1)) {
score += 1450;
}
}
}
for (String strTokenForm1itr1 : strTokenForm1.values()) {
for (String strTokenForm1itr2 : strTokenForm2.values()) {
if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
score += 2600;
} else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
score += 500;
}
}
}
for (String strTokenStem : strTokenStems1.values()) {
for (String strTokenStem1 : strTokenStems2.values()) {
if (strTokenStem.equals(strTokenStem1)) {
score += 1500;
}
}
}
if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
}
if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
}
if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
score += (anotatorcounter1 + anotatorcounter2) * 400;
}
if (tokensCounter1 > 0 && tokensCounter2 > 0) {
score += (tokensCounter1 + tokensCounter2) * 400;
} else {
score -= (deffLongest - deffshorter) * 50;
score -= tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
}
LevenshteinDistance leven = new LevenshteinDistance(str, str1);
int SentenceScoreDiff = leven.computeLevenshteinDistance();
SentenceScoreDiff *= 15;
score -= SentenceScoreDiff;
} catch (Exception ex) {
System.out.println("SENTIMENT stacktrace Overall catch: " + ex.getMessage() + "\n");
}
List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
int tokensCounter1 = 0;
int tokensCounter2 = 0;
int anotatorcounter1 = 0;
int anotatorcounter2 = 0;
int inflectedCounterPositive1 = 0;
int inflectedCounterPositive2 = 0;
int inflectedCounterNegative = 0;
int MarkedContinuousCounter1 = 0;
int MarkedContinuousCounter2 = 0;
int UnmarkedPatternCounter = 0;
ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
for (CoreMap sentence : sentences) {
for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
if (token.isInflected()) {
inflectedCounterPositive1++;
} else {
inflectedCounterNegative++;
}
strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
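// only the final character of getEntry().toString() is stored on the next line, presumably the trailing POS marker of the entry's ID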
strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
Collection<IMWEDesc.IPart> values = token.getPartMap().values();
IMWEDesc entry = token.getEntry();
MarkedContinuousCounter1 += entry.getMarkedContinuous();
UnmarkedPatternCounter += entry.getUnmarkedPattern();
for (IMWEDesc.IPart iPart : values) {
strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
}
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
}
}
tokensCounter1++;
}
anotatorcounter1++;
}
sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
for (CoreMap sentence : sentences) {
for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
if (token.isInflected()) {
inflectedCounterPositive2++;
} else {
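// unlike the first pass, non-inflected tokens decrement here, so inflectedCounterNegative ends up as the net imbalance between the two sentences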
inflectedCounterNegative--;
}
strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
Collection<IMWEDesc.IPart> values = token.getPartMap().values();
IMWEDesc entry = token.getEntry();
MarkedContinuousCounter2 += entry.getMarkedContinuous();
UnmarkedPatternCounter += entry.getUnmarkedPattern();
for (IMWEDesc.IPart iPart : values) {
strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
}
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
}
}
tokensCounter2++;
}
anotatorcounter2++;
}
for (String strTokenPos1 : strTokenEntryPOS1.values()) {
for (String strTokenPos2 : strTokenEntryPOS2.values()) {
if (strTokenPos1.equals(strTokenPos2)) {
score += 500;
}
}
}
score += UnmarkedPatternCounter * 1600;
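// the ternary below adds the absolute difference between the two marked-continuous counts, scaled by 500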
if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
: (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
}
for (String strTokeniPart1 : strTokenGetiPart1.values()) {
for (String strTokeniPart2 : strTokenGetiPart2.values()) {
if (strTokeniPart1.equals(strTokeniPart2)) {
score += 400;
}
}
}
for (String strTokenEntry1 : strTokenGetEntry1.values()) {
for (String strTokenEntry2 : strTokenGetEntry2.values()) {
if (strTokenEntry1.equals(strTokenEntry2)) {
score += 2500;
}
}
}
for (String strmapTag : ITokenMapTag1.values()) {
for (String strmapTag1 : ITokenMapTag2.values()) {
if (strmapTag.equals(strmapTag1)) {
score += 1450;
}
}
}
for (String strTokenForm1itr1 : strTokenForm1.values()) {
for (String strTokenForm1itr2 : strTokenForm2.values()) {
if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
score += 2600;
} else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
score += 500;
}
}
}
for (String strTokenStem : strTokenStems1.values()) {
for (String strTokenStem1 : strTokenStems2.values()) {
if (strTokenStem.equals(strTokenStem1)) {
score += 1500;
}
}
}
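// the five overlap loops above compare every value pair, O(n * m) per map; see the frequency-map sketch after this method for a single-pass equivalent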
if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
}
if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
}
if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
score += (anotatorcounter1 + anotatorcounter2) * 400;
}
if (tokensCounter1 > 0 && tokensCounter2 > 0) {
score += (tokensCounter1 + tokensCounter2) * 400;
} else {
score -= tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
}
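// same Levenshtein penalty as in the block above: raw edit distance scaled by 15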
LevenshteinDistance leven = new LevenshteinDistance(str, str1);
int SentenceScoreDiff = leven.computeLevenshteinDistance();
SentenceScoreDiff *= 15;
score -= SentenceScoreDiff;
System.out.println("Final current score: " + score + "\nSentence 1: " + str + "\nSentence 2: " + str1 + "\n");
smxParam.setDistance(score);
return smxParam;
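The pairwise overlap bonuses in this method all share one shape: two nested loops that add a fixed weight for every equal pair of values. As a minimal sketch (an editor's illustration, not code from this commit), the same count can be obtained in a single pass with a frequency map; countExactMatches is a hypothetical helper name, and 1500 is the stem weight used above:

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class OverlapScoringSketch {

    // counts how many (a, b) pairs are equal; this is exactly what the
    // nested for-loops in the method above compute, but in O(n + m)
    static int countExactMatches(Collection<String> a, Collection<String> b) {
        Map<String, Integer> freq = new HashMap<>();
        for (String s : a) {
            freq.merge(s, 1, Integer::sum); // frequency of each distinct string in a
        }
        int matches = 0;
        for (String s : b) {
            matches += freq.getOrDefault(s, 0); // one match per equal element of a
        }
        return matches;
    }

    public static void main(String[] args) {
        List<String> stems1 = Arrays.asList("run", "walk");
        List<String> stems2 = Arrays.asList("run", "run");
        // two equal (a, b) pairs, weighted like the stem loop above
        System.out.println(countExactMatches(stems1, stems2) * 1500); // prints 3000
    }
}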

View File

@@ -15,8 +15,9 @@ screen -X -S (number1) quit
package PresentationLayer;
import FunctionLayer.CustomError;
-import FunctionLayer.MYSQLDatahandler;
+import FunctionLayer.Datahandler;
import FunctionLayer.MessageResponseHandler;
+import FunctionLayer.PipelineJMWESingleton;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
@@ -35,22 +36,25 @@ public class DiscordHandler {
public static void main(String[] args) {
new Thread(() -> {
try {
-MYSQLDatahandler.instance.initiateMYSQL();
+Datahandler.instance.initiateMYSQL();
+PipelineJMWESingleton.getINSTANCE();
System.out.println("finished initiating MYSQL");
} catch (SQLException | IOException ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
}).start();
-MYSQLDatahandler.instance.shiftReduceParserInitiate();
-MYSQLDatahandler.instance.instantiateExecutor();
-MYSQLDatahandler.instance.updateStringCache();
-MYSQLDatahandler.instance.instantiateAnnotationMap();
-if (MYSQLDatahandler.instance.getstringCacheSize() != 0) {
-while (MYSQLDatahandler.instance.getlHMSMXSize() * MYSQLDatahandler.instance.getlHMSMXSize() * 5
-< (MYSQLDatahandler.instance.getstringCacheSize()
-* MYSQLDatahandler.instance.getstringCacheSize())
-- MYSQLDatahandler.instance.getstringCacheSize()) {
-MYSQLDatahandler.instance.checkIfUpdateMatrixes();
+Datahandler.instance.shiftReduceParserInitiate();
+Datahandler.instance.instantiateExecutor();
+Datahandler.instance.instantiateAnnotationMap();
+Datahandler.instance.addHLstatsMessages();
+Datahandler.instance.updateStringCache();
+//order matters
+if (Datahandler.instance.getstringCacheSize() != 0) {
+while (Datahandler.instance.getlHMSMXSize() * Datahandler.instance.getlHMSMXSize() * 2.5
+< (Datahandler.instance.getstringCacheSize()
+* Datahandler.instance.getstringCacheSize())
+- Datahandler.instance.getstringCacheSize()) {
+Datahandler.instance.checkIfUpdateMatrixes();
}
}
String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
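The startup loop above encodes a warm-up heuristic: checkIfUpdateMatrixes() keeps being called while the similarity matrix (lHMSMX) holds markedly fewer entries than the number of ordered sentence pairs in the string cache. With n cached strings there are n * n - n such pairs, and the loop runs while lHMSMXSize squared times 2.5 stays below that. For example, at n = 100 there are 9,900 pairs, so updates continue until the matrix holds roughly 63 entries (63 * 63 * 2.5 is about 9,900). This commit loosens the factor from 5 to 2.5, so the loop now runs longer before the matrix is considered sufficiently populated.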
@@ -84,15 +88,15 @@ public class DiscordHandler {
MessageResponseHandler.getMessage(strresult);
new Thread(() -> {
try {
-MYSQLDatahandler.instance.checkIfUpdateStrings();
-MYSQLDatahandler.instance.checkIfUpdateMatrixes();
+Datahandler.instance.checkIfUpdateStrings(false);
+Datahandler.instance.checkIfUpdateMatrixes();
} catch (CustomError ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
}).start();
}
if (event.getMessage().getMentionedUsers().contains(api.getYourself())
-|| event.getServerTextChannel().get().toString().contains("minor-test")) {
+|| event.getServerTextChannel().get().toString().contains("general-autism")) {
String ResponseStr;
try {
ResponseStr = MessageResponseHandler.selectReponseMessage(event.getMessage().toString());