pipeline property updates, untokenizable strings removed, matrix updater reworked; CoreNLP's internal thread divider is far more effective for mass annotation than simple streaming

jenzur 2019-03-26 21:38:03 +01:00
parent 17ef94ef07
commit 511eb0e492
3 changed files with 110 additions and 123 deletions
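The headline claim, that CoreNLP's internal thread divider is far more effective for mass annotation than simple streaming, rests on StanfordCoreNLP.annotate(Iterable<Annotation>): handed a whole collection of blank Annotation objects, the pipeline spreads the work across the pool sized by its "threads" property instead of processing one document per call. A minimal sketch of the two styles (class name hypothetical, annotator list abbreviated, the 25-thread figure taken from this commit):

    import edu.stanford.nlp.pipeline.Annotation;
    import edu.stanford.nlp.pipeline.StanfordCoreNLP;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Properties;

    public class BatchAnnotationSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("annotators", "tokenize,ssplit,pos");
            props.setProperty("threads", "25"); // pool used by the collection-level annotate call
            StanfordCoreNLP pipeline = new StanfordCoreNLP(props);

            List<Annotation> batch = new ArrayList<>();
            for (String str : new String[]{"first message", "second message"}) {
                batch.add(new Annotation(str)); // blank annotation, nothing computed yet
            }

            // simple streaming: each call annotates a single document
            // for (Annotation anno : batch) { pipeline.annotate(anno); }

            // mass annotation: one call, CoreNLP divides the batch across its threads
            pipeline.annotate(batch);
        }
    }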

Datahandler.java

@@ -29,6 +29,7 @@ import java.io.StringReader;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -56,7 +57,7 @@ public class Datahandler {
     public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
     public static Datahandler instance = new Datahandler();
     private volatile boolean refreshMatrixFromDB;
-    private static int secondaryIterator = 0;
+    private static volatile int secondaryIterator = 0;
     private final ConcurrentMap<Integer, String> stringCache;
     private static ConcurrentMap<String, Annotation> pipelineAnnotationCache;
     private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
@@ -72,7 +73,7 @@ public class Datahandler {
     private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
     private static MaxentTagger tagger;
     private static ShiftReduceParser model;
-    private static String[] options = {"-maxLength", "100"};
+    private static String[] options = {"-maxLength", "90"};
     private static Properties props = new Properties();
     private static Properties propsSentiment = new Properties();
     private static GrammaticalStructureFactory gsf;
@@ -112,8 +113,12 @@ public class Datahandler {
             propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
             propsSentiment.setProperty("ner.model", nerModel);
             propsSentiment.setProperty("sentiment.model", sentimentModel);
-            propsSentiment.setProperty("parse.maxlen", "100");
-            propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,depparse,sentiment"); //coref too expensive memorywise
+            propsSentiment.setProperty("parse.maxlen", "90");
+            propsSentiment.setProperty("threads", "25");
+            propsSentiment.setProperty("pos.maxlen", "90");
+            propsSentiment.setProperty("tokenize.maxlen", "90");
+            propsSentiment.setProperty("ssplit.maxlen", "90");
+            propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment"); //coref too expensive memorywise, does it need depparse?
             propsSentiment.setProperty("tokenize.options", "untokenizable=firstDelete");
             pipelineSentiment = new StanfordCoreNLP(propsSentiment);
             tagger = new MaxentTagger(taggerPath);
@@ -121,8 +126,13 @@ public class Datahandler {
         }).start();
         new Thread(() -> {
             props.setProperty("parse.model", shiftReduceParserPath);
-            props.setProperty("parse.maxlen", "100");
+            props.setProperty("parse.maxlen", "90");
             props.setProperty("parse.binaryTrees", "true");
+            props.setProperty("threads", "25");
+            props.setProperty("pos.maxlen", "90");
+            props.setProperty("tokenize.maxlen", "90");
+            props.setProperty("ssplit.maxlen", "90");
+            props.setProperty("lemma.maxlen", "90");
             props.setProperty("annotators", "tokenize,ssplit,pos,lemma,parse");
             props.setProperty("tokenize.options", "untokenizable=firstDelete");
             pipeline = new StanfordCoreNLP(props);
@@ -205,9 +215,17 @@ public class Datahandler {
     }
     public void addHLstatsMessages() {
-        ConcurrentMap<Integer, String> hlStatsMessages = DataMapper.getHLstatsMessages();
+        ConcurrentMap<Integer, String> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
         ConcurrentMap<Integer, String> strCacheLocal = stringCache;
-        //might want a hardcap
+        int hardcap = 8500;
+        int ij = 0;
+        for (String str : DataMapper.getHLstatsMessages().values()) {
+            hlStatsMessages.put(ij, str);
+            ij++;
+            if (ij > hardcap) {
+                break;
+            }
+        }
         hlStatsMessages.values().parallelStream().forEach(str -> {
             if (!str.startsWith("!")) {
                 String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
@@ -229,19 +247,26 @@ public class Datahandler {
     public void instantiateAnnotationMap() {
         if (!stringCache.isEmpty()) {
+            ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap();
             stringCache.values().parallelStream().forEach(str -> {
-                System.out.println("str annotation pipeline pipelinesentiment: " + str + "\n");
                 Annotation strAnno = new Annotation(str);
-                pipeline.annotate(strAnno);
-                pipelineAnnotationCache.put(str, strAnno);
+                Annotationspipeline.put(str, strAnno);
                 Annotation strAnno2 = new Annotation(str);
-                pipelineSentiment.annotate(strAnno2);
-                pipelineSentimentAnnotationCache.put(str, strAnno2);
+                AnnotationspipelineSentiment.put(str, strAnno2);
+            });
+            pipeline.annotate(Annotationspipeline.values());
+            pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
+            Annotationspipeline.entrySet().forEach(pipelineEntry -> {
+                pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
+            });
+            AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
+                pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
             });
         }
     }
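A note on the rewritten instantiateAnnotationMap above: pipeline.annotate(Annotationspipeline.values()) mutates the blank Annotation objects in place, so the entrySet() loops that follow copy fully annotated documents into the shared caches. Dropping the per-string println also matters once the cache holds thousands of entries.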
-    public synchronized void checkIfUpdateMatrixes() {
+    public synchronized void updateMatrixes() {
         refreshMatrixFromDB = false;
         if (stopwatch1.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS1) {
             refreshMatrixFromDB = true;
@@ -276,57 +301,54 @@ public class Datahandler {
             selectUpdate = secondaryIterator;
             secondaryIterator++;
         }
-        ConcurrentMap<Integer, String> strIndexNavigator = new MapMaker().concurrencyLevel(2).makeMap();
-        String get = stringCachelocal.getOrDefault(selectUpdate, null);
-        if (get == null) {
-            get = stringCachelocal.get(new Random().nextInt(stringCachelocal.size() - 1));
-        }
-        strIndexNavigator.put(0, get);
+        final String getStringCacheStr = stringCachelocal.getOrDefault(selectUpdate, null);
         ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
         ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
-        strIndexNavigator.values().forEach((str) -> {
-            stringCachelocal.values().stream().filter((str1) -> (!str.equals(str1))).forEachOrdered((str1) -> {
-                boolean present = false;
-                LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(str, null);
-                if (orDefault != null) {
-                    for (String strkey : orDefault.keySet()) {
-                        if (strkey.equals(str1)) {
-                            present = true;
-                            break;
-                        }
-                    }
-                }
-                if (!present) {
-                    orDefault = lHMSMX.getOrDefault(str1, null);
-                    if (orDefault != null) {
-                        for (String strkey : orDefault.keySet()) {
-                            if (strkey.equals(str)) {
-                                present = true;
-                                break;
-                            }
-                        }
-                    }
-                }
-                if (!present) {
-                    LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(str, null);
-                    if (orDefault1 == null) {
-                        orDefault1 = new LinkedHashMap<String, Double>();
-                    }
-                    orDefault1.put(str1, 0.0);
-                    lHMSMX.put(str, orDefault1);
-                    SimilarityMatrix SMX = new SimilarityMatrix(str, str1);
-                    Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, SMX, jmweAnnotationCache.get(str),
-                            jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(str), pipelineAnnotationCache.get(str1),
-                            pipelineSentimentAnnotationCache.get(str), pipelineSentimentAnnotationCache.get(str1));
-                    futures.put(futures.size() + 1, executor.submit(worker));
-                }
-            });
+        stringCachelocal.values().forEach((str1) -> {
+            boolean present = false;
+            LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(getStringCacheStr, null);
+            if (orDefault != null) {
+                Iterator<String> strDefaultsItr = orDefault.keySet().iterator();
+                while (strDefaultsItr.hasNext()) {
+                    String strkey = strDefaultsItr.next();
+                    if (strkey.equals(str1)) {
+                        present = true;
+                        break;
+                    }
+                }
+            }
+            if (!present) {
+                orDefault = lHMSMX.getOrDefault(str1, null);
+                if (orDefault != null) {
+                    Iterator<String> strDefaultsItr = orDefault.keySet().iterator();
+                    while (strDefaultsItr.hasNext()) {
+                        String strkey = strDefaultsItr.next();
+                        if (strkey.equals(getStringCacheStr)) {
+                            present = true;
+                            break;
+                        }
+                    }
+                }
+            }
+            if (!present) {
+                LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(getStringCacheStr, null);
+                if (orDefault1 == null) {
+                    orDefault1 = new LinkedHashMap<String, Double>();
+                }
+                orDefault1.put(str1, 0.0);
+                lHMSMX.put(getStringCacheStr, orDefault1);
+                SimilarityMatrix SMX = new SimilarityMatrix(getStringCacheStr, str1);
+                Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(getStringCacheStr, str1, SMX, jmweAnnotationCache.get(getStringCacheStr),
+                        jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(getStringCacheStr), pipelineAnnotationCache.get(str1),
+                        pipelineSentimentAnnotationCache.get(getStringCacheStr), pipelineSentimentAnnotationCache.get(str1));
+                futures.put(futures.size() + 1, executor.submit(worker));
+            }
         });
System.out.println("finished worker assignment, futures size: " + futures.size() + "\n"); System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
for (Future<SimilarityMatrix> future : futures.values()) { futures.values().parallelStream().forEach((future) -> {
SimilarityMatrix SMX = new SimilarityMatrix("", ""); SimilarityMatrix SMX = new SimilarityMatrix("", "");
try { try {
SMX = future.get(20, TimeUnit.SECONDS); SMX = future.get(5, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException ex) { } catch (InterruptedException | ExecutionException | TimeoutException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
SMX = null; SMX = null;
@@ -337,7 +359,7 @@ public class Datahandler {
                 lHMSMX.put(SMX.getPrimaryString(), getFuture);
                 matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
             }
-        }
+        });
         try {
             if (!matrixUpdateList.isEmpty()) {
                 DataMapper.insertSementicMatrixes(matrixUpdateList);
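The collection step above now waits at most 5 seconds per future instead of 20, so one stuck SentimentAnalyzerTest worker no longer stalls the whole matrix update; the trade-off is that a result not ready in time is logged and skipped. A minimal sketch of this bounded-wait pattern (helper name hypothetical):

    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    final class BoundedWait {
        // wait a fixed budget for a worker; a straggler is skipped, not awaited forever
        static <T> T getOrNull(Future<T> future, long seconds) {
            try {
                return future.get(seconds, TimeUnit.SECONDS);
            } catch (InterruptedException | ExecutionException | TimeoutException ex) {
                return null; // caller treats null as "no result", as updateMatrixes() does
            }
        }
    }

One caveat: a timed-out get only abandons the wait; the submitted task keeps occupying an executor thread unless future.cancel(true) is also called.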
@@ -357,8 +379,8 @@ public class Datahandler {
         str = filterContent(str);
         str = removeSlacks(str);
         System.out.println("finished removeSlacks \n" + str.size() + "\n");
-        str = verifyCalculationFitness(str);
-        System.out.println("Check if updateString str size POST: " + str.size() + "\n");
+        str = annotationCacheUpdate(str);
+        System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n");
         try {
             DataMapper.InsertMYSQLStrings(str);
         } catch (CustomError ex) {
@@ -694,67 +716,29 @@ public class Datahandler {
         return strreturn;
     }
-    private ConcurrentMap<Integer, String> verifyCalculationFitness(ConcurrentMap<Integer, String> strmap) {
-        ConcurrentMap<Integer, String> returnmap = new MapMaker().concurrencyLevel(2).makeMap();
-        ConcurrentMap<String, Annotation> pipelineAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
-        ConcurrentMap<String, Annotation> pipelineSentimentAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
-        ConcurrentMap<String, Annotation> jmweAnnotateCachelcl = new MapMaker().concurrencyLevel(2).makeMap();
+    private ConcurrentMap<Integer, String> annotationCacheUpdate(ConcurrentMap<Integer, String> strmap) {
         ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values());
-        for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
-            jmweAnnotateCachelcl.put(jmweitr.getKey(), jmweitr.getValue());
-        }
-        strmap.values().parallelStream().forEach(strCache -> {
-            Annotation strAnno = new Annotation(strCache);
-            pipeline.annotate(strAnno);
-            pipelineAnnotateCachelcl.put(strCache, strAnno);
-            Annotation strAnno2 = new Annotation(strCache);
-            pipelineSentiment.annotate(strAnno2);
-            pipelineSentimentAnnotateCachelcl.put(strCache, strAnno2);
-            System.out.println("normal annotating strCache: " + strCache + "\n");
-        });
-        final ConcurrentMap<Integer, String> allStrings;
-        if (!stringCache.isEmpty()) {
-            allStrings = stringCache;
-        } else {
-            allStrings = strmap;
-        }
-        ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
-        strmap.values().parallelStream().forEach((str) -> {
-            for (String str1 : allStrings.values()) {
-                Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, str1, new SimilarityMatrix(str, str1),
-                        jmweAnnotateCachelcl.get(str), jmweAnnotateCachelcl.get(str1), pipelineAnnotateCachelcl.get(str),
-                        pipelineAnnotateCachelcl.get(str1), pipelineSentimentAnnotateCachelcl.get(str),
-                        pipelineSentimentAnnotateCachelcl.get(str1));
-                futures.put(futures.size() + 1, executor.submit(worker));
-                System.out.println("futures size in verify calcs: " + futures.size() + "\n");
-            }
-        });
-        futures.values().parallelStream().forEach((future) -> {
-            SimilarityMatrix get;
-            //turning from 20 to 5 might be risky?
-            try {
-                get = future.get(5, TimeUnit.SECONDS);
-                String addStr = get.getPrimaryString();
-                returnmap.put(returnmap.size() + 1, addStr);
-                System.out.println("returnmap adding: " + addStr + "\n");
-            } catch (InterruptedException | ExecutionException | TimeoutException ex) {
-                System.out.println("failed verification: " + ex.getMessage() + "\n");
-            }
-        });
-        jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(returnmap.values());
         for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
             jmweAnnotationCache.put(jmweitr.getKey(), jmweitr.getValue());
         }
-        returnmap.values().parallelStream().forEach(strCache -> {
-            stringCache.put(stringCache.size() + 1, strCache);
-            System.out.println("str annotation pipeline pipelinesentiment: " + strCache + "\n");
-            Annotation strAnno = new Annotation(strCache);
-            pipeline.annotate(strAnno);
-            pipelineAnnotationCache.put(strCache, strAnno);
-            Annotation strAnno2 = new Annotation(strCache);
-            pipelineSentiment.annotate(strAnno2);
-            pipelineSentimentAnnotationCache.put(strCache, strAnno2);
-        });
-        return returnmap;
+        ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap();
+        strmap.values().parallelStream().forEach(str -> {
+            Annotation strAnno = new Annotation(str);
+            Annotationspipeline.put(str, strAnno);
+            Annotation strAnno2 = new Annotation(str);
+            AnnotationspipelineSentiment.put(str, strAnno2);
+            stringCache.put(stringCache.size() + 1, str);
+        });
+        System.out.println("pre iterator annotation update \n");
+        pipeline.annotate(Annotationspipeline.values());
+        pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
+        Annotationspipeline.entrySet().forEach(pipelineEntry -> {
+            pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
+        });
+        AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
+            pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
+        });
+        return strmap;
     }
 }

PipelineJMWESingleton.java

@@ -18,11 +18,8 @@ import edu.mit.jmwe.detect.MoreFrequentAsMWE;
 import edu.mit.jmwe.detect.ProperNouns;
 import edu.mit.jmwe.index.IMWEIndex;
 import edu.mit.jmwe.index.MWEIndex;
-import edu.stanford.nlp.ling.CoreAnnotation;
 import edu.stanford.nlp.ling.CoreAnnotations;
-import edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation;
 import edu.stanford.nlp.ling.CoreLabel;
-import edu.stanford.nlp.ling.CoreLabel.GenericAnnotation;
 import edu.stanford.nlp.ling.JMWEAnnotation;
 import edu.stanford.nlp.pipeline.Annotation;
 import edu.stanford.nlp.pipeline.StanfordCoreNLP;
@@ -31,6 +28,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Date;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ConcurrentMap;
@@ -44,7 +42,6 @@ public class PipelineJMWESingleton {
     //if not needed to be volatile dont make it, increases time
     public volatile static PipelineJMWESingleton INSTANCE;
-    private volatile static int incrementer = 0;
     private static StanfordCoreNLP localNLP = initializeJMWE();
     private static String underscoreSpaceReplacement;
@@ -66,20 +63,21 @@ public class PipelineJMWESingleton {
         try {
             index.open();
         } catch (IOException e) {
-            throw new RuntimeException("unable to open IMWEIndex index");
+            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
         }
         IMWEDetector detector = getDetector(index, detectorName);
         ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
+        Date startDate = new Date();
         strvalues.parallelStream().forEach(str -> {
             Annotation annoStr = new Annotation(str);
-            localNLP.annotate(annoStr);
+            returnAnnotations.put(str, annoStr);
+        });
+        localNLP.annotate(returnAnnotations.values());
+        returnAnnotations.values().parallelStream().forEach(annoStr -> {
             for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
                 List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
                 sentence.set(JMWEAnnotation.class, mwes);
             }
-            returnAnnotations.put(str, annoStr);
-            System.out.println("incrementer: " + incrementer + "\n");
-            incrementer++;
         });
         index.close();
         return returnAnnotations;
@@ -90,6 +88,11 @@ public class PipelineJMWESingleton {
         propsJMWE = new Properties();
         propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
         propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
+        propsJMWE.setProperty("threads", "25");
+        propsJMWE.setProperty("pos.maxlen", "90");
+        propsJMWE.setProperty("tokenize.maxlen", "90");
+        propsJMWE.setProperty("ssplit.maxlen", "90");
+        propsJMWE.setProperty("lemma.maxlen", "90");
         underscoreSpaceReplacement = "-";
         localNLP = new StanfordCoreNLP(propsJMWE);
         System.out.println("finished singleton constructor \n");

DiscordHandler.java

@@ -52,11 +52,11 @@ public class DiscordHandler {
         Datahandler.instance.updateStringCache();
         //order matters
         if (Datahandler.instance.getstringCacheSize() != 0) {
-            while (Datahandler.instance.getlHMSMXSize() * Datahandler.instance.getlHMSMXSize() * 2.5
+            while (Datahandler.instance.getlHMSMXSize() * Datahandler.instance.getlHMSMXSize() * 3
                     < (Datahandler.instance.getstringCacheSize()
                     * Datahandler.instance.getstringCacheSize())
                     - Datahandler.instance.getstringCacheSize()) {
-                Datahandler.instance.checkIfUpdateMatrixes();
+                Datahandler.instance.updateMatrixes();
             }
         }
         String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
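For scale on the startup loop above: covering every ordered pair of n cached strings needs on the order of n squared minus n similarity entries, and raising the multiplier from 2.5 to 3 makes the loop condition fail sooner. With n = 100, the old bound 2.5 * m^2 < 9900 kept calling updateMatrixes() until the matrix map held m = 63 entries, while the new bound 3 * m^2 < 9900 stops at m = 58 (3 * 57^2 = 9747 still loops, 3 * 58^2 = 10092 does not): a slightly sparser matrix in exchange for reaching login sooner.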
@@ -91,7 +91,7 @@ public class DiscordHandler {
             new Thread(() -> {
                 try {
                     Datahandler.instance.checkIfUpdateStrings(false);
-                    Datahandler.instance.checkIfUpdateMatrixes();
+                    Datahandler.instance.updateMatrixes();
                 } catch (CustomError ex) {
                     Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
                 }