calculation updates

jenzur 2019-04-05 13:29:20 +02:00
parent 276730418d
commit 376e939980
3 changed files with 241 additions and 56 deletions

@@ -217,7 +217,7 @@ public class Datahandler {
public void addHLstatsMessages() {
ConcurrentMap<Integer, String> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strCacheLocal = stringCache;
int hardcap = 55000;
int hardcap = 10; //55000
int ij = 0;
for (String str : DataMapper.getHLstatsMessages().values()) {
hlStatsMessages.put(ij, str);

@@ -144,6 +144,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
}
ConcurrentMap<Integer, Integer> alltypeDepsSizeMap = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> summationMap = new MapMaker().concurrencyLevel(2).makeMap();
for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
int constiRelationsize = 0;
Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
@@ -173,12 +175,15 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
int relationApplicable1 = 0;
int relationApplicable2 = 0;
int grammaticalRelation1 = 0;
int grammaticalRelation2 = 0;
for (TypedDependency TDY1 : allTypedDependencies1) {
IndexedWord dep = TDY1.dep();
IndexedWord gov = TDY1.gov();
GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 1900;
grammaticalRelation1++;
}
GrammaticalRelation reln = TDY1.reln();
if (reln.isApplicable(sentenceConstituencyParse)) {
@@ -192,6 +197,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
score += 900;
grammaticalRelation2++;
}
GrammaticalRelation reln = TDY.reln();
if (reln.isApplicable(sentenceConstituencyParse1)) {
@@ -199,13 +205,40 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
relationApplicable2++;
}
}
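// penalize pairs where applicable grammatical relations were counted for only one of the two parses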
if ((grammaticalRelation1 == 0 && grammaticalRelation2 > 0) || (grammaticalRelation2 == 0 && grammaticalRelation1 > 0)) {
score -= 3450;
}
if (!allTypedDependencies.isEmpty() || !allTypedDependencies1.isEmpty()) {
if (relationApplicable1 > 0 && relationApplicable2 > 0) {
int allTypeDep1 = allTypedDependencies.size();
int allTypeDep2 = allTypedDependencies1.size();
if (allTypeDep1 <= allTypeDep2 * 5 && allTypeDep2 <= allTypeDep1 * 5) {
if (!alltypeDepsSizeMap.values().contains(allTypeDep1)) {
score += allTypeDep1 * 600;
alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep1);
}
if (!alltypeDepsSizeMap.values().contains(allTypeDep2)) {
score += allTypeDep2 * 600;
alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep2);
}
}
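// when both parses have at least five typed dependencies, score the difference of their squared sizes: values between 50 and 75 add to the score, other unseen values subtract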
if (allTypeDep1 >= 5 && allTypeDep2 >= 5) {
int largerTypeDep = allTypeDep1 > allTypeDep2 ? allTypeDep1 : allTypeDep2;
int smallerTypeDep = allTypeDep1 < allTypeDep2 ? allTypeDep1 : allTypeDep2;
int summation = largerTypeDep * largerTypeDep - smallerTypeDep * smallerTypeDep;
if (summation > 50 && summation < 75) {
score += summation * 80;
} else if (!summationMap.values().contains(summation)) {
score -= largerTypeDep * 500;
summationMap.put(summationMap.size() + 1, summation);
}
}
if (relationApplicable1 > 0 && relationApplicable2 > 0 && relationApplicable1 == relationApplicable2
&& grammaticalRelation1 > 0 && grammaticalRelation2 > 0 && grammaticalRelation1 == grammaticalRelation2) {
score += 3500;
} else {
score -= allTypedDependencies.size() > allTypedDependencies1.size()
? (allTypedDependencies.size() - allTypedDependencies1.size()) * (allTypedDependencies.size() * 160)
: (allTypedDependencies1.size() - allTypedDependencies.size()) * (allTypedDependencies1.size() * 160);
score += allTypeDep1 > allTypeDep2
? (allTypeDep2 - allTypeDep1) * (allTypeDep2 * 50)
: (allTypeDep1 - allTypeDep2) * (allTypeDep1 * 50);
}
}
AtomicInteger runCount1 = new AtomicInteger(0);
@@ -221,6 +254,32 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
score += runCount1.get() * 1500;
}
}
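// penalize a wide spread (3x or more) between the smallest and largest recorded dependency counts, and likewise for the recorded summations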
int typeSizeSmallest = 100;
int typeSizeLargest = 0;
for (Integer i : alltypeDepsSizeMap.values()) {
if (i > typeSizeLargest) {
typeSizeLargest = i;
}
if (i < typeSizeSmallest) {
typeSizeSmallest = i;
}
}
if (typeSizeLargest >= typeSizeSmallest * 3) {
score -= typeSizeLargest * 1600;
}
typeSizeLargest = 0;
typeSizeSmallest = 100;
for (int i : summationMap.values()) {
if (i > typeSizeLargest) {
typeSizeLargest = i;
}
if (i < typeSizeSmallest) {
typeSizeSmallest = i;
}
}
if (typeSizeLargest >= typeSizeSmallest * 3) {
score -= typeSizeLargest * 1600;
}
} catch (Exception ex) {
System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage() + "\n");
}
@@ -239,6 +298,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Double> elementSumMap = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Double> dotSumMap = new MapMaker().concurrencyLevel(2).makeMap();
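// dotSumMap and elementSumMap remember dot products and element sums that have already been scored, so repeated values below get a flat penalty instead of being re-scored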
for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
@@ -343,20 +404,30 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
elementSum = Math.round(elementSum * 100.0) / 100.0;
elementSumCounter.put(elementSumCounter.size() + 1, elementSum);
dotMap.put(dotMap.size() + 1, dot);
if (dot < 0.000) {
score += dot * 1500;
} else if (dot < 0.1) {
score += 256;
}
if (dot > 0.50) {
score -= 2400;
}
if (elementSum < 0.01 && elementSum > 0.00) {
score += 3300;
} else if (elementSum > 0.1 && elementSum < 0.2) {
score += 1100;
if (!dotSumMap.values().contains(dot)) {
if (dot < 0.000) {
score += dot * 1500;
} else if (dot < 0.1) {
score += 256;
}
if (dot > 0.50) {
score -= 2400;
}
dotSumMap.put(dotSumMap.size() + 1, dot);
} else {
score -= elementSum * 1424;
score -= 750;
}
if (!elementSumMap.values().contains(elementSum)) {
if (elementSum < 0.01 && elementSum > 0.00) {
score += 3300;
} else if (elementSum > 0.1 && elementSum < 0.2) {
score += 1100;
} else {
score -= elementSum * 1424;
}
elementSumMap.put(elementSumMap.size() + 1, elementSum);
} else {
score -= 750;
}
}
for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
@@ -372,18 +443,28 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
elementSum = Math.round(elementSum * 100.0) / 100.0;
elementSumCounter.put(elementSumCounter.size() + 1, elementSum);
dotMap.put(dotMap.size() + 1, dot);
if (dot < 0.1) {
score += 256;
}
if (dot > 0.50) {
score -= 2400;
}
if (elementSum < 0.01 && elementSum > 0.00) {
score += 1300;
} else if (elementSum > 0.1 && elementSum < 1.0) {
score += 1100;
if (!dotSumMap.values().contains(dot)) {
if (dot < 0.1) {
score += 256;
}
if (dot > 0.50) {
score -= 2400;
}
dotSumMap.put(dotSumMap.size() + 1, dot);
} else {
score -= elementSum * 1424;
score -= 750;
}
if (!elementSumMap.values().contains(elementSum)) {
if (elementSum < 0.01 && elementSum > 0.00) {
score += 1300;
} else if (elementSum > 0.1 && elementSum < 1.0) {
score += 1100;
} else {
score -= elementSum * 1424;
}
elementSumMap.put(elementSumMap.size() + 1, elementSum);
} else {
score -= 750;
}
}
}
@@ -409,16 +490,23 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
OptionalDouble minvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).min();
OptionalDouble maxvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).max();
Double elementsVariance = maxvalueElements.getAsDouble() - minvalueElements.getAsDouble();
if (elementsVariance < 0.05 && maxvalueElements.getAsDouble() > 0.0 && minvalueElements.getAsDouble() > 0.0 && elementsVariance > 0.000) {
if (elementsVariance == 0.0) {
score -= 550;
} else if (elementsVariance < 0.02 && elementsVariance > -0.01) {
score += 3500;
} else if (elementsVariance < 0.5 && maxvalueElements.getAsDouble() > 0.0 && minvalueElements.getAsDouble() > 0.0 && elementsVariance > 0.000) {
score += 3500;
} else if (minvalueElements.getAsDouble() < 0.0 && minvalueElements.getAsDouble() - maxvalueElements.getAsDouble() < 0.50) {
score -= 2500;
}
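// penalize mismatches in sentence counts between the two sentiment lists and in the lengths of the raw classifier outputs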
score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
List classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
List classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
int mainSentiment1 = 0;
int longest1 = 0;
int mainSentiment2 = 0;
@@ -449,13 +537,13 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
if (deffLongest > deffshorter * 5) {
score -= 5500;
} else if (deffLongest < (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
score += (deffLongest - deffshorter) * 60;
} else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
score += (deffLongest - deffshorter) * 120;
} else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
score += (deffLongest - deffshorter) * 20;
} else if (deffLongest - deffshorter < 2) {
score += (deffLongest + deffshorter) * 40;
} else if (deffshorter * 2 >= deffLongest && deffshorter * 2 < deffLongest + 5) {
score += deffLongest * 160;
score += deffLongest * 20;
} else {
score -= (deffLongest - deffshorter) * 50;
}
@@ -487,6 +575,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> entryCounts1 = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, Integer> entryCounts2 = new MapMaker().concurrencyLevel(2).makeMap();
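// entryCounts1 and entryCounts2 collect the per-entry counts from the two JMWE annotations; they are compared against each other further down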
try {
List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
for (CoreMap sentence : sentences) {
@@ -508,6 +598,9 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
}
for (int counts : entry.getCounts()) {
entryCounts1.put(entryCounts1.size() + 1, counts);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
@@ -539,6 +632,9 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
for (String strPostPrefix : entry.getPOS().getPrefixes()) {
strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
}
for (int counts : entry.getCounts()) {
entryCounts2.put(entryCounts2.size() + 1, counts);
}
for (IToken tokens : token.getTokens()) {
ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
for (String strtoken : tokens.getStems()) {
@@ -553,61 +649,145 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
} catch (Exception ex) {
System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
}
for (String strTokenPos1 : strTokenEntryPOS1.values()) {
for (String strTokenPos2 : strTokenEntryPOS2.values()) {
if (strTokenPos1.equals(strTokenPos2)) {
score += 500;
int entry1 = entryCounts1.values().size();
int entry2 = entryCounts2.values().size();
if ((entry1 >= entry2 * 5 && entry2 > 0) || (entry2 >= entry1 * 5 && entry1 > 0)) {
score -= entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450;
} else if (entry1 >= entry2 * 50 || entry2 >= entry1 * 50) {
score -= entry1 > entry2 ? entry1 * 180 : entry2 * 180;
} else if (entry1 >= entry2 * 2 || entry2 >= entry1 * 2) {
score += entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450;
} else if (entry1 == 0 && entry2 == 0) {
score -= 4500;
} else if (entry1 == entry2) {
score += 5500;
}
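// reward occurrence counts that appear in both entryCounts maps, scoring each distinct value only once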
ConcurrentMap<Integer, Integer> countsMap = new MapMaker().concurrencyLevel(2).makeMap();
for (int counts : entryCounts1.values()) {
for (int counts1 : entryCounts2.values()) {
if (counts == counts1 && counts > 0 && !countsMap.values().contains(counts)) {
score += counts * 250;
countsMap.put(countsMap.size() + 1, counts);
}
}
}
if (strTokenEntryPOS1.values().size() > 1 && strTokenEntryPOS2.values().size() > 1) {
for (String strTokenPos1 : strTokenEntryPOS1.values()) {
for (String strTokenPos2 : strTokenEntryPOS2.values()) {
if (strTokenPos1.equals(strTokenPos2)) {
score += 500;
} else {
score -= 650;
}
}
}
}
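// a small number of unmarked patterns is penalized heavily, larger counts only lightly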
if (UnmarkedPatternCounter > 0 && UnmarkedPatternCounter < 5) {
score -= UnmarkedPatternCounter * 1600;
} else {
score -= UnmarkedPatternCounter * 10;
}
if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
if (!Objects.equals(MarkedContiniousCounter1Entries, MarkedContiniousCounter2Entries)
if (MarkedContinuousCounter1 > MarkedContinuousCounter2 * 50 || MarkedContinuousCounter2 > MarkedContinuousCounter1 * 50) {
score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? MarkedContinuousCounter1 * 120 : MarkedContinuousCounter2 * 120;
} else if (!Objects.equals(MarkedContiniousCounter1Entries, MarkedContiniousCounter2Entries)
&& (MarkedContinuousCounter1 * 2 >= MarkedContinuousCounter2 * MarkedContinuousCounter1)
|| (MarkedContinuousCounter2 * 2 >= MarkedContinuousCounter1 * MarkedContinuousCounter2)) {
score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
: (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
} else {
score += 4500;
} else if (MarkedContiniousCounter1Entries == 0 || MarkedContiniousCounter2Entries == 0) {
score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500
: (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500;
}
if (MarkedContiniousCounter1Entries > 0 && MarkedContiniousCounter2Entries > 0 && MarkedContinuousCounter1 > 0
&& MarkedContinuousCounter2 > 0 && MarkedContinuousCounter1 < MarkedContinuousCounter2 * 10
&& MarkedContinuousCounter2 < MarkedContinuousCounter1 * 10) {
if (MarkedContiniousCounter1Entries > MarkedContiniousCounter2Entries * 5
|| MarkedContiniousCounter2Entries > MarkedContiniousCounter1Entries * 5
|| MarkedContiniousCounter1Entries * 5 < MarkedContinuousCounter1
|| MarkedContiniousCounter1Entries * 5 < MarkedContinuousCounter2
|| MarkedContiniousCounter2Entries * 5 < MarkedContinuousCounter1
|| MarkedContiniousCounter2Entries * 5 < MarkedContinuousCounter2) {
score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? MarkedContinuousCounter1 * 400 : MarkedContinuousCounter2 * 400;
}
}
}
ConcurrentMap<Integer, String> strtokensMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String strTokeniPart1 : strTokenGetiPart1.values()) {
for (String strTokeniPart2 : strTokenGetiPart2.values()) {
if (strTokeniPart1.equals(strTokeniPart2)) {
if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) {
strtokensMap.put(strtokensMap.size() + 1, strTokeniPart2);
score += 400;
} else {
score -= 200;
}
}
}
int tokenEntry1 = strTokenGetEntry1.values().size();
int tokenEntry2 = strTokenGetEntry2.values().size();
boolean boundariyLeacks = false;
int remnantCounter = 0;
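// boundariyLeacks flags entry counts that are not in an exact 2:1 ratio; it selects between the scoring paths in the loop below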
if (tokenEntry1 * 2 != tokenEntry2 && tokenEntry2 * 2 != tokenEntry1) {
boundariyLeacks = true;
}
ConcurrentMap<Integer, String> entryTokenMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String strTokenEntry1 : strTokenGetEntry1.values()) {
for (String strTokenEntry2 : strTokenGetEntry2.values()) {
if (strTokenEntry1.equals(strTokenEntry2)) {
score += 2500;
if (!entryTokenMap.values().contains(strTokenEntry2)) {
if (strTokenEntry1.equals(strTokenEntry2)) {
score += boundariyLeacks ? 2500 : 2500 / 2;
} else if (!boundariyLeacks) {
score -= 1250;
} else {
remnantCounter++;
}
}
entryTokenMap.put(entryTokenMap.size() + 1, strTokenEntry2);
}
}
score -= remnantCounter * 250;
ConcurrentMap<Integer, String> iTokenMapTagsMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String strmapTag : ITokenMapTag1.values()) {
for (String strmapTag1 : ITokenMapTag2.values()) {
if (strmapTag.equals(strmapTag1)) {
score += 1450;
score -= 1450;
} else if (!iTokenMapTagsMap.values().contains(strmapTag)) {
score += 725;
iTokenMapTagsMap.put(iTokenMapTagsMap.size() + 1, strmapTag);
}
}
}
for (String strTokenForm1itr1 : strTokenForm1.values()) {
for (String strTokenForm1itr2 : strTokenForm2.values()) {
if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
score += 2600;
} else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
score += 500;
int tokenform1size = strTokenForm1.values().size();
int tokenform2size = strTokenForm2.values().size();
if (tokenform1size > 0 || tokenform2size > 0) {
if (tokenform1size < tokenform2size * 5 && tokenform2size < tokenform1size * 5) {
for (String strTokenForm1itr1 : strTokenForm1.values()) {
for (String strTokenForm1itr2 : strTokenForm2.values()) {
if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
score -= 1600;
} else {
score += 500;
}
}
}
} else if (tokenform1size > 0 && tokenform2size > 0) {
score += tokenform1size > tokenform2size ? tokenform1size * 1600 : tokenform2size * 1600;
}
} else {
tokenform1size = tokenform1size > 0 ? tokenform1size : 1;
tokenform2size = tokenform2size > 0 ? tokenform2size : 1;
score -= (tokenform1size + tokenform2size) * 1200;
}
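// matching token stems add to the score; each distinct non-matching stem is penalized only once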
ConcurrentMap<Integer, String> tokenStemmingMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String strTokenStem : strTokenStems1.values()) {
for (String strTokenStem1 : strTokenStems2.values()) {
if (strTokenStem.equals(strTokenStem1)) {
score += 1500;
} else if (!tokenStemmingMap.values().contains(strTokenStem)) {
score -= 150;
tokenStemmingMap.put(tokenStemmingMap.size() + 1, strTokenStem);
}
}
}
@@ -620,12 +800,17 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
score += (anotatorcounter1 - anotatorcounter2) * 400;
}
if (tokensCounter1 > 0 && tokensCounter2 > 0) {
score += (tokensCounter1 + tokensCounter2) * 400;
if ((tokensCounter1 > 0 && tokensCounter2 > 0) && tokensCounter1 < tokensCounter2 * 5 && tokensCounter2 < tokensCounter1 * 5) {
score += (tokensCounter1 + tokensCounter2) * 1400;
} else {
int elseint = tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
if (elseint > 0) {
score -= elseint * 2;
if ((tokensCounter1 > tokensCounter2 * 5 || tokensCounter2 > tokensCounter1 * 5)
&& tokensCounter1 > 0 && tokensCounter2 > 0) {
score -= (tokensCounter1 + tokensCounter2) * 1500;
} else if (elseint > 0 && tokensCounter1 > 0 && tokensCounter2 > 0) {
score += elseint * 2;
} else if (elseint == 0) {
score += 1500;
}
}
LevenshteinDistance leven = new LevenshteinDistance(str, str1);

@@ -51,7 +51,7 @@ public class DiscordHandler {
Datahandler.instance.updateStringCache();
//order matters
if (Datahandler.instance.getstringCacheSize() != 0) {
while (Datahandler.instance.getlHMSMXSize() * Datahandler.instance.getlHMSMXSize() * 3
while (Datahandler.instance.getlHMSMXSize() * Datahandler.instance.getlHMSMXSize() * 2.5
< (Datahandler.instance.getstringCacheSize()
* Datahandler.instance.getstringCacheSize())
- Datahandler.instance.getstringCacheSize()) {
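// with the 2.5 factor the condition holds slightly longer: for a hypothetical cache of 100 strings,
// 100 * 100 - 100 = 9900, so it stays true until getlHMSMXSize() reaches 63 (the old factor of 3 gave 58)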