diff --git a/ArtificialAutism/src/main/java/DataLayer/DBCPDataSource.java b/ArtificialAutism/src/main/java/DataLayer/DBCPDataSource.java
index ae48d11a..f9752479 100644
--- a/ArtificialAutism/src/main/java/DataLayer/DBCPDataSource.java
+++ b/ArtificialAutism/src/main/java/DataLayer/DBCPDataSource.java
@@ -20,9 +20,9 @@ public class DBCPDataSource {
     static {
         try {
             ds.setDriver(new com.mysql.cj.jdbc.Driver());
-            ds.setUrl("jdbc:mysql://151.80.230.149:3306/ArtificialAutism?useLegacyDatetimeCode=false&serverTimezone=UTC");
-            ds.setUsername("ArtificialAutism");
-            ds.setPassword("b423b54bwbfb1340438fn");
+            ds.setUrl("jdbc:mysql://104.248.40.216:3306/ArtificialAutism?useLegacyDatetimeCode=false&serverTimezone=UTC");
+            ds.setUsername("root");
+            ds.setPassword("fb345972349fnsDW234/ยค)#2");
             ds.setMaxTotal(-1);
             ds.setMinIdle(5);
             ds.setMaxIdle(-1);
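A note on the block above: both the old and the new revision keep live database credentials (now a root login) in source control, and setMaxTotal(-1) removes the pool's connection cap, which DBCP2 treats as unlimited. A minimal sketch of the same DBCP2 setup reading the secrets from the environment instead; the variable names (AA_DB_URL, AA_DB_USER, AA_DB_PASSWORD) and class name are illustrative, not part of this patch:

    import org.apache.commons.dbcp2.BasicDataSource;

    public final class DBCPDataSourceEnv {

        private static final BasicDataSource ds = new BasicDataSource();

        static {
            // Same MySQL driver and pool settings as the patch, but no secrets in the tree.
            ds.setDriverClassName("com.mysql.cj.jdbc.Driver");
            ds.setUrl(System.getenv("AA_DB_URL")); // e.g. jdbc:mysql://host:3306/ArtificialAutism
            ds.setUsername(System.getenv("AA_DB_USER"));
            ds.setPassword(System.getenv("AA_DB_PASSWORD"));
            ds.setMinIdle(5);
        }

        private DBCPDataSourceEnv() {
        }

        public static BasicDataSource get() {
            return ds;
        }
    }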
diff --git a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
index 824e4e92..bca4da1c 100644
--- a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
+++ b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
@@ -140,41 +140,35 @@ public class DataMapper {
     public static LinkedHashMap<String, LinkedHashMap<String, Double>> getAllRelationScores() {
         int count = getSementicsDBRows();
-        int counter2 = 0;
-        int hardCapRetrieveCount = 500000;
         LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMX = new LinkedHashMap();
-        while (count > counter2) {
-            try (Connection l_cCon = DBCPDataSource.getConnection()) {
-                l_cCon.setAutoCommit(false);
-                String l_sSQL = "SELECT * FROM `WordMatrix` WHERE ID > " + counter2 + " AND ID < " + (counter2 + hardCapRetrieveCount);
-                try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
-                        java.sql.ResultSet.CONCUR_READ_ONLY)) {
-                    l_pStatement.setFetchSize(Integer.MIN_VALUE);
-                    try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
-                        int i = 0;
-                        LinkedHashMap<String, Double> LHMLocal = new LinkedHashMap();
-                        while (l_rsSearch.next() && i < hardCapRetrieveCount) {
-                            String str1 = l_rsSearch.getString(1);
-                            String str2 = l_rsSearch.getString(2);
-                            Double score = l_rsSearch.getDouble(3);
-                            LHMLocal.put(str2, score);
-                            while (l_rsSearch.next() && i < hardCapRetrieveCount && str1.equals(l_rsSearch.getString(1))) {
-                                str2 = l_rsSearch.getString(2);
-                                score = l_rsSearch.getDouble(3);
-                                LHMLocal.put(str2, score);
-                                i++;
-                                counter2++;
-                            }
-                            LHMSMX.put(str1, LHMLocal);
-                            System.out.println("i: " + i + "\n" + "free memory: " + Runtime.getRuntime().freeMemory() + "\ncounter2: " + counter2 + "\n");
-                            i++;
-                            counter2++;
-                        }
-                    }
-                }
-            } catch (SQLException ex) {
-                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
+        try (Connection l_cCon = DBCPDataSource.getConnection()) {
+            l_cCon.setAutoCommit(false);
+            String l_sSQL = "SELECT * FROM `WordMatrix`";
+            try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
+                    java.sql.ResultSet.CONCUR_READ_ONLY)) {
+                l_pStatement.setFetchSize(Integer.MIN_VALUE);
+                try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
+                    int i = 0;
+                    LinkedHashMap<String, Double> LHMLocal = new LinkedHashMap();
+                    while (l_rsSearch.next()) {
+                        String str1 = l_rsSearch.getString(1);
+                        String str2 = l_rsSearch.getString(2);
+                        Double score = l_rsSearch.getDouble(3);
+                        LHMLocal.put(str2, score);
+                        while (l_rsSearch.next() && str1.equals(l_rsSearch.getString(1))) {
+                            str2 = l_rsSearch.getString(2);
+                            score = l_rsSearch.getDouble(3);
+                            LHMLocal.put(str2, score);
+                            i++;
+                        }
+                        LHMSMX.put(str1, LHMLocal);
+                        System.out.println("i: " + i + "\n" + "free memory: " + Runtime.getRuntime().freeMemory() + "\n");
+                        i++;
+                    }
+                }
             }
+        } catch (SQLException ex) {
+            Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
         }
         return LHMSMX;
     }
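The rewrite above streams the whole WordMatrix table in one pass: setFetchSize(Integer.MIN_VALUE) on a TYPE_FORWARD_ONLY, CONCUR_READ_ONLY statement is MySQL Connector/J's row-by-row streaming mode. Two caveats are worth flagging: the inner grouping loop assumes rows sharing a first-column value arrive adjacently (only guaranteed if the server happens to return them sorted), and LHMLocal is created once, so every key in LHMSMX appears to share one accumulating inner map. A sketch that keeps the streaming but drops both assumptions; columns are read by index as in the patch, and the ORDER BY and method name are illustrative:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.LinkedHashMap;

    static LinkedHashMap<String, LinkedHashMap<String, Double>> readAllRelationScores(Connection con) throws SQLException {
        LinkedHashMap<String, LinkedHashMap<String, Double>> out = new LinkedHashMap<>();
        try (PreparedStatement ps = con.prepareStatement("SELECT * FROM `WordMatrix` ORDER BY 1",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            ps.setFetchSize(Integer.MIN_VALUE); // Connector/J: stream row by row instead of buffering the table
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    // One fresh inner map per distinct first-column value; adjacency no longer matters.
                    out.computeIfAbsent(rs.getString(1), k -> new LinkedHashMap<>())
                            .put(rs.getString(2), rs.getDouble(3));
                }
            }
        }
        return out;
    }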
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java
index ec643655..05b8722c 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java
@@ -29,13 +29,11 @@ import java.io.StringReader;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
-import java.util.Random;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CountDownLatch;
@@ -56,6 +54,7 @@ public class Datahandler {
     public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(6, TimeUnit.MINUTES);
     public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
     public static Datahandler instance = new Datahandler();
+    private static volatile Double minDistance;
     private volatile boolean refreshMatrixFromDB;
     private static volatile int secondaryIterator = 0;
     private final ConcurrentMap<Integer, String> stringCache;
@@ -66,6 +65,7 @@ public class Datahandler {
     private final Stopwatch stopwatch;
     private final Stopwatch stopwatch1;
     private ForkJoinPool executor;
+    private static String similar = "";
     private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
     private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
     private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz";
@@ -278,6 +278,7 @@ public class Datahandler {
         if (stringCache.values().size() > 10 && !refreshMatrixFromDB) {
             ConcurrentMap<Integer, String> stringCachelocal = stringCache;
             int selectUpdate = -1;
+            int iteratorCap = 25;
             LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
             int ij2 = 0;
             for (String str : stringCachelocal.values()) {
@@ -290,67 +291,81 @@ public class Datahandler {
             }
             if (selectUpdate == -1 || selectUpdate + 1 == stringCachelocal.size()) {
                 int valueSize = stringCachelocal.size();
-                if (secondaryIterator + 1 >= valueSize) {
+                if (secondaryIterator + iteratorCap >= valueSize) {
                     secondaryIterator = 0;
                 }
                 selectUpdate = secondaryIterator;
-                secondaryIterator++;
+                secondaryIterator += iteratorCap;
             }
-            final String getStringCacheStr = stringCachelocal.getOrDefault(selectUpdate, null);
-            ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
+            final ConcurrentMap<Integer, String> getStringCacheMap = new MapMaker().concurrencyLevel(2).makeMap();
+            for (int i = 0; i < iteratorCap; i++) {
+                getStringCacheMap.put(i, stringCachelocal.get(selectUpdate));
+                selectUpdate++;
+            }
+            ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateMap = new MapMaker().concurrencyLevel(2).makeMap();
             ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
-            stringCachelocal.values().forEach((str1) -> {
-                boolean present = false;
-                LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(getStringCacheStr, null);
-                if (orDefault != null) {
-                    Double orDefault1 = orDefault.getOrDefault(str1, null);
-                    if (orDefault1 != null) {
-                        present = true;
-                    }
-                }
-                if (!present) {
-                    orDefault = lHMSMX.getOrDefault(str1, null);
-                    if (orDefault != null) {
-                        Double orDefault1 = orDefault.getOrDefault(getStringCacheStr, null);
-                        if (orDefault1 != null) {
-                            present = true;
-                        }
-                    }
-                }
-                if (!present) {
-                    LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(getStringCacheStr, null);
-                    if (orDefault1 == null) {
-                        orDefault1 = new LinkedHashMap();
-                    }
-                    orDefault1.put(str1, 0.0);
-                    lHMSMX.put(getStringCacheStr, orDefault1);
-                    SimilarityMatrix SMX = new SimilarityMatrix(getStringCacheStr, str1);
-                    Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(getStringCacheStr, str1, SMX, jmweAnnotationCache.get(getStringCacheStr),
-                            jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(getStringCacheStr), pipelineAnnotationCache.get(str1),
-                            pipelineSentimentAnnotationCache.get(getStringCacheStr), pipelineSentimentAnnotationCache.get(str1));
-                    futures.put(futures.size() + 1, executor.submit(worker));
-                }
-            });
-            System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
-            futures.values().parallelStream().forEach((future) -> {
-                SimilarityMatrix SMX = new SimilarityMatrix("", "");
-                try {
-                    SMX = future.get(5, TimeUnit.SECONDS);
-                } catch (InterruptedException | ExecutionException | TimeoutException ex) {
-                    Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
-                    SMX = null;
-                }
-                if (SMX != null) {
-                    LinkedHashMap<String, Double> getFuture = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
-                    getFuture.put(SMX.getSecondaryString(), SMX.getDistance());
-                    lHMSMX.put(SMX.getPrimaryString(), getFuture);
-                    matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
-                }
-            });
+            getStringCacheMap.values().forEach((getStringCacheStr) -> {
+                stringCachelocal.values().forEach((str1) -> {
+                    if (!getStringCacheStr.equals(str1)) {
+                        boolean present = false;
+                        LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(getStringCacheStr, null);
+                        if (orDefault != null) {
+                            Collection<String> strkeys = orDefault.keySet();
+                            for (String strkey : strkeys) {
+                                if (strkey.equals(str1)) {
+                                    present = true;
+                                    break;
+                                }
+                            }
+                        }
+                        if (!present) {
+                            orDefault = lHMSMX.getOrDefault(str1, null);
+                            if (orDefault != null) {
+                                Collection<String> strkeys = orDefault.keySet();
+                                for (String strkey : strkeys) {
+                                    if (strkey.equals(getStringCacheStr)) {
+                                        present = true;
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                        if (!present) {
+                            LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(getStringCacheStr, null);
+                            if (orDefault1 == null) {
+                                orDefault1 = new LinkedHashMap();
+                            }
+                            orDefault1.put(str1, 0.0);
+                            lHMSMX.put(getStringCacheStr, orDefault1);
+                            SimilarityMatrix SMX = new SimilarityMatrix(getStringCacheStr, str1);
+                            Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(getStringCacheStr, str1, SMX, jmweAnnotationCache.get(getStringCacheStr),
+                                    jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(getStringCacheStr), pipelineAnnotationCache.get(str1),
+                                    pipelineSentimentAnnotationCache.get(getStringCacheStr), pipelineSentimentAnnotationCache.get(str1));
+                            futures.put(futures.size() + 1, executor.submit(worker));
+                        }
+                    }
+                });
+                System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
+                futures.values().parallelStream().forEach((future) -> {
+                    SimilarityMatrix SMX = new SimilarityMatrix("", "");
+                    try {
+                        SMX = future.get(5, TimeUnit.SECONDS);
+                    } catch (InterruptedException | ExecutionException | TimeoutException ex) {
+                        Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
+                        SMX = null;
+                    }
+                    if (SMX != null) {
+                        LinkedHashMap<String, Double> getFuture = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
+                        getFuture.put(SMX.getSecondaryString(), SMX.getDistance());
+                        lHMSMX.put(SMX.getPrimaryString(), getFuture);
+                        matrixUpdateMap.put(matrixUpdateMap.size() + 1, SMX);
+                    }
+                });
+            });
             new Thread(() -> {
                 try {
-                    if (!matrixUpdateList.isEmpty()) {
-                        DataMapper.insertSementicMatrixes(matrixUpdateList);
+                    if (!matrixUpdateMap.isEmpty()) {
+                        DataMapper.insertSementicMatrixes(matrixUpdateMap);
                         System.out.println("finished datamapper semetic insert");
                     }
                 } catch (CustomError ex) {
@@ -359,7 +374,6 @@ public class Datahandler {
                 }
             }).start();
         }
-    }

     public synchronized void checkIfUpdateStrings(boolean hlStatsMsg) throws CustomError {
@@ -398,7 +412,6 @@ public class Datahandler {
         System.out.println("pre mostSimilarSTR \n");
         String mostSimilarSTR = mostSimilar(str, strArrs);
         if (mostSimilarSTR != null) {
-            System.out.println("mostSimilarSTR; " + mostSimilarSTR + "\n");
             LinkedHashMap<String, Double> orDefault = LHMSMXLocal.getOrDefault(mostSimilarSTR, null);
             if (orDefault != null) {
                 for (Entry<String, Double> entrySet : orDefault.entrySet()) {
@@ -452,7 +465,6 @@ public class Datahandler {
                 futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
             }
         });
-        int index = 0;
         futureslocal.values().parallelStream().forEach((future) -> {
             SimilarityMatrix SMX = new SimilarityMatrix("", "");
             try {
@@ -464,15 +476,10 @@ public class Datahandler {
         });
         for (SimilarityMatrix SMX : futurereturn.values()) {
             double distance = SMX.getDistance();
-            /*
-            System.out.println("index: " + index + "\nfutures size: " + futureslocal.values().size() + "\nScore: " + SMX.getDistance() + "\nSecondary: "
-                    + SMX.getSecondaryString() + "\nPrimary: " + SMX.getPrimaryString() + "\n");
-            */
             if (distance > Score) {
                 Score = distance;
                 SMXreturn = SMX;
             }
-            index++;
         }
         System.out.println("Reached end: secondary: " + SMXreturn.getSecondaryString() + "\nPrimarY: " + SMXreturn.getPrimaryString()
                 + "\nScore: " + SMXreturn.getDistance());
@@ -480,30 +487,27 @@ public class Datahandler {
     public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings) {
-        int minDistance = 7;
-        String similar = "";
-        List<Future<ConcurrentMap<String, Integer>>> futures = new ArrayList();
-        ConcurrentMap<String, Integer> futuresreturnvalues = new MapMaker().concurrencyLevel(2).makeMap();
+        similar = "";
+        minDistance = 7.5;
         concurrentStrings.values().parallelStream().forEach((str) -> {
-            Callable<ConcurrentMap<String, Integer>> worker = new LevenshteinDistance(toBeCompared, str);
-            futures.add(executor.submit(worker));
-        });
-        futures.parallelStream().forEach((future) -> {
-            try {
-                ConcurrentMap<String, Integer> get = future.get();
-                get.entrySet().forEach((str) -> {
-                    futuresreturnvalues.put(str.getKey(), str.getValue());
-                });
-            } catch (NullPointerException | InterruptedException | ExecutionException ex) {
-                System.out.println("failed future\nex: " + ex.getMessage() + "\n");
+            LevenshteinDistance leven = new LevenshteinDistance(toBeCompared, str);
+            double distance = leven.computeLevenshteinDistance();
+            if (distance < minDistance) {
+                minDistance = distance;
+                System.out.println("distance: " + distance + "\n");
+                similar = str;
             }
         });
-        for (Entry<String, Integer> entritr : futuresreturnvalues.entrySet()) {
-            int distance = entritr.getValue();
-            if (distance < minDistance) {
-                System.out.println("distance: " + distance + "\n");
-                minDistance = distance;
-                similar = entritr.getKey();
+        LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(similar, null);
+        if (orDefault == null) {
+            return null;
+        }
+        Double maxDistance = 0.0;
+        for (Entry<String, Double> defaultEntry : orDefault.entrySet()) {
+            Double value = defaultEntry.getValue();
+            if (value > maxDistance) {
+                maxDistance = value;
+                similar = defaultEntry.getKey();
            }
         }
         return similar;
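One caution on the rewritten mostSimilar above: it mutates the static fields minDistance and similar from inside a parallelStream. volatile makes the writes visible, but the compare-then-assign pair is not atomic, so two threads can interleave and keep a candidate that is not actually the closest. A thread-confined sketch of the same selection, using this patch's synchronous LevenshteinDistance; the helper name is hypothetical and the 7.5 cutoff is the one the patch uses:

    import java.util.AbstractMap;
    import java.util.Map;

    static String closestBelowCutoff(String toBeCompared, Map<Integer, String> candidates) {
        return candidates.values().parallelStream()
                .map(s -> new AbstractMap.SimpleEntry<>(s,
                        new LevenshteinDistance(toBeCompared, s).computeLevenshteinDistance()))
                .filter(e -> e.getValue() < 7.5)
                .min(Map.Entry.comparingByValue()) // a reduction: no shared mutable state
                .map(Map.Entry::getKey)
                .orElse(null);
    }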
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/LevenshteinDistance.java b/ArtificialAutism/src/main/java/FunctionLayer/LevenshteinDistance.java
index 870b4d2e..fa8aa7bf 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/LevenshteinDistance.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/LevenshteinDistance.java
@@ -15,11 +15,9 @@ import java.util.concurrent.ConcurrentMap;
 *
 * @author install1
 */
-public class LevenshteinDistance implements Callable<ConcurrentMap<String, Integer>> {
-
+public class LevenshteinDistance {
     private CharSequence lhs;
     private CharSequence rhs;
-    private ConcurrentMap<String, Integer> distanceEntry = new MapMaker().concurrencyLevel(2).makeMap();

     private static int minimum(int a, int b, int c) {
         return Math.min(Math.min(a, b), c);
@@ -48,25 +46,4 @@ public class LevenshteinDistance {
         }
         return distance[lhs.length()][rhs.length()];
     }
-
-    public ConcurrentMap<String, Integer> call() {
-        int[][] distance = new int[lhs.length() + 1][rhs.length() + 1];
-        for (int i = 0; i <= lhs.length(); i++) {
-            distance[i][0] = i;
-        }
-        for (int j = 1; j <= rhs.length(); j++) {
-            distance[0][j] = j;
-        }
-        for (int i = 1; i <= lhs.length(); i++) {
-            for (int j = 1; j <= rhs.length(); j++) {
-                distance[i][j] = minimum(
-                        distance[i - 1][j] + 1,
-                        distance[i][j - 1] + 1,
-                        distance[i - 1][j - 1] + ((lhs.charAt(i - 1) == rhs.charAt(j - 1)) ? 0 : 1));
-            }
-        }
-        distanceEntry.put(lhs.toString(), distance[lhs.length()][rhs.length()]);
-        return distanceEntry;
-    }
 }
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java
index 9998bc64..bf886fe6 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java
@@ -28,7 +28,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Date;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ConcurrentMap;
@@ -67,8 +66,7 @@ public class PipelineJMWESingleton {
         }
         IMWEDetector detector = getDetector(index, detectorName);
         ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
-        Date startDate = new Date();
-        strvalues.parallelStream().forEach(str -> {
+        strvalues.forEach(str -> {
             Annotation annoStr = new Annotation(str);
             returnAnnotations.put(str, annoStr);
         });
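With the Callable wrapper, the per-call ConcurrentMap, and the executor round trip removed, LevenshteinDistance is now a plain synchronous helper, which is also how Datahandler and SentimentAnalyzerTest call it after this patch. A short usage sketch:

    // Classic DP edit distance: "kitten" -> "sitting" needs 3 edits.
    LevenshteinDistance leven = new LevenshteinDistance("kitten", "sitting");
    double distance = leven.computeLevenshteinDistance();

Dropping the future per comparison removes a blocking future.get() from every call; the dynamic-programming table itself is still O(|lhs| * |rhs|).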
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
index 648e7a2f..b2fa3368 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
@@ -38,6 +38,7 @@ import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.OptionalDouble;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentMap;
@@ -95,344 +96,378 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
         Double score = -100.0;
         try {
             List<List<TaggedWord>> taggedwordlist1 = new ArrayList();
-            List<List<TaggedWord>> taggedwordlist2 = new ArrayList();
-            DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
-            //noneDelete
-            TokenizerFactory<CoreLabel> ptbTokenizerFactory
-                    = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete");
-            tokenizer.setTokenizerFactory(ptbTokenizerFactory);
-            for (List<HasWord> sentence : tokenizer) {
-                taggedwordlist1.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
-            }
-            tokenizer = new DocumentPreprocessor(new StringReader(str));
-            tokenizer.setTokenizerFactory(ptbTokenizerFactory);
-            for (List<HasWord> sentence : tokenizer) {
-                taggedwordlist2.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
-            }
-            int counter = 0;
-            int counter1 = 0;
-            counter = taggedwordlist2.stream().map((taggedlist2) -> taggedlist2.size()).reduce(counter, Integer::sum);
-            counter1 = taggedwordlist1.stream().map((taggedlist1) -> taggedlist1.size()).reduce(counter1, Integer::sum);
-            int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
-            overValue *= 16;
-            score -= overValue;
-            ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
-            taggedwordlist1.forEach((TGWList) -> {
-                TGWList.forEach((TaggedWord) -> {
-                    if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
-                        tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
-                    }
-                });
-            });
-            taggedwordlist1.clear();
-            AtomicInteger runCount = new AtomicInteger(0);
-            taggedwordlist2.forEach((TGWList) -> {
-                TGWList.forEach((TaggedWord) -> {
-                    if (tgwlistIndex.values().contains(TaggedWord.tag())) {
-                        tgwlistIndex.values().remove(TaggedWord.tag());
-                        runCount.getAndIncrement();
-                    }
-                });
-            });
-            tgwlistIndex.clear();
-            taggedwordlist2.clear();
-            score += runCount.get() * 64;
-            ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
-            try {
-                for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
-                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
-                    sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
-                }
-                for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
-                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
-                    GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
-                    Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
-                    ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
-                    for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
-                        Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
-                        Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
-                        ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
-                        for (Constituent consti : inT1notT2) {
-                            for (Constituent consti1 : inT2notT1) {
-                                if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
-                                    score += 64;
-                                    constiLabels.put(constiLabels.size(), consti.value());
-                                }
-                            }
-                        }
-                        GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
-                        Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
-                        for (TypedDependency TDY1 : allTypedDependencies1) {
-                            IndexedWord dep = TDY1.dep();
-                            IndexedWord gov = TDY1.gov();
-                            GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
-                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
-                                score += 900;
-                            }
-                            GrammaticalRelation reln = TDY1.reln();
-                            if (reln.isApplicable(sentenceConstituencyParse)) {
-                                score += 256;
-                            }
-                        }
-                        for (TypedDependency TDY : allTypedDependencies) {
-                            IndexedWord dep = TDY.dep();
-                            IndexedWord gov = TDY.gov();
-                            GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
-                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
-                                score += 900;
-                            }
-                            GrammaticalRelation reln = TDY.reln();
-                            if (reln.isApplicable(sentenceConstituencyParse1)) {
-                                score += 256;
-                            }
-                        }
-                        AtomicInteger runCount1 = new AtomicInteger(0);
-                        sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
-                            sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
-                                    && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
-                                filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
-                                return _item;
-                            }).forEachOrdered((_item) -> {
-                                runCount1.getAndIncrement();
-                            });
-                        });
-                        score += runCount1.get() * 1500;
-                    }
-                }
-            } catch (Exception ex) {
-                System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage() + "\n");
-            }
-            sentenceConstituencyParseList.clear();
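The removed constituency block above and its rewritten counterpart further down both start from the same constituent-overlap count: labels present in each tree's diff against the other (the old code scores 64 per shared label on the spot; the new code counts first via constiRelationsize and scores +/-200 afterwards). A standalone sketch of that count, using the same Stanford CoreNLP Tdiff call the patch imports; the helper name is hypothetical:

    import edu.stanford.nlp.trees.Constituent;
    import edu.stanford.nlp.trees.Tdiff;
    import edu.stanford.nlp.trees.Tree;
    import java.util.HashSet;
    import java.util.Set;

    static int sharedConstituentLabels(Tree t1, Tree t2) {
        Set<Constituent> inT1notT2 = Tdiff.markDiff(t1, t2); // constituents of t1 missing from t2
        Set<Constituent> inT2notT1 = Tdiff.markDiff(t2, t1); // and the reverse direction
        Set<String> seen = new HashSet<>();                  // each label counted once, as in the patch
        int shared = 0;
        for (Constituent c1 : inT1notT2) {
            for (Constituent c2 : inT2notT1) {
                if (c1.value().equals(c2.value()) && seen.add(c1.value())) {
                    shared++;
                }
            }
        }
        return shared;
    }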
-            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
-            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
-                simpleSMXlist.put(simpleSMXlist.size(), predictions);
-                simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
-            }
-            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
-                score = simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
-                    subtracter *= 25;
-                    return subtracter;
-                }).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
-                for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
-                    double dot = nodeVector.dot(simpleSMX);
-                    double elementSum = nodeVector.kron(simpleSMX).elementSum();
-                    elementSum = Math.round(elementSum * 100.0) / 100.0;
-                    if (dot < 0.1) {
-                        score += 256;
-                    }
-                    if (elementSum < 0.1 && elementSum > 0.0) {
-                        score += 1300;
-                    } else if (elementSum > 0.1 && elementSum < 1.0) {
-                        score -= 1100;
-                    } else {
-                        score -= 1424;
-                    }
-                }
-            }
-            score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
-            DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
-            List<List<CoreLabel>> classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
-            List<List<CoreLabel>> classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
-            score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
-            int mainSentiment1 = 0;
-            int longest1 = 0;
-            int mainSentiment2 = 0;
-            int longest2 = 0;
-            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
-                String partText = sentence.toString();
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                if (partText.length() > longest1) {
-                    mainSentiment1 = sentiment;
-                    longest1 = partText.length();
-                }
-            }
-            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                String partText = sentence.toString();
-                if (partText.length() > longest2) {
-                    mainSentiment2 = sentiment;
-                    longest2 = partText.length();
-                }
-            }
-            if (longest1 != longest2) {
-                long deffLongest = longest1 > longest2 ? longest1 : longest2;
-                long deffshorter = longest1 < longest2 ? longest1 : longest2;
-                if (deffLongest >= (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
-                    score += (deffLongest - deffshorter) * 200;
-                } else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
-                    score += (deffLongest - deffshorter) * 200;
-                } else {
-                    score -= (deffLongest - deffshorter) * 50;
-                }
-            }
-            int tokensCounter1 = 0;
-            int tokensCounter2 = 0;
-            int anotatorcounter1 = 0;
-            int anotatorcounter2 = 0;
-            int inflectedCounterPositive1 = 0;
-            int inflectedCounterPositive2 = 0;
-            int inflectedCounterNegative = 0;
-            int MarkedContinuousCounter1 = 0;
-            int MarkedContinuousCounter2 = 0;
-            int UnmarkedPatternCounter = 0;
-            ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
-            try {
-                List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
-                for (CoreMap sentence : sentences) {
-                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
-                        if (token.isInflected()) {
-                            inflectedCounterPositive1++;
-                        } else {
-                            inflectedCounterNegative++;
-                        }
-                        strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
-                        strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
-                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
-                        IMWEDesc entry = token.getEntry();
-                        MarkedContinuousCounter1 += entry.getMarkedContinuous();
-                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
-                        for (IMWEDesc.IPart iPart : values) {
-                            strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
-                        }
-                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
-                            strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
-                        }
-                        for (IToken tokens : token.getTokens()) {
-                            ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
-                            for (String strtoken : tokens.getStems()) {
-                                strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
-                            }
-                        }
-                        tokensCounter1++;
-                    }
-                    anotatorcounter1++;
-                }
-                sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
-                for (CoreMap sentence : sentences) {
-                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
-                        if (token.isInflected()) {
-                            inflectedCounterPositive2++;
-                        } else {
-                            inflectedCounterNegative--;
-                        }
-                        strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
-                        strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
-                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
-                        IMWEDesc entry = token.getEntry();
-                        MarkedContinuousCounter2 += entry.getMarkedContinuous();
-                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
-                        for (IMWEDesc.IPart iPart : values) {
-                            strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
-                        }
-                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
-                            strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
-                        }
-                        for (IToken tokens : token.getTokens()) {
-                            ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
-                            for (String strtoken : tokens.getStems()) {
-                                strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
-                            }
-                        }
-                        tokensCounter2++;
-                    }
-                    anotatorcounter2++;
-                }
-            } catch (Exception ex) {
-                System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
-            }
-            for (String strTokenPos1 : strTokenEntryPOS1.values()) {
-                for (String strTokenPos2 : strTokenEntryPOS2.values()) {
-                    if (strTokenPos1.equals(strTokenPos2)) {
-                        score += 500;
-                    }
-                }
-            }
-            score += UnmarkedPatternCounter * 1600;
-            if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
-                score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
-                        : (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
-            }
-            for (String strTokeniPart1 : strTokenGetiPart1.values()) {
-                for (String strTokeniPart2 : strTokenGetiPart2.values()) {
-                    if (strTokeniPart1.equals(strTokeniPart2)) {
-                        score += 400;
-                    }
-                }
-            }
-            for (String strTokenEntry1 : strTokenGetEntry1.values()) {
-                for (String strTokenEntry2 : strTokenGetEntry2.values()) {
-                    if (strTokenEntry1.equals(strTokenEntry2)) {
-                        score += 2500;
-                    }
-                }
-            }
-            for (String strmapTag : ITokenMapTag1.values()) {
-                for (String strmapTag1 : ITokenMapTag2.values()) {
-                    if (strmapTag.equals(strmapTag1)) {
-                        score += 1450;
-                    }
-                }
-            }
-            for (String strTokenForm1itr1 : strTokenForm1.values()) {
-                for (String strTokenForm1itr2 : strTokenForm2.values()) {
-                    if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
-                        score += 2600;
-                    } else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
-                        score += 500;
-                    }
-                }
-            }
-            for (String strTokenStem : strTokenStems1.values()) {
-                for (String strTokenStem1 : strTokenStems2.values()) {
-                    if (strTokenStem.equals(strTokenStem1)) {
-                        score += 1500;
-                    }
-                }
-            }
-            if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
-                score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
-            }
-            if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
-                score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
-            }
-            if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
-                score += (anotatorcounter1 + anotatorcounter2) * 400;
-            }
-            if (tokensCounter1 > 0 && tokensCounter2 > 0) {
-                score += (tokensCounter1 + tokensCounter2) * 400;
-            } else {
-                score -= tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
-            }
-            LevenshteinDistance leven = new LevenshteinDistance(str, str1);
-            double SentenceScoreDiff = leven.computeLevenshteinDistance();
-            SentenceScoreDiff *= 15;
-            score -= SentenceScoreDiff;
+            List<List<TaggedWord>> taggedwordlist2 = new ArrayList();
+            DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
+            //noneDelete
+            TokenizerFactory<CoreLabel> ptbTokenizerFactory
+                    = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete");
+            tokenizer.setTokenizerFactory(ptbTokenizerFactory);
+            for (List<HasWord> sentence : tokenizer) {
+                taggedwordlist1.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
+            }
+            tokenizer = new DocumentPreprocessor(new StringReader(str));
+            tokenizer.setTokenizerFactory(ptbTokenizerFactory);
+            for (List<HasWord> sentence : tokenizer) {
+                taggedwordlist2.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
+            }
+            int counter = 0;
+            int counter1 = 0;
+            counter = taggedwordlist2.stream().map((taggedlist2) -> taggedlist2.size()).reduce(counter, Integer::sum);
+            counter1 = taggedwordlist1.stream().map((taggedlist1) -> taggedlist1.size()).reduce(counter1, Integer::sum);
+            int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
+            overValue *= 32;
+            score -= overValue;
+            ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
+            taggedwordlist1.forEach((TGWList) -> {
+                TGWList.forEach((TaggedWord) -> {
+                    if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
+                        tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
+                    }
+                });
+            });
+            AtomicInteger runCount = new AtomicInteger(0);
+            taggedwordlist2.forEach((TGWList) -> {
+                TGWList.forEach((TaggedWord) -> {
+                    if (tgwlistIndex.values().contains(TaggedWord.tag())) {
+                        tgwlistIndex.values().remove(TaggedWord.tag());
+                        runCount.getAndIncrement();
+                    }
+                });
+            });
+            score += runCount.get() * 64;
+            ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
+            try {
+                for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
+                    sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
+                }
+                for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    int constiRelationsize = 0;
+                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
+                    GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
+                    Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
+                    ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
+                    for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
+                        Set<Constituent> constinuent1 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
+                        Set<Constituent> constinuent2 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
+                        ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
+                        for (Constituent consti : constinuent1) {
+                            for (Constituent consti1 : constinuent2) {
+                                if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
+                                    constiLabels.put(constiLabels.size(), consti.value());
+                                    constiRelationsize++;
+                                }
+                            }
+                        }
+                        int constituents1 = constinuent1.size() - constiRelationsize;
+                        int constituents2 = constinuent2.size() - constiRelationsize;
+                        if (constituents1 > 0 && constituents2 > 0) {
+                            score -= (constituents1 + constituents2) * 200;
+                        } else {
+                            score += constiRelationsize * 200;
+                        }
+                        GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
+                        Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
+                        for (TypedDependency TDY1 : allTypedDependencies1) {
+                            IndexedWord dep = TDY1.dep();
+                            IndexedWord gov = TDY1.gov();
+                            GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
+                                score += 1900;
+                            }
+                            GrammaticalRelation reln = TDY1.reln();
+                            if (reln.isApplicable(sentenceConstituencyParse)) {
+                                score += 525;
+                            }
+                        }
+                        for (TypedDependency TDY : allTypedDependencies) {
+                            IndexedWord dep = TDY.dep();
+                            IndexedWord gov = TDY.gov();
+                            GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
+                                score += 900;
+                            }
+                            GrammaticalRelation reln = TDY.reln();
+                            if (reln.isApplicable(sentenceConstituencyParse1)) {
+                                score += 525;
+                            }
+                        }
+                        AtomicInteger runCount1 = new AtomicInteger(0);
+                        sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
+                            sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
+                                    && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
+                                filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
+                                return _item;
+                            }).forEachOrdered((_item) -> {
+                                runCount1.getAndIncrement();
+                            });
+                        });
+                        score += runCount1.get() * 1500;
+                    }
+                }
+            } catch (Exception ex) {
+                System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage() + "\n");
+            }
+            sentenceConstituencyParseList.clear();
+            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
+            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
+                simpleSMXlist.put(simpleSMXlist.size(), predictions);
+                simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
+            }
+            ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(2).makeMap();
+            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
+                score += simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : dot > 0 ? 50 - dot : 50).map((subtracter) -> {
+                    subtracter *= 25; //25
+                    return subtracter;
+                }).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator + _item);
+                for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
+                    double dot = nodeVector.dot(simpleSMX);
+                    double elementSum = nodeVector.kron(simpleSMX).elementSum();
+                    elementSum = Math.round(elementSum * 100.0) / 100.0;
+                    elementSumCounter.put(elementSumCounter.size() + 1, elementSum);
+                    dotMap.put(dotMap.size() + 1, dot);
+                    if (dot < 0.1) {
+                        score += 256;
+                    }
+                    if (dot > 0.50) {
+                        score -= 2400;
+                    }
+                    if (elementSum < 0.01 && elementSum > 0.00) {
+                        score += 1300;
+                    } else if (elementSum > 0.1 && elementSum < 1.0) {
+                        score += 1100;
+                    } else {
+                        score -= elementSum * 1424;
+                    }
+                }
+            }
+            if (dotMap.values().size() > 1) {
+                OptionalDouble minvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).min();
+                OptionalDouble maxvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).max();
+                if (maxvalueDots.getAsDouble() - minvalueDots.getAsDouble() < 0.05) {
+                    score += 3500;
+                }
+            }
+            if (elementSumCounter.values().size() > 1) {
+                OptionalDouble minvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).min();
+                OptionalDouble maxvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).max();
+                if (maxvalueElements.getAsDouble() - minvalueElements.getAsDouble() < 0.05) {
+                    score += 3500;
+                }
+            }
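The two OptionalDouble blocks just added award a 3500-point bonus when all pairwise dot products, or all element sums, fall inside a 0.05-wide band, i.e. when the sentence vectors relate to each other about equally. The same test factored out into a helper; the name is hypothetical, the logic mirrors the patch:

    import java.util.Collection;
    import java.util.OptionalDouble;

    static boolean withinBand(Collection<Double> values, double band) {
        if (values.size() < 2) {
            return false; // the patch only applies the bonus with at least two samples
        }
        OptionalDouble min = values.stream().mapToDouble(Double::doubleValue).min();
        OptionalDouble max = values.stream().mapToDouble(Double::doubleValue).max();
        return max.getAsDouble() - min.getAsDouble() < band;
    }

    // Usage mirroring the patch:
    // if (withinBand(dotMap.values(), 0.05)) { score += 3500; }
    // if (withinBand(elementSumCounter.values(), 0.05)) { score += 3500; }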
+            score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
+            DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
+            List<List<CoreLabel>> classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
+            List<List<CoreLabel>> classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
+            score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
+            int mainSentiment1 = 0;
+            int longest1 = 0;
+            int mainSentiment2 = 0;
+            int longest2 = 0;
+            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
+                String partText = sentence.toString();
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                if (partText.length() > longest1) {
+                    mainSentiment1 = sentiment;
+                    longest1 = partText.length();
+                }
+            }
+            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                String partText = sentence.toString();
+                if (partText.length() > longest2) {
+                    mainSentiment2 = sentiment;
+                    longest2 = partText.length();
+                }
+            }
+            if (longest1 != longest2) {
+                long deffLongest = longest1 > longest2 ? longest1 : longest2;
+                long deffshorter = longest1 < longest2 ? longest1 : longest2;
+                //deffLongest >= (deffshorter * 2)
+                if (deffLongest < (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
+                    score += (deffLongest - deffshorter) * 120;
+                } else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
+                    score += (deffLongest - deffshorter) * 120;
+                } else if (deffLongest - deffshorter < 2) {
+                    score += (deffLongest + deffshorter) * 40;
+                } else if (deffLongest - deffshorter <= 5) {
+                    score += 2500;
+                } else {
+                    score -= (deffLongest - deffshorter) * 50;
+                }
+            }
+            int tokensCounter1 = 0;
+            int tokensCounter2 = 0;
+            int anotatorcounter1 = 0;
+            int anotatorcounter2 = 0;
+            int inflectedCounterPositive1 = 0;
+            int inflectedCounterPositive2 = 0;
+            int inflectedCounterNegative = 0;
+            int MarkedContinuousCounter1 = 0;
+            int MarkedContinuousCounter2 = 0;
+            int UnmarkedPatternCounter = 0;
+            ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
+            try {
+                List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
+                for (CoreMap sentence : sentences) {
+                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
+                        if (token.isInflected()) {
+                            inflectedCounterPositive1++;
+                        } else {
+                            inflectedCounterNegative++;
+                        }
+                        strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
+                        strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
+                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
+                        IMWEDesc entry = token.getEntry();
+                        MarkedContinuousCounter1 += entry.getMarkedContinuous();
+                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
+                        for (IMWEDesc.IPart iPart : values) {
+                            strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
+                        }
+                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
+                            strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
+                        }
+                        for (IToken tokens : token.getTokens()) {
+                            ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
+                            for (String strtoken : tokens.getStems()) {
+                                strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
+                            }
+                        }
+                        tokensCounter1++;
+                    }
+                    anotatorcounter1++;
+                }
+                sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
+                for (CoreMap sentence : sentences) {
+                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
+                        if (token.isInflected()) {
+                            inflectedCounterPositive2++;
+                        } else {
+                            inflectedCounterNegative--;
+                        }
+                        strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
+                        strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
+                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
+                        IMWEDesc entry = token.getEntry();
+                        MarkedContinuousCounter2 += entry.getMarkedContinuous();
+                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
+                        for (IMWEDesc.IPart iPart : values) {
+                            strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
+                        }
+                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
+                            strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
+                        }
+                        for (IToken tokens : token.getTokens()) {
+                            ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
+                            for (String strtoken : tokens.getStems()) {
+                                strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
+                            }
+                        }
+                        tokensCounter2++;
+                    }
+                    anotatorcounter2++;
+                }
+            } catch (Exception ex) {
+                System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
+            }
+            for (String strTokenPos1 : strTokenEntryPOS1.values()) {
+                for (String strTokenPos2 : strTokenEntryPOS2.values()) {
+                    if (strTokenPos1.equals(strTokenPos2)) {
+                        score += 500;
+                    }
+                }
+            }
+            if (UnmarkedPatternCounter > 0 && UnmarkedPatternCounter < 5) {
+                score += UnmarkedPatternCounter * 1600;
+            }
+            if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
+                score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
+                        : (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
+            }
+            for (String strTokeniPart1 : strTokenGetiPart1.values()) {
+                for (String strTokeniPart2 : strTokenGetiPart2.values()) {
+                    if (strTokeniPart1.equals(strTokeniPart2)) {
+                        score += 400;
+                    }
+                }
+            }
+            for (String strTokenEntry1 : strTokenGetEntry1.values()) {
+                for (String strTokenEntry2 : strTokenGetEntry2.values()) {
+                    if (strTokenEntry1.equals(strTokenEntry2)) {
+                        score += 2500;
+                    }
+                }
+            }
+            for (String strmapTag : ITokenMapTag1.values()) {
+                for (String strmapTag1 : ITokenMapTag2.values()) {
+                    if (strmapTag.equals(strmapTag1)) {
+                        score += 1450;
+                    }
+                }
+            }
+            for (String strTokenForm1itr1 : strTokenForm1.values()) {
+                for (String strTokenForm1itr2 : strTokenForm2.values()) {
+                    if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
+                        score += 2600;
+                    } else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
+                        score += 500;
+                    }
+                }
+            }
+            for (String strTokenStem : strTokenStems1.values()) {
+                for (String strTokenStem1 : strTokenStems2.values()) {
+                    if (strTokenStem.equals(strTokenStem1)) {
+                        score += 1500;
+                    }
+                }
+            }
+            if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
+                score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
+            }
+            if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
+                score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
+            }
+            if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
+                score += (anotatorcounter1 + anotatorcounter2) * 400;
+            }
+            if (tokensCounter1 > 0 && tokensCounter2 > 0) {
+                score += (tokensCounter1 + tokensCounter2) * 400;
+            } else {
+                int elseint = tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
+                score -= elseint;
+            }
+            LevenshteinDistance leven = new LevenshteinDistance(str, str1);
+            double SentenceScoreDiff = leven.computeLevenshteinDistance();
+            SentenceScoreDiff *= 15;
+            score -= SentenceScoreDiff;
         } catch (Exception ex) {
             System.out.println("SENTIMENT stacktrace Overall catch: " + ex.getMessage() + "\n");
         }
diff --git a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
index 1861ce57..4f0a516b 100644
--- a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
+++ b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
@@ -21,7 +21,6 @@ import FunctionLayer.PipelineJMWESingleton;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.concurrent.CountDownLatch;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.javacord.api.DiscordApi;