From 4c205f49d5f83f946903653a9880486661b24904 Mon Sep 17 00:00:00 2001 From: christian Date: Mon, 25 Oct 2021 19:08:22 +0200 Subject: [PATCH] further bot updates for chatting --- .../src/main/java/DataLayer/DataMapper.java | 103 +- .../main/java/FunctionLayer/CustomError.java | 17 + .../main/java/FunctionLayer/Datahandler.java | 825 +++++++ .../main/java/FunctionLayer/Datahandler.kt | 658 ------ .../src/main/java/FunctionLayer/DoStuff.java | 65 +- .../FunctionLayer/MessageResponseHandler.java | 101 + .../FunctionLayer/PipelineJMWESingleton.java | 67 +- .../java/FunctionLayer/SimilarityMatrix.java | 33 + .../StanfordParser/SentimentAnalyzerTest.java | 2097 ++++++----------- .../StanfordParser/SentimentValueCache.java | 334 +++ .../src/main/java/META-INF/MANIFEST.MF | 3 - .../PresentationLayer/DiscordHandler.java | 130 +- ArtificialAutism/src/test/java/junit.java | 497 ++++ 13 files changed, 2688 insertions(+), 2242 deletions(-) create mode 100644 ArtificialAutism/src/main/java/FunctionLayer/CustomError.java create mode 100644 ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java delete mode 100644 ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt create mode 100644 ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java create mode 100644 ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java delete mode 100644 ArtificialAutism/src/main/java/META-INF/MANIFEST.MF create mode 100644 ArtificialAutism/src/test/java/junit.java diff --git a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java index 3ab45f97..4a8f9109 100644 --- a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java +++ b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java @@ -5,41 +5,69 @@ */ package DataLayer; -import org.jetbrains.annotations.NotNull; - +import FunctionLayer.SimilarityMatrix; +import FunctionLayer.CustomError; +import com.google.common.collect.MapMaker; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.util.*; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; import java.util.logging.Level; import java.util.logging.Logger; /** + * * @author install1 */ public class DataMapper { - public static ArrayList getAllStrings() throws SQLException { + public static void createTables() throws CustomError { + Connection l_cCon = null; + PreparedStatement l_pStatement = null; + ResultSet l_rsSearch = null; + try { + l_cCon = DBCPDataSource.getConnection(); + String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)"; + l_pStatement = l_cCon.prepareStatement(l_sSQL); + l_pStatement.execute(); + } catch (SQLException ex) { + throw new CustomError("failed in DataMapper " + ex.getMessage()); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + } + + public static ConcurrentMap getAllStrings() throws CustomError { + ConcurrentMap allStrings = new MapMaker().concurrencyLevel(2).makeMap(); Connection l_cCon = null; PreparedStatement l_pStatement = null; ResultSet l_rsSearch = null; - ArrayList arrayListStr = new ArrayList(); try { l_cCon = DBCPDataSource.getConnection(); String l_sSQL = "SELECT * FROM `Sentences`"; l_pStatement = l_cCon.prepareStatement(l_sSQL); l_rsSearch = 
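+ /*
+  * A minimal sketch of the two data-access patterns this class now uses
+  * (assumes Guava's MapMaker plus a JDBC PreparedStatement `ps` and ResultSet
+  * `rs` in scope): reads are copied into a MapMaker-backed ConcurrentMap keyed
+  * by insertion index, and writes go through addBatch()/executeBatch() instead
+  * of one network round trip per row.
+  *
+  *   ConcurrentMap<Integer, String> rows = new MapMaker().concurrencyLevel(2).makeMap();
+  *   int i = 0;
+  *   while (rs.next()) {
+  *       rows.put(i++, rs.getString(1));   // one entry per fetched row
+  *   }
+  *   for (String s : rows.values()) {
+  *       ps.setString(1, s);
+  *       ps.addBatch();                    // queue locally
+  *   }
+  *   ps.executeBatch();                    // flush to MySQL in one batch
+  */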
l_pStatement.executeQuery(); + int ij = 0; while (l_rsSearch.next()) { - arrayListStr.add(l_rsSearch.getString(1)); + allStrings.put(ij, l_rsSearch.getString(1)); + ij++; } + } catch (SQLException ex) { + throw new CustomError("failed in DataMapper " + ex.getMessage()); } finally { CloseConnections(l_pStatement, l_rsSearch, l_cCon); } - return arrayListStr; + return allStrings; } - public static void InsertMYSQLStrings(ArrayList str) throws SQLException { + public static void InsertMYSQLStrings(ConcurrentMap str) throws CustomError { Connection l_cCon = null; PreparedStatement l_pStatement = null; ResultSet l_rsSearch = null; @@ -47,15 +75,35 @@ public class DataMapper { try { l_cCon = DBCPDataSource.getConnection(); l_pStatement = l_cCon.prepareStatement(l_sSQL); - for (String str1 : str) { + for (String str1 : str.values()) { + //System.out.println("adding str1: " + str1 + "\n"); l_pStatement.setString(1, str1); - l_pStatement.execute(); + l_pStatement.addBatch(); } + l_pStatement.executeBatch(); + } catch (SQLException ex) { + throw new CustomError("failed in DataMapper " + ex.getMessage()); } finally { CloseConnections(l_pStatement, l_rsSearch, l_cCon); } } + public static ConcurrentMap getHLstatsMessages() { + ConcurrentMap hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap(); + try (Connection l_cCon = DBCPDataSourceHLstats.getConnection()) { + String l_sSQL = "SELECT message FROM `hlstats_Events_Chat`"; + try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL)) { + try (ResultSet l_rsSearch = l_pStatement.executeQuery()) { + while (l_rsSearch.next()) { + hlStatsMessages.put(hlStatsMessages.size() + 1, l_rsSearch.getString(1)); + } + } + } + } catch (SQLException ex) { + Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex); + } + return hlStatsMessages; + } public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) { if (rs != null) { @@ -80,39 +128,4 @@ public class DataMapper { } } } - - public static void checkStringsToDelete() { - Connection l_cCon = null; - PreparedStatement l_pStatement = null; - ResultSet l_rsSearch = null; - String l_sSQL = "delete from Sentences order by last_used asc LIMIT 15"; - try { - l_cCon = DBCPDataSource.getConnection(); - l_pStatement = l_cCon.prepareStatement(l_sSQL); - l_pStatement.execute(); - } catch (SQLException throwables) { - throwables.printStackTrace(); - } finally { - CloseConnections(l_pStatement, l_rsSearch, l_cCon); - } - } - - public static void updateLastUsed(@NotNull ArrayList mysqlUpdateLastUsed) { - Connection l_cCon = null; - PreparedStatement l_pStatement = null; - ResultSet l_rsSearch = null; - String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)"; - try { - l_cCon = DBCPDataSource.getConnection(); - l_pStatement = l_cCon.prepareStatement(l_sSQL); - for (String str1 : mysqlUpdateLastUsed) { - l_pStatement.setString(1, str1); - l_pStatement.execute(); - } - } catch (SQLException throwables) { - throwables.printStackTrace(); - } finally { - CloseConnections(l_pStatement, l_rsSearch, l_cCon); - } - } } diff --git a/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java b/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java new file mode 100644 index 00000000..a7988d55 --- /dev/null +++ b/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java @@ -0,0 +1,17 @@ +/* + * To change this license header, choose License Headers in Project Properties. 
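+  * (Usage note: CustomError below is the checked wrapper the data layer throws
+  * in place of raw SQLException, so callers catch a single domain type, e.g.
+  *     try { DataMapper.createTables(); }
+  *     catch (CustomError ex) { Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); }
+  * which is the shape Datahandler.initiateMYSQL() uses.)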
+ * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package FunctionLayer; + +/** + * + * @author install1 + */ +public class CustomError extends Exception { + + public CustomError(String msg) { + super(msg); + } +} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java new file mode 100644 index 00000000..bb61c948 --- /dev/null +++ b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java @@ -0,0 +1,825 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package FunctionLayer; + +import DataLayer.DataMapper; +import FunctionLayer.StanfordParser.SentimentAnalyzerTest; +import FunctionLayer.StanfordParser.SentimentValueCache; +import com.google.common.base.Stopwatch; +import com.google.common.collect.MapMaker; +import edu.stanford.nlp.ie.AbstractSequenceClassifier; +import edu.stanford.nlp.ie.crf.CRFClassifier; +import edu.stanford.nlp.ling.CoreLabel; +import edu.stanford.nlp.parser.lexparser.LexicalizedParser; +import edu.stanford.nlp.pipeline.Annotation; +import edu.stanford.nlp.pipeline.CoreDocument; +import edu.stanford.nlp.pipeline.StanfordCoreNLP; +import edu.stanford.nlp.tagger.maxent.MaxentTagger; +import edu.stanford.nlp.trees.GrammaticalStructureFactory; +import edu.stanford.nlp.trees.TreebankLanguagePack; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import static java.lang.Math.random; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.InetAddress; +import java.net.SocketException; +import java.sql.SQLException; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletionService; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.ForkJoinTask; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * + * @author install1 + */ +public class Datahandler { + + public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES); + public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS); + public static Datahandler instance = new Datahandler(); + private static Annotation strAnno; + private static Annotation strAnnoSentiment; + private static Annotation strAnnoJMWE; + private static CoreDocument coreDoc; + private static final ConcurrentMap stringCache = new MapMaker().concurrencyLevel(6).makeMap(); + private static ConcurrentMap pipelineAnnotationCache; + 
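+ /*
+  * The caches declared here memoize one parsed Annotation per input string per
+  * pipeline, so no sentence is re-parsed on later comparisons. A reduced
+  * get-or-compute sketch of that idea (cachedAnnotation is a hypothetical
+  * helper, not a member of this class):
+  *
+  *   private static Annotation cachedAnnotation(String str,
+  *           ConcurrentMap<String, Annotation> cache, StanfordCoreNLP nlp) {
+  *       return cache.computeIfAbsent(str, s -> {
+  *           Annotation ann = new Annotation(s); // wrap the raw text
+  *           nlp.annotate(ann);                  // run the pipeline once
+  *           ann.compact();                      // shrink before caching
+  *           return ann;
+  *       });
+  *   }
+  */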
private static ConcurrentMap pipelineSentimentAnnotationCache; + private static ConcurrentMap jmweAnnotationCache; + private static ConcurrentMap coreDocumentAnnotationCache; + private static ConcurrentMap sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap(); + private LinkedHashMap> lHMSMX = new LinkedHashMap(); + private final Stopwatch stopwatch; + private static String similar = ""; + private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"; + private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"; + private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"; + private static String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"; + private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"; + private static String nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"; + private static String nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"; + private static final String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"; + private static MaxentTagger tagger; + private static String[] options = {"-maxLength", "100"}; + private static Properties props = new Properties(); + private static Properties propsSentiment = new Properties(); + private static GrammaticalStructureFactory gsf; + private static LexicalizedParser lp; + private static TreebankLanguagePack tlp; + private static AbstractSequenceClassifier classifier; + // set up Stanford CoreNLP pipeline + private static final StanfordCoreNLP pipeline = getPipeLineSetUp(); + private static StanfordCoreNLP pipelineSentiment; + + public Datahandler() { + this.stopwatch = Stopwatch.createUnstarted(); + this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap(); + this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap(); + this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap(); + this.coreDocumentAnnotationCache = new MapMaker().concurrencyLevel(5).makeMap(); + } + + public static StanfordCoreNLP getPipeline() { + return pipeline; + } + + private static StanfordCoreNLP getPipeLineSetUp() { + props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse"); + props.setProperty("parse.model", shiftReduceParserPath); + props.setProperty("parse.maxlen", "90"); + props.setProperty("parse.binaryTrees", "true"); + props.setProperty("threads", "8"); + props.setProperty("pos.maxlen", "90"); + props.setProperty("tokenize.maxlen", "90"); + props.setProperty("ssplit.maxlen", "90"); + props.setProperty("lemma.maxlen", "90"); + props.setProperty("ner.model", nerModel + "," + nerModel2 + "," + nerModel3); + props.setProperty("ner.combinationMode", "HIGH_RECALL"); + props.setProperty("regexner.ignorecase", "true"); + props.setProperty("ner.fine.regexner.ignorecase", "true"); + props.setProperty("tokenize.options", "untokenizable=firstDelete"); + return new StanfordCoreNLP(props); + } + + public void shiftReduceParserInitiate() { + //got 8 cores + CountDownLatch cdl = new CountDownLatch(2); + new Thread(() -> { + try { + classifier = CRFClassifier.getClassifierNoExceptions(nerModel); + } catch (ClassCastException ex) { + 
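+ /*
+  * Initialization pattern used by this method: the two expensive model loads
+  * run on their own threads and the caller joins on a CountDownLatch(2). The
+  * bare shape, with loadNer()/loadSentiment() as stand-ins for the two blocks
+  * (InterruptedException handling elided):
+  *
+  *   CountDownLatch done = new CountDownLatch(2);
+  *   new Thread(() -> { loadNer();       done.countDown(); }).start();
+  *   new Thread(() -> { loadSentiment(); done.countDown(); }).start();
+  *   done.await(); // resumes only after both workers have counted down
+  */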
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + cdl.countDown(); + }).start(); + new Thread(() -> { + propsSentiment.setProperty("parse.model", lexParserEnglishRNN); + propsSentiment.setProperty("sentiment.model", sentimentModel); + propsSentiment.setProperty("parse.maxlen", "90"); + propsSentiment.setProperty("threads", "8"); + propsSentiment.setProperty("pos.maxlen", "90"); + propsSentiment.setProperty("tokenize.maxlen", "90"); + propsSentiment.setProperty("ssplit.maxlen", "90"); + propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword"); //coref too expensive memorywise + propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator"); + propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList); + propsSentiment.setProperty("tokenize.options", "untokenizable=firstDelete"); + pipelineSentiment = new StanfordCoreNLP(propsSentiment); + tagger = new MaxentTagger(taggerPath); + cdl.countDown(); + }).start(); + lp = LexicalizedParser.loadModel(lexParserEnglishRNN, options); + tlp = lp.getOp().langpack(); + gsf = tlp.grammaticalStructureFactory(); + try { + cdl.await(); + } catch (InterruptedException ex) { + //System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n"); + } + System.out.println("finished shiftReduceParserInitiate\n"); + } + + public static AbstractSequenceClassifier getClassifier() { + return classifier; + } + + public static void setClassifier(AbstractSequenceClassifier classifier) { + Datahandler.classifier = classifier; + } + + public void updateStringCache() { + try { + checkIfUpdateStrings(); + } catch (CustomError ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + } + + public static GrammaticalStructureFactory getGsf() { + return gsf; + } + + public static MaxentTagger getTagger() { + return tagger; + } + + private Map getCache() throws SQLException, IOException, CustomError { + return DataMapper.getAllStrings(); + } + + public int getlHMSMXSize() { + return lHMSMX.size(); + } + + public int getstringCacheSize() { + return stringCache.size(); + } + + public void initiateMYSQL() throws SQLException, IOException { + try { + DataMapper.createTables(); + stringCache.putAll(getCache()); + // lHMSMX = DataMapper.getAllRelationScores(); + } catch (CustomError ex) { + Logger.getLogger(Datahandler.class + .getName()).log(Level.SEVERE, null, ex); + } + } + + public void addHLstatsMessages() { + ConcurrentMap hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap strCacheLocal = stringCache; + Collection strs = DataMapper.getHLstatsMessages().values(); + for (String str : strs) { + if (hlStatsMessages.get(str) == null) { + hlStatsMessages.put(str, hlStatsMessages.size()); + } + } + int capacity = 150; + hlStatsMessages.keySet().forEach(str -> { + if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) { + String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null); + if (orElse == null) { + MessageResponseHandler.getMessage(str); + } + } + }); + } + + public void instantiateAnnotationMapJMWE() { + if (!stringCache.isEmpty()) { + ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values()); + for (Entry entries : jmweAnnotation.entrySet()) { + jmweAnnotationCache.put(entries.getKey(), entries.getValue()); + } + } + } + + public void instantiateAnnotationMap() 
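+ /*
+  * Bulk-annotation shape used below: cheap Annotation wrappers are created
+  * first (safe from a parallel stream), then the pipeline receives the whole
+  * collection in one annotate(...) call so CoreNLP can spread the work over
+  * its own "threads" pool. A sketch, with strings/cache as hypothetical locals:
+  *
+  *   ConcurrentMap<String, Annotation> batch = new MapMaker().concurrencyLevel(2).makeMap();
+  *   strings.parallelStream().forEach(s -> batch.put(s, new Annotation(s)));
+  *   pipeline.annotate(batch.values());  // one scheduling pass for the batch
+  *   batch.forEach((s, ann) -> { ann.compact(); cache.put(s, ann); });
+  */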
{ + if (!stringCache.isEmpty()) { + ConcurrentMap Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap(); + stringCache.values().parallelStream().forEach(str -> { + Annotation strAnno = new Annotation(str); + strAnno.compact(); + Annotationspipeline.put(str, strAnno); + Annotation strAnno2 = new Annotation(str); + strAnno2.compact(); + AnnotationspipelineSentiment.put(str, strAnno2); + }); + ConcurrentMap coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values(), pipeline); + pipeline.annotate(Annotationspipeline.values()); + pipelineSentiment.annotate(AnnotationspipelineSentiment.values()); + Annotationspipeline.entrySet().forEach(pipelineEntry -> { + //relatively experimental change + pipelineEntry.getValue().compact(); + pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); + }); + AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> { + pipelineEntry.getValue().compact(); + pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); + }); + coreDocumentpipelineMap.entrySet().stream().forEach(CD -> { + coreDocumentAnnotationCache.put(CD.getKey(), CD.getValue()); + }); + } + } + + private ConcurrentMap futuresReturnOverallEvaluation(List similarityMatrixes) { + ConcurrentMap strmapreturn = new MapMaker().concurrencyLevel(6).makeMap(); + if (!similarityMatrixes.isEmpty()) { + int iterator = 0; + for (SimilarityMatrix SMX : similarityMatrixes) { + final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance(); + if (scoreRelationNewMsgToRecentMsg > 0.0) { + strmapreturn = addSMXToMapReturn(strmapreturn, SMX); + } + //System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator); + iterator++; + } + } + return strmapreturn; + } + + private ConcurrentMap addSMXToMapReturn(ConcurrentMap strmapreturn, SimilarityMatrix SMX) { + if (!strmapreturn.containsValue(SMX.getPrimaryString())) { + strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString()); + String transmittedStr = SMX.getSecondaryString(); + SentimentValueCache cacheValue1 = SMX.getCacheValue1(); + SentimentValueCache cacheValue2 = SMX.getCacheValue2(); + if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(SMX.getPrimaryString())) { + sentimentCachingMap.put(SMX.getSecondaryString(), SMX.getCacheValue1()); + } + if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) { + sentimentCachingMap.put(transmittedStr, SMX.getCacheValue2()); + } + } + return strmapreturn; + } + + private List StrComparringNoSentenceRelationMap( + ConcurrentMap strCacheLocal, Collection strCollection, ConcurrentMap localJMWEMap, + ConcurrentMap localPipelineAnnotation, ConcurrentMap localPipelineSentimentAnnotation, + ConcurrentMap localCoreDocumentMap) { + ExecutorService threadPool = Executors.newCachedThreadPool(); + CompletionService ecs = new ExecutorCompletionService<>(threadPool); + int index = 0; + int prefix_size = 150; + SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(strCollection, null); + List smxReturnList = new ArrayList(); + if (strCacheLocal.size() < prefix_size) + { + for (String colStr : strCollection) + { + strCacheLocal.put(strCacheLocal.size(), colStr); + } + } + + + + for (String str1 : strCollection) { + for (String str : strCollection) { + if (!str.equals(str1)) { + SimilarityMatrix SMXInit = new SimilarityMatrix(str, 
str1); + SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null); + Callable worker; + if (stringCache.size() < prefix_size) { + worker = new SentimentAnalyzerTest(str, str1, SMXInit, + localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str), + localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str), + localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1); + } else { + worker = new SentimentAnalyzerTest(str, str1, SMXInit, + localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str), + pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str), + pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1); + } + ecs.submit(worker); + index++; + if (index % 1000 == 0 && index > 0) { + for (int i = 0; i < index; i++) { + try { + Future take = ecs.take(); + SimilarityMatrix smx = take.get(); + if (smx != null) { + smxReturnList.add(smx); + } + } catch (InterruptedException | ExecutionException ex) { + // + } + } + index = 0; + //System.out.println("smxReturnList size iterating ECS.take(): " + smxReturnList.size()); + } + } + } + } + double distance_requirement = 15500.0; + for (int i = 0; i < index; i++) { + try { + Future take = ecs.take(); + SimilarityMatrix smx = take.get(); + + if (smx != null && smx.getDistance() > distance_requirement) { + smxReturnList.add(smx); + } + } catch (InterruptedException | ExecutionException ex) { + // + } + } + //System.out.println("smxReturnList size: " + smxReturnList.size()); + threadPool.shutdown(); + return smxReturnList; + } + + private ConcurrentMap stringIteratorComparator(ConcurrentMap strmap, + ConcurrentMap strCacheLocal, ConcurrentMap localJMWEMap, + ConcurrentMap localPipelineAnnotation, ConcurrentMap localPipelineSentimentAnnotation, + ConcurrentMap localCoreDocumentMap) { + //System.out.println("strmap siuze: " + strmap.size()); + List StrComparringNoSentenceRelationMap = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values(), + localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap); + Collections.sort(StrComparringNoSentenceRelationMap, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString())); + ConcurrentMap strmapreturn = futuresReturnOverallEvaluation(StrComparringNoSentenceRelationMap); + //System.out.println("strmapreturn size: " + strmapreturn.size()); + return strmapreturn; + } + + private ConcurrentMap removeNonSensicalStrings(ConcurrentMap strmap) { + final ConcurrentMap strCacheLocal = stringCache; + final ConcurrentMap localJMWEMap = getMultipleJMWEAnnotation(strmap.values()); + final ConcurrentMap localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values()); + final ConcurrentMap localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values()); + final ConcurrentMap localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline); + return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap); + } + + public synchronized void checkIfUpdateStrings() throws CustomError { + if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) { + ConcurrentMap str = MessageResponseHandler.getStr(); + 
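+ /*
+  * The pairwise scoring used by removeNonSensicalStrings above funnels through
+  * an ExecutorCompletionService: submit() one SentimentAnalyzerTest per pair,
+  * then take() futures in completion order, draining every 1000 submissions so
+  * the result queue stays bounded. Skeleton of that pattern (checked
+  * exceptions elided, `tasks` and `results` hypothetical):
+  *
+  *   ExecutorService pool = Executors.newCachedThreadPool();
+  *   CompletionService<SimilarityMatrix> ecs = new ExecutorCompletionService<>(pool);
+  *   int submitted = 0;
+  *   for (Callable<SimilarityMatrix> task : tasks) { ecs.submit(task); submitted++; }
+  *   for (int i = 0; i < submitted; i++) {
+  *       SimilarityMatrix smx = ecs.take().get(); // blocks for the next finished task
+  *       if (smx != null) results.add(smx);
+  *   }
+  *   pool.shutdown();
+  */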
System.out.println("str size: " + str.size()); + str = filterContent(str); + str = removeNonSensicalStrings(str); + //System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n"); + str = annotationCacheUpdate(str); + System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n"); + ConcurrentMap strf = str; + if (!stringCache.isEmpty()) { + new Thread(() -> { + try { + DataMapper.InsertMYSQLStrings(strf); + } catch (CustomError ex) { + Logger.getLogger(Datahandler.class + .getName()).log(Level.SEVERE, null, ex); + } + MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(6).makeMap()); + }).start(); + } else { + try { + DataMapper.InsertMYSQLStrings(strf); + } catch (CustomError ex) { + Logger.getLogger(Datahandler.class + .getName()).log(Level.SEVERE, null, ex); + } + MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap()); + } + if (!stopwatch.isRunning()) { + stopwatch.start(); + } else { + stopwatch.reset(); + } + } + } + + private String trimString(String str) { + str = str.trim(); + if (str.startsWith("<@")) { + str = str.substring(str.indexOf("> ") + 2); + } + return str; + } + + private String getResponseFutures(String strF) { + List values_copy = new ArrayList(stringCache.values()); + Collections.shuffle(values_copy); + double preRelationUserCounters = -155000.0; + List concurrentRelations = new ArrayList(); + for (String str1 : values_copy) { + if (!strF.equals(str1)) { + SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null); + Callable worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1), + strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno, + pipelineAnnotationCache.get(str1), strAnnoSentiment, + pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1); + try { + SimilarityMatrix getSMX = worker.call(); + if (getSMX != null) { + Double scoreRelationLastUserMsg = getSMX.getDistance(); + if (scoreRelationLastUserMsg > preRelationUserCounters) { + preRelationUserCounters = scoreRelationLastUserMsg; + concurrentRelations.add(getSMX.getSecondaryString()); + //System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n"); + //System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n"); + } + } + } catch (Exception ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + } + } + + StringBuilder SB = new StringBuilder(); + double randomLenghtPermit = strF.length() * ((Math.random() * Math.random() * Math.random()) * 5); + Collections.reverse(concurrentRelations); + if (concurrentRelations.isEmpty()) { + return "failure, preventing stuckness"; + } + String firstRelation = concurrentRelations.get(0); + for (String secondaryRelation : concurrentRelations) { + if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) { + break; + } + boolean append = appendToString(firstRelation, secondaryRelation); + if (append) { + SB.append(secondaryRelation).append(" "); + } + } + return SB.toString(); + } + + private boolean appendToString(String firstRelation, String secondaryRelation) { + if (firstRelation.equals(secondaryRelation)) { + return true; + } + Double scoreRelationStrF = getScoreRelationStrF(firstRelation, secondaryRelation); + if (scoreRelationStrF > 1900) { + return true; + } + return false; + } + + public String getResponseMsg(String str) throws CustomError { + String 
strF = trimString(str); + getSingularAnnotation(strF); + return getResponseFutures(strF); + } + + public void getSingularAnnotation(String str) { + strAnno = new Annotation(str); + strAnno.compact(); + pipeline.annotate(strAnno); + strAnnoSentiment = new Annotation(str); + strAnnoSentiment.compact(); + pipelineSentiment.annotate(strAnnoSentiment); + List notactualList = new ArrayList(); + notactualList.add(str); + ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList); + strAnnoJMWE = jmweAnnotation.values().iterator().next(); + strAnnoJMWE.compact(); + CoreDocument coreDocument = new CoreDocument(str); + pipeline.annotate(coreDocument); + coreDoc = coreDocument; + } + + private static ConcurrentMap getMultipleJMWEAnnotation(Collection str) { + ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str); + return jmweAnnotation; + } + + private static ConcurrentMap getMultiplePipelineAnnotation(Collection str) { + ConcurrentMap pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String str1 : str) { + Annotation strAnno1 = new Annotation(str1); + pipelineAnnotationMap.put(str1, strAnno1); + } + pipeline.annotate(pipelineAnnotationMap.values()); + return pipelineAnnotationMap; + } + + private static ConcurrentMap getMultiplePipelineSentimentAnnotation(Collection str) { + ConcurrentMap pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String str1 : str) { + Annotation strAnno1 = new Annotation(str1); + pipelineAnnotationMap.put(str1, strAnno1); + } + pipelineSentiment.annotate(pipelineAnnotationMap.values()); + return pipelineAnnotationMap; + } + + private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) { + SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg); + SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null); + SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null); + Callable worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX, + jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str), + pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str), + pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str), + coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2); + SimilarityMatrix callSMX = null; + try { + callSMX = worker.call(); + } catch (Exception ex) { + Logger.getLogger(Datahandler.class + .getName()).log(Level.SEVERE, null, ex); + } + if (callSMX != null) { + double smxDistance = callSMX.getDistance(); + return smxDistance; + } + return 0.0; + } + + private Double getScoreRelationStrF(String str, String mostRecentMsg) { + SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg); + SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null); + SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null); + Callable worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX, + strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno, + pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment, + pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2); + SimilarityMatrix callSMX = null; + try { + callSMX = worker.call(); + } catch (Exception ex) { + 
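+ /*
+  * Note the execution style here: the Callable is never handed to a pool,
+  * call() runs the scoring synchronously on the current thread, which is why
+  * the broad catch is needed (Callable.call() declares Exception). Minimal form:
+  *
+  *   Callable<SimilarityMatrix> worker = ...; // a SentimentAnalyzerTest
+  *   SimilarityMatrix smx = null;
+  *   try {
+  *       smx = worker.call();                 // direct, same-thread invocation
+  *   } catch (Exception e) {
+  *       // log and fall through; the score defaults to 0.0
+  *   }
+  *   double score = (smx != null) ? smx.getDistance() : 0.0;
+  */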
Logger.getLogger(Datahandler.class + .getName()).log(Level.SEVERE, null, ex); + } + if (callSMX != null) { + double smxDistance = callSMX.getDistance(); + return smxDistance; + } + return 0.0; + } + + public static ConcurrentMap filterContent(ConcurrentMap str) { + ConcurrentMap strlistreturn = new MapMaker().concurrencyLevel(2).makeMap(); + str.values().forEach(str1 -> { + if (!str1.isEmpty() && str1.length() > 3) { + str1 = str1.trim(); + if (str1.contains("PM*")) { + str1 = str1.substring(str1.indexOf("PM*") + 3); + } + if (str1.contains("AM*")) { + str1 = str1.substring(str1.indexOf("AM*") + 3); + } + /* + if (str1.contains("?") || str1.contains("°")) + { + if (!str1.contains("http")) + { + str1 = str1.replace("?", " <:wlenny:514861023002624001> "); + str1 = str1.replace("°", " <:wlenny:514861023002624001> "); + } + } + */ + if (str1.contains("(Counter-Terrorist)")) { + str1 = str1.replace("(Counter-Terrorist)", " "); + } + if (str1.contains("(Terrorist)")) { + str1 = str1.replace("(Terrorist)", " "); + } + if (str1.contains("(Spectator)")) { + str1 = str1.replace("(Spectator)", " "); + } + if (str1.contains("*DEAD*")) { + str1 = str1.replace("*DEAD*", " "); + } + if (str1.contains("{red}")) { + str1 = str1.replace("{red}", " "); + } + if (str1.contains("{orange}")) { + str1 = str1.replace("{orange}", " "); + } + if (str1.contains("{yellow}")) { + str1 = str1.replace("{yellow}", " "); + } + if (str1.contains("{green}")) { + str1 = str1.replace("{green}", " "); + } + if (str1.contains("{lightblue}")) { + str1 = str1.replace("{lightblue}", " "); + } + if (str1.contains("{blue}")) { + str1 = str1.replace("{blue}", " "); + } + if (str1.contains("{purple}")) { + str1 = str1.replace("{purple}", " "); + } + if (str1.contains("{white}")) { + str1 = str1.replace("{white}", " "); + } + if (str1.contains("{fullblue}")) { + str1 = str1.replace("{fullblue}", " "); + } + if (str1.contains("{cyan}")) { + str1 = str1.replace("{cyan}", " "); + } + if (str1.contains("{lime}")) { + str1 = str1.replace("{lime}", " "); + } + if (str1.contains("{deeppink}")) { + str1 = str1.replace("{deeppink}", " "); + } + if (str1.contains("{slategray}")) { + str1 = str1.replace("{slategray}", " "); + } + if (str1.contains("{dodgerblue}")) { + str1 = str1.replace("{dodgerblue}", " "); + } + if (str1.contains("{black}")) { + str1 = str1.replace("{black}", " "); + } + if (str1.contains("{orangered}")) { + str1 = str1.replace("{orangered}", " "); + } + if (str1.contains("{darkorchid}")) { + str1 = str1.replace("{darkorchid}", " "); + } + if (str1.contains("{pink}")) { + str1 = str1.replace("{pink}", " "); + } + if (str1.contains("{lightyellow}")) { + str1 = str1.replace("{lightyellow}", " "); + } + if (str1.contains("{chocolate}")) { + str1 = str1.replace("{chocolate}", " "); + } + if (str1.contains("{beige}")) { + str1 = str1.replace("{beige}", " "); + } + if (str1.contains("{azure}")) { + str1 = str1.replace("{azure}", " "); + } + if (str1.contains("{yellowgreen}")) { + str1 = str1.replace("{yellowgreen}", " "); + } + str1 = str1.trim(); + if (str1.length() > 2 && (!str1.startsWith("!"))) { + strlistreturn.put(strlistreturn.size(), str1); + } + } + }); + return strlistreturn; + } + + private ConcurrentMap annotationCacheUpdate(ConcurrentMap strmap) { + ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values()); + for (Entry jmweitr : jmweAnnotation.entrySet()) { + jmweAnnotationCache.put(jmweitr.getKey(), jmweitr.getValue()); + } + ConcurrentMap Annotationspipeline = new 
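+ /*
+  * filterContent above normalizes HLstats/game chat by stripping a fixed set
+  * of markup tokens one replace at a time. The same cleanup can be written
+  * table-driven; a sketch of that alternative, not what this patch does:
+  *
+  *   static final String[] TAGS = { "*DEAD*", "(Counter-Terrorist)", "(Terrorist)",
+  *           "(Spectator)", "{red}", "{orange}", "{yellow}", "{green}", "{blue}" }; // etc.
+  *   static String stripTags(String s) {
+  *       for (String tag : TAGS) s = s.replace(tag, " ");
+  *       return s.trim();
+  *   }
+  */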
MapMaker().concurrencyLevel(4).makeMap(); + ConcurrentMap AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap(); + ConcurrentMap coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline); + strmap.values().forEach(str -> { + Annotation strAnno1 = new Annotation(str); + Annotationspipeline.put(str, strAnno1); + Annotation strAnno2 = new Annotation(str); + AnnotationspipelineSentiment.put(str, strAnno2); + stringCache.put(stringCache.size() + 1, str); + }); + pipeline.annotate(Annotationspipeline.values()); + pipelineSentiment.annotate(AnnotationspipelineSentiment.values()); + Annotationspipeline.entrySet().forEach(pipelineEntry -> { + if (pipelineEntry != null) { + pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); + } + }); + AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> { + if (pipelineEntry != null) { + pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); + } + }); + coreDocumentpipelineMap.entrySet().forEach(coreDocumentEntry -> { + coreDocumentAnnotationCache.put(coreDocumentEntry.getKey(), coreDocumentEntry.getValue()); + }); + return strmap; + } + + public int getMessageOverHead() { + return stringCache.values().size() - (stringCache.values().size() / 10); + } + + public void update_autismo_socket_msg() { + try { + try (DatagramSocket serverSocket = new DatagramSocket(48477)) { + try (DatagramSocket serverSocket1 = new DatagramSocket(48478)) { + byte[] receiveData = new byte[4096]; + InetAddress IPAddress = InetAddress.getByName("144.76.218.19"); + DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length); + while (true) { + serverSocket.receive(receivePacket); + String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength()); + sentence = sentence.replace("clientmessage:", ""); + String getResponseMsg = getResponseMsg(sentence); + byte[] sendData = getResponseMsg.getBytes("UTF-8"); + DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48477); + serverSocket.send(sendPacket); + + receivePacket = new DatagramPacket(receiveData, receiveData.length); + serverSocket1.receive(receivePacket); + sentence = new String(receivePacket.getData(), 0, receivePacket.getLength()); + sentence = sentence.replace("clientmessage:", ""); + getResponseMsg = getResponseMsg(sentence); + sendData = getResponseMsg.getBytes("UTF-8"); + sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48478); + serverSocket1.send(sendPacket); + } + } + } catch (CustomError ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + } catch (SocketException ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } catch (UnsupportedEncodingException ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } catch (IOException ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + } + + private static class AnnotationCollector implements Consumer { + + private static int i = 0; + private List annotationsT = new ArrayList(); + + @Override + public void accept(T ann) { + //System.out.println("adding ann: " + ann.toString()); + annotationsT.add(ann); + } + } + + public static ConcurrentMap getMultipleCoreDocumentsWaySuggestion(Collection str, StanfordCoreNLP localNLP) { + AnnotationCollector annCollector = new AnnotationCollector(); + for (String exampleString : str) { + 
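+ /*
+  * annotate(Annotation, Consumer) is the asynchronous overload: each callback
+  * fires when the pipeline finishes that document, and the fixed
+  * Thread.sleep(8000) further down is what waits for the callbacks to drain.
+  * The same wait expressed with a latch instead of a fixed sleep (a sketch
+  * under that assumption, not what this method does; InterruptedException
+  * handling elided):
+  *
+  *   CountDownLatch pending = new CountDownLatch(inputs.size());
+  *   List<Annotation> collected = Collections.synchronizedList(new ArrayList<>());
+  *   for (String s : inputs) {
+  *       localNLP.annotate(new Annotation(s), ann -> { collected.add(ann); pending.countDown(); });
+  *   }
+  *   pending.await(); // returns once every callback has fired
+  */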
localNLP.annotate(new Annotation(exampleString), annCollector); + annCollector.i++; + //System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n"); + } + try { + Thread.sleep(8000); + } catch (InterruptedException ex) { + Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); + } + ConcurrentMap annotationreturnMap = new MapMaker().concurrencyLevel(6).makeMap(); + for (Annotation ann : annCollector.annotationsT) { + if (ann != null) { + ann.compact(); + CoreDocument CD = new CoreDocument(ann); + annotationreturnMap.put(CD.text(), CD); + //System.out.println("CD text:" + CD.text() + "\niterator: " + iterator + "\nsize: " + annCollector.annotationsT.size()); + } + } + return annotationreturnMap; + } +} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt deleted file mode 100644 index 1bce0028..00000000 --- a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt +++ /dev/null @@ -1,658 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ -package FunctionLayer - -import DataLayer.DataMapper -import FunctionLayer.StanfordParser.SentimentAnalyzerTest -import com.google.common.base.Stopwatch -import edu.mit.jmwe.data.IMWE -import edu.mit.jmwe.data.IToken -import edu.stanford.nlp.ie.AbstractSequenceClassifier -import edu.stanford.nlp.ie.crf.CRFClassifier -import edu.stanford.nlp.ling.CoreAnnotations -import edu.stanford.nlp.ling.CoreLabel -import edu.stanford.nlp.ling.TaggedWord -import edu.stanford.nlp.parser.lexparser.LexicalizedParser -import edu.stanford.nlp.pipeline.Annotation -import edu.stanford.nlp.pipeline.CoreDocument -import edu.stanford.nlp.pipeline.StanfordCoreNLP -import edu.stanford.nlp.tagger.maxent.MaxentTagger -import edu.stanford.nlp.trees.* -import edu.stanford.nlp.util.CoreMap -import kotlinx.coroutines.* -import org.ejml.simple.SimpleMatrix -import java.util.* -import java.util.concurrent.TimeUnit -import java.util.regex.Pattern -import kotlin.collections.ArrayList -import kotlin.collections.HashMap - - -/** - * - * @author install1 - */ -public class Datahandler { - private val stopwatch: Stopwatch - private val EXPIRE_TIME_IN_MINUTES = TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES) - private var pipelineAnnotationCache: HashMap - private var pipelineSentimentAnnotationCache = HashMap() - private var coreDocumentAnnotationCache: HashMap - private var jmweAnnotationCache = HashMap() - private var stringCache = ArrayList() - - //private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz" - private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz" - private var tagger: MaxentTagger = MaxentTagger() - private var gsf: GrammaticalStructureFactory - private var classifier: AbstractSequenceClassifier - - //SentimentAnalyzer Hashmaps - private var tokenizeCountingHashMap: HashMap = HashMap() - private var taggedWordListHashMap: HashMap>> = HashMap() - private var retrieveTGWListHashMap: HashMap> = - HashMap() - private var sentences1HashMap: HashMap> = HashMap() - private var sentencesSentimentHashMap: HashMap> = HashMap() - private var trees1HashMap: HashMap> = HashMap() - private var grammaticalStructureHashMap: HashMap> = - HashMap() - private var typedDependenciesHashMap: HashMap> = - HashMap() - private var 
rnnCoreAnnotationsPredictedHashMap: HashMap> = HashMap() - private var simpleMatricesHashMap: HashMap> = HashMap() - private var simpleMatricesNodevectorsHashMap: HashMap> = HashMap() - private var listHashMap: HashMap> = HashMap() - private var longestHashMap: HashMap = HashMap() - private var sentimentHashMap: HashMap = HashMap() - private var imwesHashMap: HashMap>> = HashMap() - private var InflectedCounterNegativeHashMap: HashMap = HashMap() - private var InflectedCounterPositiveHashMap: HashMap = HashMap() - private var tokenEntryHashMap: HashMap> = HashMap() - private var MarkedContinuousCounterHashMap: HashMap = HashMap() - private var UnmarkedPatternCounterHashMap: HashMap = HashMap() - private var strTokensIpartFormHashMap: HashMap> = HashMap() - private var tokenFormsHashMap: HashMap> = HashMap() - private var strTokenEntryGetPOSHashMap: HashMap> = HashMap() - private var intTokenEntyCountsHashMap: HashMap> = HashMap() - private var ITokenTagsHashMap: HashMap> = HashMap() - private var strTokenStemsHashMap: HashMap> = HashMap() - private var AnotatorcounterHashMap: HashMap = HashMap() - private var TokensCounterHashMap: HashMap = HashMap() - private var entityTokenTagsHashMap: HashMap> = HashMap() - private var nerEntitiesHashMap: HashMap> = HashMap() - private var nerEntitiesTypeHashMap: HashMap> = HashMap() - private var stopWordTokenHashMap: HashMap> = HashMap() - private var stopWordLemmaHashMap: HashMap> = HashMap() - private var PairCounterHashMap: HashMap = HashMap() - - constructor() { - stopwatch = Stopwatch.createUnstarted() - jmweAnnotationCache = HashMap() - pipelineAnnotationCache = HashMap() - pipelineSentimentAnnotationCache = HashMap() - coreDocumentAnnotationCache = HashMap() - gsf = initiateGrammaticalStructureFactory() - classifier = CRFClassifier.getClassifierNoExceptions(nerModel) - } - - fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory { - val options = arrayOf("-maxLength", "100") - //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" - val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz" - val lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, *options) - val tlp = lp.getOp().langpack() - return tlp.grammaticalStructureFactory() - } - - public fun pipeLineSetUp(): StanfordCoreNLP { - val props = Properties() - val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz" - //val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz" - val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz" - //val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz" - val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz" - props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse") - props.setProperty("parse.model", shiftReduceParserPath) - props.setProperty("parse.maxlen", "90") - props.setProperty("parse.binaryTrees", "true") - props.setProperty("threads", "5") - props.setProperty("pos.maxlen", "90") - props.setProperty("tokenize.maxlen", "90") - props.setProperty("ssplit.maxlen", "90") - props.setProperty("lemma.maxlen", "90") - props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3") - props.setProperty("ner.combinationMode", "HIGH_RECALL") - props.setProperty("regexner.ignorecase", "true") - props.setProperty("ner.fine.regexner.ignorecase", "true") - props.setProperty("tokenize.options", "untokenizable=firstKeep") - 
return StanfordCoreNLP(props) - } - - fun shiftReduceParserInitiate(): StanfordCoreNLP { - val propsSentiment = Properties() - //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" - val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz" - val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz" - //val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger" - val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger" - val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," + - "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with" - propsSentiment.setProperty("parse.model", lexParserEnglishPCFG) - propsSentiment.setProperty("sentiment.model", sentimentModel) - propsSentiment.setProperty("parse.maxlen", "90") - propsSentiment.setProperty("threads", "5") - propsSentiment.setProperty("pos.maxlen", "90") - propsSentiment.setProperty("tokenize.maxlen", "90") - propsSentiment.setProperty("ssplit.maxlen", "90") - propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise - propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator") - propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList) - propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep") - tagger = MaxentTagger(taggerPath) - - println("finished shiftReduceParserInitiate\n") - return StanfordCoreNLP(propsSentiment) - } - - fun updateStringCache() { - if (stopwatch.elapsed(TimeUnit.MINUTES) >= EXPIRE_TIME_IN_MINUTES || !stopwatch.isRunning) { - if (!stopwatch.isRunning) { - stopwatch.start() - } else { - stopwatch.reset() - } - stringCache.sortWith(Comparator.comparingInt(String::length).reversed()); - System.out.println("pre InsertMYSQLStrings") - val arrayList = java.util.ArrayList(stringCache) - DataMapper.InsertMYSQLStrings(arrayList) - DataMapper.checkStringsToDelete(); - stringCache = ArrayList(); - initiateMYSQL(); - } - } - - fun initiateMYSQL() { - stringCache.addAll(DataMapper.getAllStrings()) - } - - private fun trimString(str: String): String { - var message = str.trim { it <= ' ' } - if (message.startsWith("<@")) { - message = message.substring(message.indexOf("> ") + 2) - } - if (!message.isEmpty()) { - message = message.replace("@", "") - if (message.contains("<>")) { - message = message.substring(message.indexOf(">")) - } - if (message.startsWith("[ *")) { - message = message.substring(message.indexOf("]")) - } - } - return message - } - - private fun createStrAnnotation(str: String, stanfordCoreNLP: StanfordCoreNLP, sentimentBool: Boolean) { - val strAnno2 = Annotation(str) - strAnno2.compact() - stanfordCoreNLP.annotate(strAnno2) - if (sentimentBool) { - pipelineSentimentAnnotationCache.put(str, strAnno2) - } else { - pipelineAnnotationCache.put(str, strAnno2) - } - } - - private fun getResponseFutures(strF: String, stanfordCoreNLP: StanfordCoreNLP, stanfordCoreNLPSentiment: StanfordCoreNLP): String { - val strAnno: Annotation = Annotation(strF) - strAnno.compact() - stanfordCoreNLP.annotate(strAnno) - - val strAnnoSentiment: Annotation = Annotation(strF) - strAnnoSentiment.compact() - stanfordCoreNLPSentiment.annotate(strAnnoSentiment) - - val coreDocument = CoreDocument(strF) - stanfordCoreNLP.annotate(coreDocument) - - val values_copy: List = 
ArrayList(stringCache) - var preRelationUserCounters = -155000.0 - val concurrentRelations: MutableList = arrayListOf() - val SB = StringBuilder() - var jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF) - var tokenizeCountingF: Int? = null - var taggedWordListF: List>? = null - var retrieveTGWListF: java.util.ArrayList? = null - var sentencesF: List? = null - var sentencesSentimentF: List? = null - var coreMaps1: List = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java) - var treesF: java.util.ArrayList? = null - var grammaticalStructuresF: ArrayList? = null - var typedDependenciesF: java.util.ArrayList? = null - var rnnCoreAnnotationsPredictedF: java.util.ArrayList? = null - var simpleMatricesF: java.util.ArrayList? = null - var simpleMatricesNodevectorsF: java.util.ArrayList? = null - var listF: MutableList? = null - var longestF: Int? = null - var sentimentLongestF: Int? = null - var imwesF: List>? = null - var InflectedCounterNegativeF: Int? = null - var InflectedCounterPositiveF: Int? = null - var tokenEntryF: ArrayList? = null - var MarkedContinuousCounterF: Int? = null - var UnmarkedPatternCounterF: Int? = null - var strTokensIpartFormF: ArrayList? = null - var tokenFormsF: java.util.ArrayList? = null - var strTokenEntryGetPOSF: ArrayList? = null - var intTokenEntyCountsF: java.util.ArrayList? = null - var ITokenTagsF: ArrayList? = null - var strTokenStemsF: java.util.ArrayList? = null - var AnotatorcounterF: Int? = null - var TokensCounterF: Int? = null - var entityTokenTagsF: java.util.ArrayList? = null - var nerEntitiesF: java.util.ArrayList? = null - var nerEntitiesTypeF: java.util.ArrayList? = null - var stopWordTokenF: java.util.ArrayList? = null - var stopWordLemmaF: java.util.ArrayList? = null - var PairCounterF: Int? = null - for (str1 in values_copy) { - if (strF != str1) { - val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null) - val annotation4 = pipelineAnnotationCache.getOrDefault(str1, null) - val coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null) - var jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null) - if (annotation2 == null) { - createStrAnnotation(str1, stanfordCoreNLPSentiment, true) - } - if (annotation4 == null) { - createStrAnnotation(str1, stanfordCoreNLP, false) - } - if (coreDocument1 == null) { - getCoreDocumentsSuggested(stanfordCoreNLP, str1) - } - if (jmweAnnotation == null) { - getJMWEAnnotation(str1) - jmweAnnotation = jmweAnnotationCache.get(str1) - } - val tokenizeCounting: Int? = tokenizeCountingHashMap.getOrDefault(str1, null) - val taggedWordList1: List>? = taggedWordListHashMap.getOrDefault(str1, null) - val retrieveTGWList1: java.util.ArrayList? = retrieveTGWListHashMap.getOrDefault(str1, null) - val sentence1: List? = sentences1HashMap.getOrDefault(str1, null) - val sentenceSentiment1: List? 
= sentencesSentimentHashMap.getOrDefault(str1, null) - val trees1 = trees1HashMap.getOrDefault(str1, null) - var coreMaps2: List = listOf() - val grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault( - str1, null) - if (jmweAnnotation != null) { - coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java) - } - val typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null) - val rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null) - val simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null); - val simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null); - val list1 = listHashMap.getOrDefault(str1, null); - val longest1 = longestHashMap.getOrDefault(str1, null); - val sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null); - val imwes1 = imwesHashMap.getOrDefault(str1, null); - val InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null); - val InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null) - val tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null) - val MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null) - val UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null) - val strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null); - val tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null); - val strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null) - val intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null); - val ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null); - val strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null); - val Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null); - val TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null); - val entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null); - val nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null); - val nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null); - val stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null); - val stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null); - val PairCounter1 = PairCounterHashMap.getOrDefault(str1, null); - - var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1), - coreMaps1, coreMaps2, strAnno, - pipelineAnnotationCache[str1], strAnnoSentiment, - pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1], - tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF, - taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1, - sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1, - grammaticalStructuresF, grammaticalStructures1, typedDependenciesF, - typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1, - simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1, - listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF, - imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF, - InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF, - MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1, - strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1, - strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF, - intTokenEntyCounts1, 
ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1, - AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1, - entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF, - nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1, - PairCounterF, PairCounter1) - if (tokenizeCounting == null) { - tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting()) - } - if (taggedWordList1 == null) { - taggedWordListHashMap.put(str1, SMX.getTaggedWordList1()) - } - if (tokenizeCountingF == null) { - tokenizeCountingF = SMX.getTokenizeCountingF(); - } - if (taggedWordListF == null) { - taggedWordListF = SMX.getTaggedWordListF(); - } - if (retrieveTGWListF == null) { - retrieveTGWListF = SMX.getRetrieveTGWListF(); - } - if (retrieveTGWList1 == null) { - retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1()); - } - if (sentencesF == null) { - sentencesF = SMX.getSentencesF(); - } - if (sentence1 == null) { - sentences1HashMap.put(str1, SMX.getSentences1()) - } - if (sentencesSentimentF == null) { - sentencesSentimentF = SMX.getSentencesSentimentF(); - } - if (sentenceSentiment1 == null) { - sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1()); - } - if (treesF == null) { - treesF = SMX.getTreesF(); - } - if (trees1 == null) { - trees1HashMap.put(str1, SMX.getTrees1()) - } - if (grammaticalStructuresF == null) { - grammaticalStructuresF = SMX.getGrammaticalStructuresF(); - } - if (grammaticalStructures1 == null) { - grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1()) - } - if (typedDependenciesF == null) { - typedDependenciesF = SMX.getTypedDependenciesF(); - } - if (typedDependencies1 == null) { - typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1()) - } - if (rnnCoreAnnotationsPredictedF == null) { - rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF() - } - if (rnnCoreAnnotationsPredicted1 == null) { - rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1()) - } - if (simpleMatricesF == null) { - simpleMatricesF = SMX.getSimpleMatricesF(); - } - if (simpleMatrices1 == null) { - simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1()); - } - if (simpleMatricesNodevectorsF == null) { - simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF(); - } - if (simpleMatricesNodevectors1 == null) { - simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1()); - } - if (listF == null) { - listF = SMX.getListF(); - } - if (list1 == null) { - listHashMap.put(str1, SMX.getList1()); - } - if (longestF == null) { - longestF = SMX.getLongestF(); - } - if (longest1 == null) { - longestHashMap.put(str1, SMX.getLongest1()); - } - if (sentimentLongestF == null) { - sentimentLongestF = SMX.getSentimentLongestF(); - } - if (sentimentLongest1 == null) { - sentimentHashMap.put(str1, SMX.getSentimentLongest1()); - } - if (imwesF == null) { - imwesF = SMX.getImwesF(); - } - if (imwes1 == null) { - imwesHashMap.put(str1, SMX.getImwes1()); - } - if (InflectedCounterNegativeF == null) { - InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF(); - } - if (InflectedCounterNegative1 == null) { - InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1()); - } - if (InflectedCounterPositiveF == null) { - InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF(); - } - if (InflectedCounterPositive1 == null) { - InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1()); - } - if (tokenEntryF == null) { - 
tokenEntryF = SMX.getTokenEntryF(); - } - if (tokenEntry1 == null) { - tokenEntryHashMap.put(str1, SMX.getTokenEntry1()) - } - if (MarkedContinuousCounterF == null) { - MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF(); - } - if (MarkedContinuousCounter1 == null) { - MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1()); - } - if (UnmarkedPatternCounterF == null) { - UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF(); - } - if (UnmarkedPatternCounter1 == null) { - UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1()); - } - if (strTokensIpartFormF == null) { - strTokensIpartFormF = SMX.getStrTokensIpartFormF(); - } - if (strTokensIpartForm1 == null) { - strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1()); - } - if (tokenFormsF == null) { - tokenFormsF = SMX.getTokenFormsF(); - } - if (tokenForms1 == null) { - tokenFormsHashMap.put(str1, SMX.getTokenForms1()); - } - if (strTokenEntryGetPOSF == null) { - strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF(); - } - if (strTokenEntryGetPOS1 == null) { - strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1()) - } - if (intTokenEntyCountsF == null) { - intTokenEntyCountsF = SMX.getIntTokenEntyCountsF(); - } - if (intTokenEntyCounts1 == null) { - intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1()); - } - if (ITokenTagsF == null) { - ITokenTagsF = SMX.getITokenTagsF(); - } - if (ITokenTags1 == null) { - ITokenTagsHashMap.put(str1, SMX.getITokenTags1()); - } - if (strTokenStemsF == null) { - strTokenStemsF = SMX.getStrTokenStemsF(); - } - if (strTokenStems1 == null) { - strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1()); - } - if (AnotatorcounterF == null) { - AnotatorcounterF = SMX.getAnotatorcounterF(); - } - if (Anotatorcounter1 == null) { - AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1()); - } - if (TokensCounterF == null) { - TokensCounterF = SMX.getTokensCounterF(); - } - if (TokensCounter1 == null) { - TokensCounterHashMap.put(str1, SMX.getTokensCounter1()); - } - if (entityTokenTagsF == null) { - entityTokenTagsF = SMX.getEntityTokenTagsF(); - } - if (entityTokenTags1 == null) { - entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1()); - } - if (nerEntitiesF == null) { - nerEntitiesF = SMX.getNerEntitiesF(); - } - if (nerEntities1 == null) { - nerEntitiesHashMap.put(str1, SMX.getNerEntities1()); - } - if (nerEntitiesTypeF == null) { - nerEntitiesTypeF = SMX.getNerEntitiesTypeF(); - } - if (nerEntitiesType1 == null) { - nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1()); - } - if (stopWordTokenF == null) { - stopWordTokenF = SMX.getStopWordTokenF(); - } - if (stopWordToken1 == null) { - stopWordTokenHashMap.put(str1, SMX.getStopWordToken1()); - } - if (stopWordLemmaF == null) { - stopWordLemmaF = SMX.getStopWordLemmaF(); - } - if (stopWordLemma1 == null) { - stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1()); - } - if (PairCounterF == null) { - PairCounterF = SMX.getPairCounterF(); - } - if (PairCounter1 == null) { - PairCounterHashMap.put(str1, SMX.getPairCounter1()); - } - - var getSMX: SimilarityMatrix = SMX.callSMX() - val scoreRelationLastUserMsg = getSMX.distance - if (scoreRelationLastUserMsg > preRelationUserCounters) { - preRelationUserCounters = scoreRelationLastUserMsg - concurrentRelations.add(getSMX.secondaryString) - } - } - } - val cacheRequirement = 6500; - if (preRelationUserCounters > cacheRequirement && !stringCache.contains(strF) && filterContent(strF)) { - stringCache.add(strF) - } - val 
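// Reviewer note: the block above keeps only the best-scoring SimilarityMatrix per
// candidate and admits strF to the string cache once the best score clears 6500.
// A compact sketch of that selection, assuming candidates arrive as
// (score, secondaryString) pairs; ScoredRelation is a hypothetical stand-in for
// SimilarityMatrix here.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class RelationSelection {
    static final class ScoredRelation {
        final double score;
        final String secondaryString;
        ScoredRelation(double score, String secondaryString) {
            this.score = score;
            this.secondaryString = secondaryString;
        }
    }

    public static void main(String[] args) {
        List<ScoredRelation> candidates = Arrays.asList(
                new ScoredRelation(4200.0, "how are you"),
                new ScoredRelation(7100.0, "i am fine"));
        double best = 0.0; // preRelationUserCounters in the patch
        List<String> concurrentRelations = new ArrayList<>();
        for (ScoredRelation c : candidates) {
            if (c.score > best) { // keep only strictly improving relations
                best = c.score;
                concurrentRelations.add(c.secondaryString);
            }
        }
        double cacheRequirement = 6500.0; // threshold used by the patch
        System.out.println("cache new sentence: " + (best > cacheRequirement));
        System.out.println("relations: " + concurrentRelations);
    }
}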
randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10)) - Collections.reverse(concurrentRelations) - val mysqlUpdateLastUsed: ArrayList = ArrayList() - if (!concurrentRelations.isEmpty()) { - for (secondaryRelation in concurrentRelations) { - if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) { - break - } - SB.append(secondaryRelation).append(" ") - mysqlUpdateLastUsed.add(secondaryRelation) - } - } - if (SB.toString().isEmpty()) { - return "failure, preventing stuckness" - } - runBlocking { - CoroutineScope(launch(Dispatchers.IO) { - DataMapper.updateLastUsed(mysqlUpdateLastUsed); - yield() - }) - } - return SB.toString() - } - - private fun getJMWEAnnotation(str1: String) { - val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1) - jmweAnnotationCache.put(str1, jmweAnnotation) - } - - fun getResponseMsg(str: String, personName: String, stanfordCoreNLP: StanfordCoreNLP, - stanfordCoreNLPSentiment: StanfordCoreNLP, ingameResponse: Boolean): String { - var responseFutures: String = "" - runBlocking { - val launch1 = launch(Dispatchers.Default) { - var strF = trimString(str) - responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment) - if (!ingameResponse) { - responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP, - stanfordCoreNLPSentiment) - } - yield() - } - launch1.join() - } - return responseFutures - } - - private fun checkPersonPresentInSentence(personName: String, responseMsg: String, userLastMessage: String, - stanfordCoreNLP: StanfordCoreNLP, - stanfordCoreNLPSentiment: StanfordCoreNLP): String { - try { - val pipelineCoreDcoument = CoreDocument(responseMsg) - val pipelineCoreDcoumentLastMsg = CoreDocument(userLastMessage) - stanfordCoreNLP.annotate(pipelineCoreDcoument) - stanfordCoreNLPSentiment.annotate(pipelineCoreDcoumentLastMsg) - val regex = "(.*?\\d){10,}" - for (em in pipelineCoreDcoument.entityMentions()) { - val entityType = em.entityType() - if (entityType == "PERSON") { - var str = responseMsg - val emText = em.text() - val pattern = Pattern.compile(regex) - val matcher = pattern.matcher(personName) - val isMatched = matcher.matches() - if (emText != personName && !isMatched) { - for (emLastMsg in pipelineCoreDcoumentLastMsg.entityMentions()) { - if (emText != emLastMsg.text() && !Character.isDigit(emLastMsg.text().trim { it <= ' ' }[0])) { - //System.out.println("emLastMsg.text(): " + emLastMsg.text()); - str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " " - + emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText))) - } - } - str += " $personName" - return str - } - } - } - } catch (e: Exception) { - println("""SCUFFED JAYZ: ${e.localizedMessage}""".trimIndent()) - } - return responseMsg - } - - fun filterContent(str: String): Boolean { - if (!str.isEmpty() && str.length > 3) { - var str1Local: String = str.trim(); - if (str1Local.length > 2 && !str1Local.startsWith("!")) { - return true - } - } - return false - } - - fun getCoreDocumentsSuggested(pipeline: StanfordCoreNLP, str: String) { - val annotation = Annotation(str) - pipeline.annotate(annotation) - val coreDocument = CoreDocument(annotation) - coreDocumentAnnotationCache.put(str, coreDocument) - } -} \ No newline at end of file diff --git a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java index eb4a506b..1eb98277 100644 --- 
a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java @@ -9,43 +9,45 @@ import PresentationLayer.DiscordHandler; import discord4j.core.event.domain.message.MessageCreateEvent; import discord4j.core.object.entity.User; import discord4j.core.object.entity.channel.TextChannel; - import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; - -import edu.stanford.nlp.pipeline.StanfordCoreNLP; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; /** + * * @author install1 */ public class DoStuff { + public static boolean occupied = false; - public static void doStuff(MessageCreateEvent event, String usernameBot, Datahandler datahandler, - StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) { - String username = ""; + public static boolean isOccupied() { + return occupied; + } + + public static void doStuff(MessageCreateEvent event, String usernameBot) { + String username = null; try { username = event.getMessage().getAuthor().get().getUsername(); } catch (java.util.NoSuchElementException e) { username = null; } if (username != null && !username.equals(usernameBot)) { + occupied = true; TextChannel block = event.getMessage().getChannel().cast(TextChannel.class).block(); String name = block.getCategory().block().getName(); name = name.toLowerCase(); String channelName = block.getName().toLowerCase(); boolean channelpermissionsDenied = false; - if (channelName.contains("suggestion-box")) { - channelpermissionsDenied = true; - } switch (name) { - case "public area": + case "public area": { + break; + } case "information area": { break; } @@ -54,34 +56,49 @@ public class DoStuff { break; } } + List blockLast = event.getMessage().getUserMentions().buffer().blockLast(); + String content = event.getMessage().getContent(); if (!channelpermissionsDenied) { - List blockLast = event.getMessage().getUserMentions().buffer().blockLast(); - String content = event.getMessage().getContent(); - if (blockLast != null) { + if (blockLast != null) + { for (User user : blockLast) { content = content.replace(user.getId().asString(), ""); } } - boolean mentionedBot = false; - if (blockLast != null) { - for (User user : blockLast) { - if (user.getUsername().equals(usernameBot)) { - mentionedBot = true; - break; - } + MessageResponseHandler.getMessage(content); + } + boolean mentionedBot = false; + if (blockLast != null){ + for (User user : blockLast) + { + if (user.getUsername().equals(usernameBot)) + { + mentionedBot = true; + break; } } - if (mentionedBot || channelName.contains("general-autism")) { + } + if (mentionedBot || channelName.contains("general-autism")) { + try { String ResponseStr; - ResponseStr = datahandler.getResponseMsg(content, username, stanfordCoreNLP, stanfordCoreNLPSentiment, - false); + ResponseStr = MessageResponseHandler.selectReponseMessage(content, username); if (!ResponseStr.isEmpty()) { System.out.print("\nResponseStr3: " + ResponseStr + "\n"); event.getMessage().getChannel().block().createMessage(ResponseStr).block(); } + } catch (CustomError ex) { + Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex); } + } - datahandler.updateStringCache(); + new Thread(() -> { + try { + Datahandler.instance.checkIfUpdateStrings(); + } catch (CustomError ex) { + Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex); + } + }).start(); + occupied = false; } } } diff --git 
a/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java b/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java
new file mode 100644
index 00000000..413651ae
--- /dev/null
+++ b/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java
@@ -0,0 +1,101 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package FunctionLayer;
+
+import com.google.common.collect.MapMaker;
+import edu.stanford.nlp.pipeline.CoreDocument;
+import edu.stanford.nlp.pipeline.CoreEntityMention;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ *
+ * @author install1
+ */
+public class MessageResponseHandler {
+
+    private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(2).makeMap();
+
+    public static ConcurrentMap<Integer, String> getStr() {
+        return str;
+    }
+
+    public static void setStr(ConcurrentMap<Integer, String> str) {
+        MessageResponseHandler.str = str;
+    }
+
+    public static void getMessage(String message) {
+        if (message != null && !message.isEmpty()) {
+            message = message.replace("@", "");
+            if (message.contains("<>")) {
+                message = message.substring(message.indexOf(">"));
+            }
+            if (message.startsWith("[ *")) {
+                message = message.substring(message.indexOf("]"));
+            }
+            str.put(str.size() + 1, message);
+        }
+    }
+
+    public static String selectReponseMessage(String toString, String personName) throws CustomError {
+        ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(6).makeMap();
+        str1.put(str1.size() + 1, toString);
+        String strreturn = "";
+        for (String str : str1.values()) {
+            if (!str.isEmpty()) {
+                strreturn = str;
+            }
+        }
+        String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn);
+        getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, strreturn);
+        return getResponseMsg;
+    }
+
+    private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) {
+        // check if the user's last message contains a person as reference
+        // check whether the first PERSON mention is the author or a person they mentioned
+        try {
+            String strreturn = responseMsg;
+            CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
+            CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
+            Datahandler.getPipeline().annotate(pipelineCoreDcoument);
+            Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg);
+            String regex = "(.*?\\d){10,}";
+            for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
+                String entityType = em.entityType();
+                if (entityType.equals("PERSON")) {
+                    String str = strreturn;
+                    String emText = em.text();
+                    Pattern pattern = Pattern.compile(regex);
+                    Matcher matcher = pattern.matcher(personName);
+                    boolean isMatched = matcher.matches();
+                    if (!emText.equals(personName) && !isMatched) {
+                        for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
+                            if (!emText.equals(emLastMsg.text()) && !Character.isDigit(emLastMsg.text().trim().charAt(0))) {
+                                //System.out.println("emLastMsg.text(): " + emLastMsg.text());
+                                str = strreturn.substring(0, strreturn.indexOf(emText)) + " "
+                                        + emLastMsg + " " + strreturn.substring(strreturn.indexOf(emText));
+                            }
+                        }
+                        str += " " + personName;
+                        return str;
+                    }
+                }
+            }
+        } catch (Exception e) {
+            System.out.println("SCUFFED JAYZ: " + e.getLocalizedMessage() + "\n");
+        }
+        return
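// Reviewer note: checkPersonPresentInSentence splices a name taken from the
// user's last message into the response just before the PERSON mention the NER
// pass found, then appends the addressee. The indexOf-based string surgery is
// easy to get wrong, so here it is in isolation; the mention texts are plain
// stand-ins for what CoreDocument.entityMentions() would return.
final class MentionSplice {
    static String splice(String response, String mentionInResponse, String mentionFromLastMsg) {
        int at = response.indexOf(mentionInResponse);
        if (at < 0) {
            return response; // mention not present, leave the response untouched
        }
        // insert the user's mention directly before the response's PERSON mention
        return response.substring(0, at) + " " + mentionFromLastMsg + " " + response.substring(at);
    }

    public static void main(String[] args) {
        String out = splice("tell Bob the server is up", "Bob", "Alice");
        System.out.println(out + " Carol"); // the patch appends personName at the end
    }
}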
responseMsg; + } + + public static int getOverHead() { + int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead(); + return getResponseMsgOverHead; + } +} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java index 778cdb61..32f2d4b6 100644 --- a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java @@ -5,6 +5,7 @@ */ package FunctionLayer; +import com.google.common.collect.MapMaker; import edu.mit.jmwe.data.IMWE; import edu.mit.jmwe.data.IToken; import edu.mit.jmwe.data.Token; @@ -23,29 +24,37 @@ import edu.stanford.nlp.ling.JMWEAnnotation; import edu.stanford.nlp.pipeline.Annotation; import edu.stanford.nlp.pipeline.StanfordCoreNLP; import edu.stanford.nlp.util.CoreMap; - import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Properties; +import java.util.concurrent.ConcurrentMap; /** + * * @author install1 */ //maybe not public? public class PipelineJMWESingleton { //if not needed to be volatile dont make it, increases time - //public volatile static PipelineJMWESingleton INSTANCE; - public static PipelineJMWESingleton INSTANCE; + public volatile static PipelineJMWESingleton INSTANCE; private static StanfordCoreNLP localNLP = initializeJMWE(); private static String underscoreSpaceReplacement; - private static IMWEIndex index; - private static IMWEDetector detector; private PipelineJMWESingleton() { - String jmweIndexData = "/home/gameservers/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data + } + + public static void getINSTANCE() { + INSTANCE = new PipelineJMWESingleton(); + } + + public final ConcurrentMap getJMWEAnnotation(Collection strvalues) { + boolean verbose = false; + IMWEIndex index; + String jmweIndexData = "/home/debian/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data"; File indexFile = new File((String) jmweIndexData); index = new MWEIndex(indexFile); @@ -55,45 +64,36 @@ public class PipelineJMWESingleton { } catch (IOException e) { throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n"); } - detector = getDetector(index, detectorName); + IMWEDetector detector = getDetector(index, detectorName); + ConcurrentMap returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap(); + strvalues.forEach(str -> { + Annotation annoStr = new Annotation(str); + returnAnnotations.put(str, annoStr); + }); + localNLP.annotate(returnAnnotations.values()); + returnAnnotations.values().parallelStream().forEach(annoStr -> { + for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) { + List> mwes = getjMWEInSentence(sentence, index, detector, verbose); + sentence.set(JMWEAnnotation.class, mwes); + } + }); index.close(); - } - - public static void getINSTANCE() { - INSTANCE = new PipelineJMWESingleton(); - } - - public final Annotation getJMWEAnnotation(String str) { - try { - index.open(); - } catch (IOException e) { - throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n"); - } - Annotation annoStr = new Annotation(str); - localNLP.annotate(annoStr); - Class sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class; - for (CoreMap sentence : 
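// Reviewer note: getJMWEAnnotation now builds one Annotation per incoming string
// and annotates the whole batch in a single pass instead of one string at a time.
// Sketch of that batching shape with CoreNLP, using the collection-accepting
// annotate the hunk above relies on; the tiny annotator list mirrors the patch's
// tokenize,ssplit,pos,lemma pipeline and the models must be on the classpath.
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

final class BatchAnnotate {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("annotators", "tokenize,ssplit,pos,lemma");
        StanfordCoreNLP nlp = new StanfordCoreNLP(props);
        List<Annotation> batch = new ArrayList<>();
        for (String s : Arrays.asList("first message", "second message")) {
            batch.add(new Annotation(s)); // one Annotation per string, as in the patch
        }
        nlp.annotate(batch); // single pass over the whole batch
        System.out.println("annotated " + batch.size() + " strings");
    }
}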
annoStr.get(sentencesAnnotationClass)) { - List> mwes = getjMWEInSentence(sentence, index, detector, false); - //annoStr.set(JMWEAnnotation.class, mwes); - sentence.set(JMWEAnnotation.class, mwes); - } - index.close(); - return annoStr; + return returnAnnotations; } public final static StanfordCoreNLP initializeJMWE() { Properties propsJMWE; propsJMWE = new Properties(); propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma"); - propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep"); - propsJMWE.setProperty("threads", "5"); + propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete"); + propsJMWE.setProperty("threads", "25"); propsJMWE.setProperty("pos.maxlen", "90"); propsJMWE.setProperty("tokenize.maxlen", "90"); propsJMWE.setProperty("ssplit.maxlen", "90"); propsJMWE.setProperty("lemma.maxlen", "90"); underscoreSpaceReplacement = "-"; localNLP = new StanfordCoreNLP(propsJMWE); - System.out.println("finished JMWE constructor \n"); + System.out.println("finished singleton constructor \n"); return localNLP; } @@ -124,7 +124,7 @@ public class PipelineJMWESingleton { } public List> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector, - boolean verbose) { + boolean verbose) { List tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class)); List> mwes = detector.detect(tokens); if (verbose) { @@ -146,4 +146,5 @@ public class PipelineJMWESingleton { } return sentence; } + } diff --git a/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java b/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java index 59154703..23ada343 100644 --- a/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java @@ -5,7 +5,10 @@ */ package FunctionLayer; +import FunctionLayer.StanfordParser.SentimentValueCache; + /** + * * @author install1 */ public class SimilarityMatrix { @@ -13,6 +16,8 @@ public class SimilarityMatrix { private String PrimaryString; private String SecondaryString; private double distance; + private SentimentValueCache cacheValue1; + private SentimentValueCache cacheValue2; public final double getDistance() { return distance; @@ -33,8 +38,36 @@ public class SimilarityMatrix { this.distance = result; } + public final String getPrimaryString() { + return PrimaryString; + } + + public final void setPrimaryString(String PrimaryString) { + this.PrimaryString = PrimaryString; + } + public final String getSecondaryString() { return SecondaryString; } + public final void setSecondaryString(String SecondaryString) { + this.SecondaryString = SecondaryString; + } + + public final SentimentValueCache getCacheValue1() { + return cacheValue1; + } + + public final void setCacheValue1(SentimentValueCache cacheValue1) { + this.cacheValue1 = cacheValue1; + } + + public final SentimentValueCache getCacheValue2() { + return cacheValue2; + } + + public final void setCacheValue2(SentimentValueCache cacheValue2) { + this.cacheValue2 = cacheValue2; + } + } diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java index 047043d0..a4a6b052 100644 --- a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java @@ -1,8 +1,10 @@ package FunctionLayer.StanfordParser; import 
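// Reviewer note: SimilarityMatrix is now a mutable score carrier that also
// ferries the two per-string SentimentValueCache objects between scoring runs.
// A short usage sketch of the accessors this hunk introduces; the two-string
// constructor is assumed from the class's PrimaryString/SecondaryString fields
// and is not shown in this patch.
import FunctionLayer.SimilarityMatrix;

final class SimilarityMatrixDemo {
    public static void main(String[] args) {
        SimilarityMatrix smx = new SimilarityMatrix("first sentence", "second sentence");
        smx.setDistance(1250.0);
        smx.setCacheValue1(null); // no cached sentiment state for either string yet
        smx.setCacheValue2(null);
        System.out.println(smx.getPrimaryString() + " ~ " + smx.getSecondaryString()
                + " -> " + smx.getDistance());
    }
}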
FunctionLayer.LevenshteinDistance; +import FunctionLayer.Datahandler; import FunctionLayer.SimilarityMatrix; import FunctionLayer.StopwordAnnotator; +import com.google.common.collect.MapMaker; import edu.mit.jmwe.data.IMWE; import edu.mit.jmwe.data.IMWEDesc; import edu.mit.jmwe.data.IToken; @@ -17,6 +19,7 @@ import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations; import edu.stanford.nlp.pipeline.Annotation; import edu.stanford.nlp.pipeline.CoreDocument; import edu.stanford.nlp.pipeline.CoreEntityMention; +import edu.stanford.nlp.pipeline.StanfordCoreNLP; import edu.stanford.nlp.process.CoreLabelTokenFactory; import edu.stanford.nlp.process.DocumentPreprocessor; import edu.stanford.nlp.process.PTBTokenizer; @@ -34,10 +37,18 @@ import edu.stanford.nlp.trees.TypedDependency; import edu.stanford.nlp.trees.tregex.gui.Tdiff; import edu.stanford.nlp.util.CoreMap; import edu.stanford.nlp.util.Pair; - import java.io.StringReader; -import java.util.*; - +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.OptionalDouble; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.analysis.core.StopAnalyzer; import org.ejml.simple.SimpleMatrix; @@ -46,11 +57,11 @@ import org.ejml.simple.SimpleMatrix; * To change this template file, choose Tools | Templates * and open the template in the editor. */ - /** + * * @author install1 */ -public class SentimentAnalyzerTest { +public class SentimentAnalyzerTest implements Callable { private final SimilarityMatrix smxParam; private final String str; @@ -58,490 +69,50 @@ public class SentimentAnalyzerTest { private final MaxentTagger tagger; private final GrammaticalStructureFactory gsf; private final AbstractSequenceClassifier classifier; - private final List coreMaps1; - private final List coreMaps2; + private final Annotation jmweStrAnnotation1; + private final Annotation jmweStrAnnotation2; private final Annotation pipelineAnnotation1; private final Annotation pipelineAnnotation2; private final Annotation pipelineAnnotation1Sentiment; private final Annotation pipelineAnnotation2Sentiment; private final CoreDocument pipelineCoreDcoument1; private final CoreDocument pipelineCoreDcoument2; + private SentimentValueCache cacheSentiment1; + private SentimentValueCache cacheSentiment2; - public Integer getTokenizeCounting() { - return tokenizeCounting; + public final SentimentValueCache getCacheSentiment1() { + return cacheSentiment1; } - public List> getTaggedWordListF() { - return taggedWordListF; + public final SentimentValueCache getCacheSentiment2() { + return cacheSentiment2; } - public List> getTaggedWordList1() { - return taggedWordList1; - } - - public ArrayList getRetrieveTGWList1() { - return retrieveTGWList1; - } - - public List getSentencesF() { - return sentencesF; - } - - public Integer getTokenizeCountingF() { - return tokenizeCountingF; - } - - public ArrayList getRetrieveTGWListF() { - return retrieveTGWListF; - } - - public List getSentences1() { - return sentences1; - } - - public List getSentencesSentimentF() { - return sentencesSentimentF; - } - - public List getSentencesSentiment1() { - return sentencesSentiment1; - } - - public ArrayList getTreesF() { - return treesF; - } - - public ArrayList getTrees1() { - return trees1; - } - - - public ArrayList getGrammaticalStructuresF() { - return 
grammaticalStructuresF; - } - - public ArrayList getGrammaticalStructures1() { - return grammaticalStructures1; - } - - public ArrayList getTypedDependenciesF() { - return typedDependenciesF; - } - - public ArrayList getTypedDependencies1() { - return typedDependencies1; - } - - public ArrayList getRnnCoreAnnotationsPredictedF() { - return rnnCoreAnnotationsPredictedF; - } - - public ArrayList getRnnCoreAnnotationsPredicted1() { - return rnnCoreAnnotationsPredicted1; - } - - public ArrayList getSimpleMatricesF() { - return simpleMatricesF; - } - - public ArrayList getSimpleMatrices1() { - return simpleMatrices1; - } - - public ArrayList getSimpleMatricesNodevectorsF() { - return simpleMatricesNodevectorsF; - } - - public List getListF() { - return listF; - } - - public List getList1() { - return list1; - } - - public ArrayList getSimpleMatricesNodevectors1() { - return simpleMatricesNodevectors1; - } - - public Integer getLongestF() { - return longestF; - } - - public Integer getSentimentLongestF() { - return sentimentLongestF; - } - - public Integer getSentimentLongest1() { - return sentimentLongest1; - } - - public List> getImwesF() { - return imwesF; - } - - public List> getImwes1() { - return imwes1; - } - - public Integer getLongest1() { - return longest1; - } - - public Integer getInflectedCounterNegativeF() { - return InflectedCounterNegativeF; - } - - public Integer getInflectedCounterPositiveF() { - return InflectedCounterPositiveF; - } - - public Integer getInflectedCounterPositive1() { - return InflectedCounterPositive1; - } - - public Integer getInflectedCounterNegative1() { - return InflectedCounterNegative1; - } - - public ArrayList getTokenEntryF() { - return tokenEntryF; - } - - public ArrayList getTokenEntry1() { - return tokenEntry1; - } - - public Integer getMarkedContinuousCounterF() { - return MarkedContinuousCounterF; - } - - public Integer getMarkedContinuousCounter1() { - return MarkedContinuousCounter1; - } - - public Integer getUnmarkedPatternCounterF() { - return UnmarkedPatternCounterF; - } - - public Integer getUnmarkedPatternCounter1() { - return UnmarkedPatternCounter1; - } - - public ArrayList getStrTokensIpartFormF() { - return strTokensIpartFormF; - } - - public ArrayList getStrTokensIpartForm1() { - return strTokensIpartForm1; - } - - public ArrayList getTokenFormsF() { - return tokenFormsF; - } - - public ArrayList getTokenForms1() { - return tokenForms1; - } - - public ArrayList getStrTokenEntryGetPOSF() { - return strTokenEntryGetPOSF; - } - - public ArrayList getStrTokenEntryGetPOS1() { - return strTokenEntryGetPOS1; - } - - public ArrayList getIntTokenEntyCountsF() { - return intTokenEntyCountsF; - } - - public ArrayList getIntTokenEntyCounts1() { - return intTokenEntyCounts1; - } - - public ArrayList getITokenTagsF() { - return ITokenTagsF; - } - - public ArrayList getITokenTags1() { - return ITokenTags1; - } - - public ArrayList getStrTokenStemsF() { - return strTokenStemsF; - } - - public ArrayList getStrTokenStems1() { - return strTokenStems1; - } - - public Integer getAnotatorcounterF() { - return AnotatorcounterF; - } - - public Integer getAnotatorcounter1() { - return Anotatorcounter1; - } - - public Integer getTokensCounterF() { - return TokensCounterF; - } - - public Integer getTokensCounter1() { - return TokensCounter1; - } - - public ArrayList getEntityTokenTagsF() { - return entityTokenTagsF; - } - - public ArrayList getEntityTokenTags1() { - return entityTokenTags1; - } - - public ArrayList getNerEntitiesF() { - return nerEntitiesF; - } 
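// Reviewer note: the dozens of paired getters being deleted here ("...F" for the
// first string, "...1" for the second) are what SentimentValueCache replaces: one
// value object per analyzed string instead of ~70 parallel fields on the scorer.
// Minimal sketch of that shape; the field names are illustrative, not the full
// class from this patch.
import java.util.ArrayList;
import java.util.List;

final class SentimentSnapshot {
    private int tokenizeCounting;                          // token count for the string
    private final List<String> tokenForms = new ArrayList<>(); // surface forms seen

    int getTokenizeCounting() { return tokenizeCounting; }
    void setTokenizeCounting(int n) { tokenizeCounting = n; }
    List<String> getTokenForms() { return tokenForms; }
}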
- - public ArrayList getNerEntities1() { - return nerEntities1; - } - - public ArrayList getNerEntitiesTypeF() { - return nerEntitiesTypeF; - } - - public ArrayList getNerEntitiesType1() { - return nerEntitiesType1; - } - - public ArrayList getStopWordTokenF() { - return stopWordTokenF; - } - - public ArrayList getStopWordToken1() { - return stopWordToken1; - } - - public ArrayList getStopWordLemmaF() { - return stopWordLemmaF; - } - - public ArrayList getStopWordLemma1() { - return stopWordLemma1; - } - - public Integer getPairCounterF() { - return PairCounterF; - } - - public Integer getPairCounter1() { - return PairCounter1; - } - - //caches - private Integer tokenizeCounting; - private Integer tokenizeCountingF; - private List> taggedWordListF; - private List> taggedWordList1; - private ArrayList retrieveTGWList1; - private ArrayList retrieveTGWListF; - private List sentencesF; - private List sentences1; - private List sentencesSentimentF; - private List sentencesSentiment1; - private ArrayList treesF; - private ArrayList trees1; - private ArrayList grammaticalStructuresF; - private ArrayList grammaticalStructures1; - private ArrayList typedDependenciesF; - private ArrayList typedDependencies1; - private ArrayList rnnCoreAnnotationsPredictedF; - private ArrayList rnnCoreAnnotationsPredicted1; - private ArrayList simpleMatricesF; - private ArrayList simpleMatrices1; - private ArrayList simpleMatricesNodevectorsF; - private ArrayList simpleMatricesNodevectors1; - private List listF; - private List list1; - private Integer longestF; - private Integer longest1; - private Integer sentimentLongestF; - private Integer sentimentLongest1; - private List> imwesF; - private List> imwes1; - private Integer InflectedCounterNegativeF; - private Integer InflectedCounterNegative1; - private Integer InflectedCounterPositiveF; - private Integer InflectedCounterPositive1; - private ArrayList tokenEntryF; - private ArrayList tokenEntry1; - private Integer MarkedContinuousCounterF; - private Integer MarkedContinuousCounter1; - private Integer UnmarkedPatternCounterF; - private Integer UnmarkedPatternCounter1; - private ArrayList strTokensIpartFormF; - private ArrayList strTokensIpartForm1; - private ArrayList tokenFormsF; - private ArrayList tokenForms1; - private ArrayList strTokenEntryGetPOSF; - private ArrayList strTokenEntryGetPOS1; - private ArrayList intTokenEntyCountsF; - private ArrayList intTokenEntyCounts1; - private ArrayList ITokenTagsF; - private ArrayList ITokenTags1; - private ArrayList strTokenStemsF; - private ArrayList strTokenStems1; - private Integer AnotatorcounterF; - private Integer Anotatorcounter1; - private Integer TokensCounterF; - private Integer TokensCounter1; - private ArrayList entityTokenTagsF; - private ArrayList entityTokenTags1; - private ArrayList nerEntitiesF; - private ArrayList nerEntities1; - private ArrayList nerEntitiesTypeF; - private ArrayList nerEntitiesType1; - private ArrayList stopWordTokenF; - private ArrayList stopWordToken1; - private ArrayList stopWordLemmaF; - private ArrayList stopWordLemma1; - private Integer PairCounterF; - private Integer PairCounter1; - - public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, List coreMaps1, List coreMaps2, - Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2, - CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, - MaxentTagger tagger, GrammaticalStructureFactory gsf, - AbstractSequenceClassifier classifier, 
Integer tokenizeCounting, - Integer tokenizeCountingF, List> taggedWordListF, - List> taggedWordList1, ArrayList - retrieveTGWListF, ArrayList retrieveTGWList1, - List sentencesF, List sentences1, - List sentencesSentimentF, List sentencesSentiment1, - ArrayList treesF, ArrayList trees1, - ArrayList grammaticalStructuresF, - ArrayList grammaticalStructures1, - ArrayList typedDependenciesF, - ArrayList typedDependencies1, - ArrayList rnnCoreAnnotationsPredictedF, - ArrayList rnnCoreAnnotationsPredicted1, - ArrayList simpleMatricesF, - ArrayList simpleMatrices1, - ArrayList simpleMatricesNodevectorsF, - ArrayList simpleMatricesNodevectors1, - List listF, List list1, Integer longestF, Integer longest1, - Integer sentimentLongestF, Integer sentimentLongest1, - List> imwesF, List> imwes1, - Integer InflectedCounterNegativeF, - Integer InflectedCounterNegative1, Integer InflectedCounterPositiveF, - Integer InflectedCounterPositive1, ArrayList tokenEntryF, - ArrayList tokenEntry1, Integer MarkedContinuousCounterF, - Integer MarkedContinuousCounter1, Integer UnmarkedPatternCounterF, - Integer UnmarkedPatternCounter1, ArrayList strTokensIpartFormF, - ArrayList strTokensIpartForm1, ArrayList tokenFormsF, - ArrayList tokenForms1, ArrayList strTokenEntryGetPOSF, - ArrayList strTokenEntryGetPOS1, ArrayList intTokenEntyCountsF, - ArrayList intTokenEntyCounts1, ArrayList ITokenTagsF, - ArrayList ITokenTags1, ArrayList strTokenStemsF, - ArrayList strTokenStems1, Integer AnotatorcounterF, - Integer Anotatorcounter1, Integer TokensCounterF, - Integer TokensCounter1, ArrayList entityTokenTagsF, - ArrayList entityTokenTags1, ArrayList nerEntitiesF, - ArrayList nerEntities1, ArrayList nerEntitiesTypeF, - ArrayList nerEntitiesType1, ArrayList stopWordTokenF, - ArrayList stopWordToken1, ArrayList stopWordLemmaF, - ArrayList stopWordLemma1, Integer PairCounterF, - Integer PairCounter1) { + public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, Annotation str1Annotation, Annotation str2Annotation, + Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2, + CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, SentimentValueCache cacheValue1, SentimentValueCache cacheValue2) { this.str = str; this.str1 = str1; this.smxParam = smxParam; - this.tagger = tagger; - this.gsf = gsf; - this.classifier = classifier; - this.coreMaps1 = coreMaps1; - this.coreMaps2 = coreMaps2; + this.tagger = Datahandler.getTagger(); + this.gsf = Datahandler.getGsf(); + this.classifier = Datahandler.getClassifier(); + this.jmweStrAnnotation1 = str1Annotation; + this.jmweStrAnnotation2 = str2Annotation; this.pipelineAnnotation1 = strPipeline1; this.pipelineAnnotation2 = strPipeline2; this.pipelineAnnotation1Sentiment = strPipeSentiment1; this.pipelineAnnotation2Sentiment = strPipeSentiment2; this.pipelineCoreDcoument1 = pipelineCoreDcoument1; this.pipelineCoreDcoument2 = pipelineCoreDcoument2; - this.tokenizeCounting = tokenizeCounting; - this.tokenizeCountingF = tokenizeCountingF; - this.taggedWordListF = taggedWordListF; - this.taggedWordList1 = taggedWordList1; - this.retrieveTGWListF = retrieveTGWListF; - this.retrieveTGWList1 = retrieveTGWList1; - this.sentencesF = sentencesF; - this.sentences1 = sentences1; - this.sentencesSentimentF = sentencesSentimentF; - this.sentencesSentiment1 = sentencesSentiment1; - this.treesF = treesF; - this.trees1 = trees1; - this.grammaticalStructuresF = grammaticalStructuresF; - this.grammaticalStructures1 = 
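// Reviewer note: SentimentAnalyzerTest now implements Callable, so scoring jobs
// can be fanned out on an executor and joined for their SimilarityMatrix results.
// Sketch of that call pattern; the executor wiring and the stand-in task are
// illustrative, not part of this hunk.
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

final class ScoringFanOut {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        Callable<Double> task = () -> 1250.0; // stands in for a SentimentAnalyzerTest
        Future<Double> result = pool.submit(task);
        System.out.println("score: " + result.get()); // blocks until call() finishes
        pool.shutdown();
    }
}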
grammaticalStructures1; - this.typedDependenciesF = typedDependenciesF; - this.typedDependencies1 = typedDependencies1; - this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF; - this.rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredicted1; - this.simpleMatricesF = simpleMatricesF; - this.simpleMatrices1 = simpleMatrices1; - this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF; - this.simpleMatricesNodevectors1 = simpleMatricesNodevectors1; - this.listF = listF; - this.list1 = list1; - this.longestF = longestF; - this.longest1 = longest1; - this.sentimentLongestF = sentimentLongestF; - this.sentimentLongest1 = sentimentLongest1; - this.imwesF = imwesF; - this.imwes1 = imwes1; - this.InflectedCounterNegativeF = InflectedCounterNegativeF; - this.InflectedCounterNegative1 = InflectedCounterNegative1; - this.InflectedCounterPositiveF = InflectedCounterPositiveF; - this.InflectedCounterPositive1 = InflectedCounterPositive1; - this.tokenEntryF = tokenEntryF; - this.tokenEntry1 = tokenEntry1; - this.MarkedContinuousCounterF = MarkedContinuousCounterF; - this.MarkedContinuousCounter1 = MarkedContinuousCounter1; - this.UnmarkedPatternCounterF = UnmarkedPatternCounterF; - this.UnmarkedPatternCounter1 = UnmarkedPatternCounter1; - this.strTokensIpartFormF = strTokensIpartFormF; - this.strTokensIpartForm1 = strTokensIpartForm1; - this.tokenFormsF = tokenFormsF; - this.tokenForms1 = tokenForms1; - this.strTokenEntryGetPOSF = strTokenEntryGetPOSF; - this.strTokenEntryGetPOS1 = strTokenEntryGetPOS1; - this.intTokenEntyCountsF = intTokenEntyCountsF; - this.intTokenEntyCounts1 = intTokenEntyCounts1; - this.ITokenTagsF = ITokenTagsF; - this.ITokenTags1 = ITokenTags1; - this.strTokenStemsF = strTokenStemsF; - this.strTokenStems1 = strTokenStems1; - this.AnotatorcounterF = AnotatorcounterF; - this.Anotatorcounter1 = Anotatorcounter1; - this.TokensCounterF = TokensCounterF; - this.TokensCounter1 = TokensCounter1; - this.entityTokenTagsF = entityTokenTagsF; - this.entityTokenTags1 = entityTokenTags1; - this.nerEntitiesF = nerEntitiesF; - this.nerEntities1 = nerEntities1; - this.nerEntitiesTypeF = nerEntitiesTypeF; - this.nerEntitiesType1 = nerEntitiesType1; - this.stopWordTokenF = stopWordTokenF; - this.stopWordToken1 = stopWordToken1; - this.stopWordLemmaF = stopWordLemmaF; - this.stopWordLemma1 = stopWordLemma1; - this.PairCounterF = PairCounterF; - this.PairCounter1 = PairCounter1; + this.cacheSentiment1 = cacheValue1; + this.cacheSentiment2 = cacheValue2; } private List> getTaggedWordList(String message) { List> taggedwordlist = new ArrayList(); DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(message)); - TokenizerFactory ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete + TokenizerFactory ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete"); //noneDelete tokenizer.setTokenizerFactory(ptbTokenizerFactory); for (final List sentence : tokenizer) { taggedwordlist.add(tagger.tagSentence(sentence)); @@ -551,127 +122,111 @@ public class SentimentAnalyzerTest { private int tokenizeCounting(List> taggedwordlist) { int counter = 0; + Collection taggedCollection = new ArrayList(); for (List taggedList : taggedwordlist) { counter += taggedList.size(); } return counter; } - private ArrayList retrieveTGWListIndex(List> taggedwordlist) { - ArrayList tgwlistIndex = new ArrayList(); - for (List tGWList : taggedwordlist) { - for (TaggedWord taggedWord 
: tGWList) { - for (String str : tgwlistIndex) { - if (!taggedWord.tag().equals(str) && !taggedWord.tag().equals(":")) { - tgwlistIndex.add(taggedWord.tag()); - tGWList.remove(taggedWord); + private ConcurrentMap retrieveTGWListIndex(List> taggedwordlist) { + ConcurrentMap tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap(); + taggedwordlist.forEach((TGWList) -> { + TGWList.forEach((TaggedWord) -> { + if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) { + tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag()); + } + }); + }); + return tgwlistIndex; + } + + private Double iterateTrees(ConcurrentMap sentenceConstituencyParseList2, ConcurrentMap sentenceConstituencyParseList1, + Double score) { + double preConstituentsScore = score; + ConcurrentMap constituentsMap = new MapMaker().concurrencyLevel(4).makeMap(); + int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size(); + for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { + int constiRelationsize = 0; + for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { + Set constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2); + Set constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1); + ConcurrentMap constiLabels = new MapMaker().concurrencyLevel(2).makeMap(); + for (final Constituent consti : constinuent1) { + for (final Constituent consti1 : constinuent2) { + if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) { + constiLabels.put(constiLabels.size(), consti.value()); + constiRelationsize++; + } + } + } + int constituents1 = constinuent1.size() - constiRelationsize; + int constituents2 = constinuent2.size() - constiRelationsize; + constituentsMap.put(constituentsMap.size(), constituents1); + constituentsMap.put(constituentsMap.size(), constituents2); + constituentsMap.put(constituentsMap.size(), constiRelationsize); + if (constituentsMap.size() < 4) { + if ((constituents1 * 5 < constituents2 || constituents2 * 5 < constituents1) && constituents1 > 0 && constituents2 > 0) { + score -= (constituents1 + constituents2) * 200; + } else if ((constituents1 == 0 || constituents2 == 0) && (constituents1 >= constituents2 + 4 || constituents2 >= constituents1 + 4)) { + score -= constituents1 > constituents2 ? 
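// Reviewer note: iterateTrees scores how far two constituency parses diverge.
// Tdiff.markDiff yields the constituents unique to each tree, each shared label
// is counted exactly once, and the leftover unique counts drive the bonuses and
// penalties below. The counting core, reduced to plain sets; the labels stand in
// for Constituent values.
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

final class ConstituentOverlap {
    public static void main(String[] args) {
        Set<String> diff1 = new HashSet<>(Arrays.asList("NP", "VP", "PP"));   // only in parse 1
        Set<String> diff2 = new HashSet<>(Arrays.asList("NP", "VP", "SBAR")); // only in parse 2
        Set<String> seen = new HashSet<>();
        int constiRelationsize = 0;
        for (String label : diff1) {
            if (diff2.contains(label) && seen.add(label)) {
                constiRelationsize++; // each shared label counted a single time
            }
        }
        int constituents1 = diff1.size() - constiRelationsize;
        int constituents2 = diff2.size() - constiRelationsize;
        System.out.println(constiRelationsize + " shared, "
                + constituents1 + "/" + constituents2 + " unique");
    }
}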
constituents1 * 500 : constituents2 * 500; + } else if (constiRelationsize >= constituents1 + constituents2 && (constituents1 > 2 && constituents2 > 2)) { + score += (constiRelationsize + constituents1 + constituents2) * 350; + } else if (constituents1 >= 2 && constituents2 >= 2 && constituents1 * 1.5 > constituents2 && constituents2 * 1.5 > constituents1) { + if (constituents1 == constituents2 && constiRelationsize - constituents1 == 1) { + score += (constiRelationsize + constituents1 + constituents2) * 1550; + } else if (constiRelationsize >= constituents1 && constituents1 == constituents2) { + score -= (constiRelationsize + constituents1 + constituents2) * 550; + } else if (constiRelationsize < constituents1 && constiRelationsize < constituents2) { + score += 800; + } else if ((constiRelationsize == constituents1 || constiRelationsize == constituents2) && constituents1 * 1.5 > constituents2 + && constituents2 * 1.5 > constituents1) { + score += (constiRelationsize + constituents1 + constituents2) * 350; + } + } else if (constiRelationsize > constituents1 + constituents2) { + score += 2500; + } else if (constiRelationsize * 5 < constituents1 || constiRelationsize * 5 < constituents2) { + score -= (constituents1 + constituents2) * 400; + } + } else { + score = preConstituentsScore; + int n1 = constituentsMap.get(0); + int n2 = constituentsMap.get(1); + int n3 = constituentsMap.get(2); + int cap = 0; + if (n1 > n2 && n1 > n3) { + cap = n1; + } else if (n2 > n3 && n2 > n1) { + cap = n2; + } else { + cap = n3; + } + int overheat = 0; + for (int iterator = 3; iterator < constituentsMap.size(); iterator++) { + Integer getConstituent = constituentsMap.get(iterator); + if (getConstituent > cap) { + overheat++; + } + } + if (overheat > 1) { + score -= overheat * 800; + } else { + score += 1300; } } } } - return tgwlistIndex; - } - - private Double iterateTrees(ArrayList sentenceConstituencyParseList2, ArrayList sentenceConstituencyParseList1, - Double score) { - double preConstituentsScore = score; - ArrayList constituentsMap = new ArrayList(); - int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size(); - for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { - int constiRelationsize = 0; - try { - if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { - for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { - try { - if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { - Set constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2); - Set constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1); - ArrayList constiLabels = new ArrayList(); - for (final Constituent consti : constinuent1) { - for (final Constituent consti1 : constinuent2) { - if (consti.value().equals(consti1.value()) && !constiLabels.contains(consti.value())) { - constiLabels.add(consti.value()); - constiRelationsize++; - } - } - } - int constituents1 = constinuent1.size() - constiRelationsize; - int constituents2 = constinuent2.size() - constiRelationsize; - constituentsMap.add(constituents1); - constituentsMap.add(constituents2); - constituentsMap.add(constiRelationsize); - if (constituentsMap.size() < 4) { - if ((constituents1 * 5 < constituents2 || constituents2 * 5 < constituents1) && constituents1 > 0 && constituents2 > 0) { - score -= (constituents1 + constituents2) * 200; - } else if ((constituents1 == 0 || 
constituents2 == 0) && (constituents1 >= constituents2 + 4 || constituents2 >= constituents1 + 4)) { - score -= constituents1 > constituents2 ? constituents1 * 500 : constituents2 * 500; - } else if (constiRelationsize >= constituents1 + constituents2 && (constituents1 > 2 && constituents2 > 2)) { - score += (constiRelationsize + constituents1 + constituents2) * 350; - } else if (constituents1 >= 2 && constituents2 >= 2 && constituents1 * 1.5 > constituents2 && constituents2 * 1.5 > constituents1) { - if (constituents1 == constituents2 && constiRelationsize - constituents1 == 1) { - score += (constiRelationsize + constituents1 + constituents2) * 1550; - } else if (constiRelationsize >= constituents1 && constituents1 == constituents2) { - score -= (constiRelationsize + constituents1 + constituents2) * 550; - } else if (constiRelationsize < constituents1 && constiRelationsize < constituents2) { - score += 800; - } else if ((constiRelationsize == constituents1 || constiRelationsize == constituents2) && constituents1 * 1.5 > constituents2 - && constituents2 * 1.5 > constituents1) { - score += (constiRelationsize + constituents1 + constituents2) * 350; - } - } else if (constiRelationsize > constituents1 + constituents2) { - score += 2500; - } else if (constiRelationsize * 5 < constituents1 || constiRelationsize * 5 < constituents2) { - score -= (constituents1 + constituents2) * 400; - } - } else { - score = preConstituentsScore; - int n1 = constituentsMap.get(0); - int n2 = constituentsMap.get(1); - int n3 = constituentsMap.get(2); - int cap = 0; - if (n1 > n2 && n1 > n3) { - cap = n1; - } else if (n2 > n3 && n2 > n1) { - cap = n2; - } else { - cap = n3; - } - int overheat = 0; - for (int iterator = 3; iterator < constituentsMap.size(); iterator++) { - Integer getConstituent = constituentsMap.get(iterator); - if (getConstituent > cap) { - overheat++; - } - } - if (overheat > 1) { - score -= overheat * 800; - } else { - score += 1300; - } - } - } - } catch (NoSuchElementException e) { - } - } - if (constituencySize > 10) { - score -= constituencySize * 400; - } - } - - } catch (NoSuchElementException e) { - - } + if (constituencySize > 10) { + score -= constituencySize * 400; } return score; } - private Double typeDependenciesGrammaticalRelation - (Collection allTypedDependencies1, Collection allTypedDependencies2, - Double score, ArrayList grammaticalMap1, - ArrayList grammaticalMap2, - ArrayList sentenceConstituencyParseList1, ArrayList sentenceConstituencyParseList2) { - ArrayList alltypeDepsSize1 = new ArrayList(); - ArrayList summationList = new ArrayList(); + private Double typeDependenciesGrammaticalRelation(Collection allTypedDependencies1, Collection allTypedDependencies2, + Double score, ConcurrentMap grammaticalMap1, ConcurrentMap grammaticalMap2, + ConcurrentMap sentenceConstituencyParseList1, ConcurrentMap sentenceConstituencyParseList2) { + ConcurrentMap alltypeDepsSizeMap = new MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap summationMap = new MapMaker().concurrencyLevel(2).makeMap(); int relationApplicable1 = 0; int relationApplicable2 = 0; int grammaticalRelation1 = 0; @@ -681,25 +236,19 @@ public class SentimentAnalyzerTest { for (TypedDependency TDY1 : allTypedDependencies1) { IndexedWord dep = TDY1.dep(); IndexedWord gov = TDY1.gov(); - for (GrammaticalStructure gs : grammaticalMap1) { + for (GrammaticalStructure gs : grammaticalMap1.values()) { GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep); - for (Tree sentenceConstituencyParse2 : 
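// Reviewer note: the typed-dependency pass adds 700 per applicable grammatical
// relation and 525 per applicable reln against a parse tree from the *other*
// sentence, but each tree may only be rewarded once per category; that is what
// the treeCollection guards enforce. The guard pattern in isolation:
import java.util.HashSet;
import java.util.Set;

final class OneBonusPerTree {
    public static void main(String[] args) {
        Set<String> rewarded = new HashSet<>(); // stands in for treeCollectionGramatical
        double score = 0.0;
        String[] applicableTrees = {"tree-A", "tree-B", "tree-A"}; // tree-A matches twice
        for (String tree : applicableTrees) {
            if (rewarded.add(tree)) { // false on the second tree-A: no double bonus
                score += 700;
            }
        }
        System.out.println("score = " + score); // 1400.0, not 2100.0
    }
}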
sentenceConstituencyParseList2) { - try { - if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { - if (grammaticalRelation.isApplicable(sentenceConstituencyParse2) && !treeCollectionGramatical.contains(sentenceConstituencyParse2)) { - score += 700; - grammaticalRelation1++; - treeCollectionGramatical.add(sentenceConstituencyParse2); - } - GrammaticalRelation reln = TDY1.reln(); - if (reln.isApplicable(sentenceConstituencyParse2) && !treeCollectionReln.contains(sentenceConstituencyParse2)) { - score += 525; - relationApplicable1++; - treeCollectionReln.add(sentenceConstituencyParse2); - } - } - } catch (NoSuchElementException e) { - + for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { + if (grammaticalRelation.isApplicable(sentenceConstituencyParse2) && !treeCollectionGramatical.contains(sentenceConstituencyParse2)) { + score += 700; + grammaticalRelation1++; + treeCollectionGramatical.add(sentenceConstituencyParse2); + } + GrammaticalRelation reln = TDY1.reln(); + if (reln.isApplicable(sentenceConstituencyParse2) && !treeCollectionReln.contains(sentenceConstituencyParse2)) { + score += 525; + relationApplicable1++; + treeCollectionReln.add(sentenceConstituencyParse2); } } } @@ -709,26 +258,20 @@ public class SentimentAnalyzerTest { for (TypedDependency TDY : allTypedDependencies2) { IndexedWord dep = TDY.dep(); IndexedWord gov = TDY.gov(); - for (GrammaticalStructure gs : grammaticalMap2) { + for (GrammaticalStructure gs : grammaticalMap2.values()) { GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep); - for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { - try { - if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { - if (grammaticalRelation.isApplicable(sentenceConstituencyParse1) && !treeCollectionGramatical.contains(sentenceConstituencyParse1)) { - score += 700; - grammaticalRelation2++; - treeCollectionGramatical.add(sentenceConstituencyParse1); - } - GrammaticalRelation reln = TDY.reln(); - //sentenceConstituencyParse1 - if (reln.isApplicable(sentenceConstituencyParse1) && !treeCollectionReln.contains(sentenceConstituencyParse1)) { - score += 525; - relationApplicable2++; - treeCollectionReln.add(sentenceConstituencyParse1); - } - } - } catch (NoSuchElementException r) { - + for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { + if (grammaticalRelation.isApplicable(sentenceConstituencyParse1) && !treeCollectionGramatical.contains(sentenceConstituencyParse1)) { + score += 700; + grammaticalRelation2++; + treeCollectionGramatical.add(sentenceConstituencyParse1); + } + GrammaticalRelation reln = TDY.reln(); + //sentenceConstituencyParse1 + if (reln.isApplicable(sentenceConstituencyParse1) && !treeCollectionReln.contains(sentenceConstituencyParse1)) { + score += 525; + relationApplicable2++; + treeCollectionReln.add(sentenceConstituencyParse1); } } } @@ -750,8 +293,8 @@ public class SentimentAnalyzerTest { } else { score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600; } - alltypeDepsSize1.add(allTypeDep1); - alltypeDepsSize1.add(allTypeDep2); + alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep1); + alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep2); } } if (allTypeDep1 >= 5 && allTypeDep2 >= 5) { @@ -759,9 +302,9 @@ public class SentimentAnalyzerTest { int smallerTypeDep = allTypeDep1 < allTypeDep2 ? 
allTypeDep1 : allTypeDep2; int summation = (largerTypeDep * largerTypeDep) - (smallerTypeDep * smallerTypeDep); if (summation / largerTypeDep < 15.0 && summation / largerTypeDep > 10.0 && smallerTypeDep * 2 > largerTypeDep - && !summationList.contains(summation)) { + && !summationMap.values().contains(summation)) { score += summation * 80; - summationList.add(summation); + summationMap.put(summationMap.size() + 1, summation); } else if (largerTypeDep == smallerTypeDep) { score += 2500; } @@ -789,45 +332,25 @@ public class SentimentAnalyzerTest { : (grammaticalRelation2 - grammaticalRelation1) * 500; } } - ArrayList filerTreeContent = new ArrayList(); - int runCount1 = 0; - for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { - try { - if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { - for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { - try { - if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { - for (CoreLabel LBW : sentenceConstituencyParse1.taggedLabeledYield()) { - for (CoreLabel LBW1 : sentenceConstituencyParse2.taggedLabeledYield()) { - if (LBW.lemma().equals(LBW1.lemma())) { - boolean found = false; - for (String str : filerTreeContent) { - if (str.equals(LBW.lemma())) { - found = true; - break; - } - } - if (!found) { - filerTreeContent.add(LBW.lemma()); - runCount1++; - } - } - } - } - } - } catch (NoSuchElementException e) { - - } - } - } - } catch (NoSuchElementException e) { - + ConcurrentMap filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap(); + AtomicInteger runCount1 = new AtomicInteger(0); + for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { + for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { + sentenceConstituencyParse1.taggedLabeledYield().forEach((LBW) -> { + sentenceConstituencyParse2.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma()) + && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> { + filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma()); + return _item; + }).forEachOrdered((_item) -> { + runCount1.getAndIncrement(); + }); + }); } } - score += runCount1 * 250; + score += runCount1.get() * 250; int typeSizeSmallest = 100; int typeSizeLargest = 0; - for (Integer i : alltypeDepsSize1) { + for (Integer i : alltypeDepsSizeMap.values()) { if (i > typeSizeLargest) { typeSizeLargest = i; } @@ -840,7 +363,7 @@ public class SentimentAnalyzerTest { } typeSizeLargest = 0; typeSizeSmallest = 100; - for (int i : summationList) { + for (int i : summationMap.values()) { if (i > typeSizeLargest) { typeSizeLargest = i; } @@ -854,55 +377,54 @@ public class SentimentAnalyzerTest { return score; } - private Double simpleRNNMatrixCalculations(Double score, ArrayList simpleSMXlist1, - ArrayList simpleSMXlist2) { + private Double simpleRNNMatrixCalculations(Double score, ConcurrentMap simpleSMXlist1, ConcurrentMap simpleSMXlist2) { List iteratedDoubleList = new ArrayList(); List iterateddotPredictions = new ArrayList(); double dotpredictionTransfer = 0.0; int iterationOverHeat = 0; double scoreFallback = score; - for (SimpleMatrix simpleSMX2 : simpleSMXlist2) { - ArrayList AccumulateDotList = new ArrayList<>(); - ArrayList subtractorList = new ArrayList(); - ArrayList dotPredictions = new ArrayList(); - ArrayList DotOverTransfer = new ArrayList(); + for (SimpleMatrix simpleSMX2 : simpleSMXlist2.values()) { + ConcurrentMap AccumulateDotMap = new 
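// Reviewer note: simpleRNNMatrixCalculations compares the RNN prediction vectors
// of both sentences pairwise via SimpleMatrix.dot, scaled by 100, before the band
// scoring below. Minimal EJML sketch of that measurement; the vectors here are
// hand-picked, not model output.
import org.ejml.simple.SimpleMatrix;

final class DotScore {
    public static void main(String[] args) {
        SimpleMatrix a = new SimpleMatrix(new double[][]{{0.1}, {0.7}, {0.2}});
        SimpleMatrix b = new SimpleMatrix(new double[][]{{0.2}, {0.6}, {0.2}});
        double dotPrediction = a.dot(b) * 100; // same scaling as the patch
        System.out.println("dot prediction: " + dotPrediction);
    }
}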
MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap subtractorMap = new MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap dotPredictions = new MapMaker().concurrencyLevel(2).makeMap(); + ConcurrentMap DotOverTransfer = new MapMaker().concurrencyLevel(2).makeMap(); Double totalSubtraction = 0.0; Double largest = 10.0; Double shortest = 100.0; - for (SimpleMatrix simpleSMX1 : simpleSMXlist1) { + for (SimpleMatrix simpleSMX1 : simpleSMXlist1.values()) { double dotPrediction2 = simpleSMX2.dot(simpleSMX1) * 100; double dotPrediction1 = simpleSMX1.dot(simpleSMX2) * 100; - AccumulateDotList.add(dotPrediction1); - AccumulateDotList.add(dotPrediction2); + AccumulateDotMap.put(AccumulateDotMap.size() + 1, dotPrediction1); + AccumulateDotMap.put(AccumulateDotMap.size() + 1, dotPrediction2); double subtracter1 = dotPrediction1 > 50 ? dotPrediction1 - 100 : dotPrediction1 > 0 ? 100 - dotPrediction1 : 0; double subtracter2 = dotPrediction2 > 50 ? dotPrediction2 - 100 : dotPrediction2 > 0 ? 100 - dotPrediction2 : 0; - subtractorList.add(subtracter1); - subtractorList.add(subtracter2); + subtractorMap.put(subtractorMap.size() + 1, subtracter1); + subtractorMap.put(subtractorMap.size() + 1, subtracter2); dotpredictionTransfer = dotPrediction1; - if (!dotPredictions.contains(dotPrediction1)) { - for (Double transferDots : DotOverTransfer) { + if (!dotPredictions.values().contains(dotPrediction1)) { + for (Double transferDots : DotOverTransfer.values()) { if (transferDots == dotPrediction1) { totalSubtraction += transferDots; } else { score -= subtracter1 * 25; } } - DotOverTransfer.add(dotPrediction1); + DotOverTransfer.put(DotOverTransfer.size(), dotPrediction1); } else { subtracter1 -= 100; subtracter1 *= 25; score += subtracter1 * dotPrediction1; } - dotPredictions.add(dotPrediction1); - if (!dotPredictions.contains(dotPrediction2)) { - for (Double transferDots : DotOverTransfer) { + dotPredictions.put(dotPredictions.size() + 1, dotPrediction1); + if (!dotPredictions.values().contains(dotPrediction2)) { + for (Double transferDots : DotOverTransfer.values()) { if (transferDots == dotPrediction2) { totalSubtraction += transferDots; } else { score -= subtracter1 * 25; } } - DotOverTransfer.add(dotPrediction2); + DotOverTransfer.put(DotOverTransfer.size(), dotPrediction2); if (dotPrediction2 > largest) { largest = dotPrediction2; } @@ -912,7 +434,7 @@ public class SentimentAnalyzerTest { Double dotPredictionIntervalDifference = largest - shortest; subtracter2 *= 25; if (dotPredictionIntervalDifference < 5.0) { - if (dotPredictions.size() > 0) { + if (dotPredictions.values().size() > 0) { if (subtracter2 > 0) { score -= subtracter2; } else { @@ -939,7 +461,7 @@ public class SentimentAnalyzerTest { score -= 4500; } } else if (!iterateddotPredictions.contains(dotPrediction2)) { - score += subtracter2 * dotPrediction2; // += + score -= subtracter2 * dotPrediction2; // += iterateddotPredictions.add(dotPrediction2); } else { score -= 550; @@ -959,19 +481,19 @@ public class SentimentAnalyzerTest { } } } - dotPredictions.add(dotPrediction2); + dotPredictions.put(dotPredictions.size() + 1, dotPrediction2); iterationOverHeat++; } Double subTracPre = 0.0; - for (Double subtractors : subtractorList) { + for (Double subtractors : subtractorMap.values()) { if (Objects.equals(subTracPre, subtractors)) { if (subTracPre > 43.5 && subTracPre < 50.0) { - score += (subTracPre * 15) / subtractorList.size(); + score += (subTracPre * 15) / subtractorMap.values().size(); } else if (subTracPre > 60.0 && subTracPre < 
66.5) { - score += (subTracPre * 15) / subtractorList.size(); + score += (subTracPre * 15) / subtractorMap.values().size(); } } else if (subTracPre > 75.0 && subTracPre < 90.0) { - score += (subTracPre * 50) / subtractorList.size(); + score += (subTracPre * 50) / subtractorMap.values().size(); } else if (subTracPre >= 55.0) { score -= 2800; } else if (subTracPre < -25.0 && subTracPre > -45.0) { @@ -986,7 +508,7 @@ public class SentimentAnalyzerTest { } Double preAccumulatorDot = 0.0; Double postAccumulatorDot = 0.0; - for (Double accumulators : AccumulateDotList) { + for (Double accumulators : AccumulateDotMap.values()) { if (Objects.equals(preAccumulatorDot, accumulators)) { if (Objects.equals(postAccumulatorDot, accumulators)) { score -= 1400; @@ -996,7 +518,7 @@ public class SentimentAnalyzerTest { preAccumulatorDot = accumulators; } subTracPre = 0.0; - for (Double subtractors : subtractorList) { + for (Double subtractors : subtractorMap.values()) { if (Objects.equals(subTracPre, subtractors) && subTracPre != 0.0) { if (!iteratedDoubleList.contains(subTracPre)) { score += 500; @@ -1015,17 +537,16 @@ public class SentimentAnalyzerTest { return score; } - private Double simpleRNNMaxtrixVectors(Double - score, ArrayList simpleSMXlistVector1, ArrayList simpleSMXlistVector2) { - ArrayList elementSumCounter = new ArrayList<>(); - ArrayList dotMap = new ArrayList<>(); - ArrayList elementSumMap = new ArrayList<>(); - ArrayList dotSumMap = new ArrayList<>(); + private Double simpleRNNMaxtrixVectors(Double score, ConcurrentMap simpleSMXlistVector1, ConcurrentMap simpleSMXlistVector2) { + ConcurrentMap elementSumCounter = new MapMaker().concurrencyLevel(3).makeMap(); + ConcurrentMap dotMap = new MapMaker().concurrencyLevel(3).makeMap(); + ConcurrentMap elementSumMap = new MapMaker().concurrencyLevel(3).makeMap(); + ConcurrentMap dotSumMap = new MapMaker().concurrencyLevel(3).makeMap(); Double preDot = 0.0; Double postDot = 0.0; - int iterateSize = simpleSMXlistVector1.size() + simpleSMXlistVector2.size(); - for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2) { - for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1) { + int iterateSize = simpleSMXlistVector1.values().size() + simpleSMXlistVector2.values().size(); + for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2.values()) { + for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1.values()) { double dot2 = simpleSMX2.dot(simpleSMX1); double elementSum2 = simpleSMX2.kron(simpleSMX1).elementSum(); double dot1 = simpleSMX1.dot(simpleSMX2); @@ -1044,13 +565,13 @@ public class SentimentAnalyzerTest { } preDot = dot1; elementSum1 = Math.round(elementSum1 * 100.0) / 100.0; - elementSumCounter.add(elementSum1); - dotMap.add(dot1); + elementSumCounter.put(elementSumCounter.size() + 1, elementSum1); + dotMap.put(dotMap.size() + 1, dot1); preDot = dot2; elementSum2 = Math.round(elementSum2 * 100.0) / 100.0; - elementSumCounter.add(elementSum2); - dotMap.add(dot2); - if (!dotSumMap.contains(dot1)) { + elementSumCounter.put(elementSumCounter.size() + 1, elementSum2); + dotMap.put(dotMap.size() + 1, dot2); + if (!dotSumMap.values().contains(dot1)) { if (dot1 < 0.1 && dot1 > 0.050) { score += 256; } @@ -1059,11 +580,11 @@ public class SentimentAnalyzerTest { } else if (dot1 > 0.40 && dot1 < 0.445) { score += 3600; } - dotSumMap.add(dot1); + dotSumMap.put(dotSumMap.size() + 1, dot1); } else { score -= 50; } - if (!elementSumMap.contains(elementSum1)) { + if (!elementSumMap.values().contains(elementSum1)) { if (elementSum1 < 0.01 && elementSum1 > 0.00) { score += 
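/* The elementSum values scored below come from a Kronecker product, which has
   a closed form here: every element of one vector multiplies every element of
   the other, so the total equals the product of the two element sums. A small
   sketch of the quantity and the two-decimal rounding idiom used above
   (assuming EJML SimpleMatrix as elsewhere in this class):

       double elementSum = simpleSMX1.kron(simpleSMX2).elementSum();
       // equivalent, without materializing the Kronecker product:
       double same = simpleSMX1.elementSum() * simpleSMX2.elementSum();
       elementSum = Math.round(elementSum * 100.0) / 100.0; // keep two decimals
*/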
1300;
                    } else if (elementSum1 > 0.1 && elementSum1 < 1.0) {
@@ -1071,11 +592,11 @@ public class SentimentAnalyzerTest {
                    } else {
                        score -= elementSum1 * 1024;
                    }
-                    elementSumMap.add(elementSum1);
+                    elementSumMap.put(elementSumMap.size() + 1, elementSum1);
                } else {
                    score -= 50;
                }
-                if (!dotSumMap.contains(dot2)) {
+                if (!dotSumMap.values().contains(dot2)) {
                    if (dot2 < 0.000) {
                        score += dot2 * 500;
                    } else if (dot2 < 0.1) {
@@ -1084,13 +605,13 @@ public class SentimentAnalyzerTest {
                    if (dot2 > 0.50) {
                        score -= 1200;
                    }
-                    dotSumMap.add(dot2);
+                    dotSumMap.put(dotSumMap.size() + 1, dot2);
                } else if (dot2 > 0.050 && dot2 < 0.10) {
                    score -= 350;
                } else {
                    score = score > 0 ? score - dot2 * 1200 : score + dot2 * 1200;
                }
-                if (!elementSumMap.contains(elementSum2)) {
+                if (!elementSumMap.values().contains(elementSum2)) {
                    if (elementSum2 < 0.01 && elementSum2 > 0.00) {
                        score += 3300;
                    } else if (elementSum2 > 0.1 && elementSum2 < 0.2) {
@@ -1098,7 +619,7 @@ public class SentimentAnalyzerTest {
                    } else {
                        score -= elementSum2 * 1024;
                    }
-                    elementSumMap.add(elementSum2);
+                    elementSumMap.put(elementSumMap.size() + 1, elementSum2);
                } else if (elementSum2 > 0.050 && elementSum2 < 0.10) {
                    score += 750;
                } else {
@@ -1113,10 +634,10 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private Double elementsAndDotsRelation(Double
-            score, ArrayList<Double> dotMap, ArrayList<Double> elementSumCounter) {
-        OptionalDouble minvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).min();
-        OptionalDouble maxvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).max();
+    private Double elementsAndDotsRelation(Double score, ConcurrentMap<Integer, Double> dotMap, ConcurrentMap<Integer, Double> elementSumCounter) {
+        OptionalDouble minvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).min();
+        OptionalDouble maxvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).max();
        boolean permitted = false;
        if (minvalueDots.getAsDouble() != maxvalueDots.getAsDouble()) {
            permitted = true;
        }
@@ -1134,8 +655,8 @@ public class SentimentAnalyzerTest {
                score -= 3500;
            }
        }
-        OptionalDouble minvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).min();
-        OptionalDouble maxvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).max();
+        OptionalDouble minvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).min();
+        OptionalDouble maxvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).max();
        Double elementsVariance = maxvalueElements.getAsDouble() - minvalueElements.getAsDouble();
        if (elementsVariance != 0.0) {
            if (elementsVariance <= 0.01 && maxvalueElements.getAsDouble() <= 0.02) {
@@ -1160,8 +681,7 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private Double sentimentMatrixVariances(Double score, int longest1, int longest2, int mainSentiment1,
-                                            int mainSentiment2) {
+    private Double sentimentMatrixVariances(Double score, int longest1, int longest2, int mainSentiment1, int mainSentiment2) {
        if (longest1 != longest2) {
            long deffLongest = longest1 > longest2 ? longest1 : longest2;
            long deffshorter = longest1 < longest2 ? longest1 : longest2;
@@ -1197,16 +717,28 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private int classifyRawEvaluation() {
-        final List classifyRaw1 = this.listF;
-        final List classifyRaw2 = this.list1;
-        return (classifyRaw1.size() > classifyRaw2.size() ?
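/* "Variance" in elementsAndDotsRelation above is really the min-to-max range
   of the collected values. A compact sketch of the computation, using only
   java.util.stream:

       import java.util.OptionalDouble;
       import java.util.stream.DoubleStream;

       OptionalDouble min = DoubleStream.of(0.12, 0.18, 0.15).min();
       OptionalDouble max = DoubleStream.of(0.12, 0.18, 0.15).max();
       double range = max.getAsDouble() - min.getAsDouble(); // 0.06

   getAsDouble() throws NoSuchElementException on an empty stream, which the
   new call() implementation guards against with per-stage try blocks. */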
classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200; + private final Map.Entry> classifyRawEvaluation(Double score, SentimentValueCache cacheSentimentLocal1, + SentimentValueCache cacheSentimentLocal2) { + if (cacheSentiment1 == null || cacheSentiment2 == null) { + DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); + if (cacheSentiment1 == null) { + cacheSentimentLocal1.setClassifyRaw(classifier.classifyRaw(str, readerAndWriter)); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2.setClassifyRaw(classifier.classifyRaw(str1, readerAndWriter)); + } + } + final List classifyRaw1 = cacheSentiment1 == null ? cacheSentimentLocal1.getClassifyRaw() : cacheSentiment1.getClassifyRaw(); + final List classifyRaw2 = cacheSentiment2 == null ? cacheSentimentLocal2.getClassifyRaw() : cacheSentiment2.getClassifyRaw(); + score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200; + Map.Entry< Double, Map.Entry> entry + = new AbstractMap.SimpleEntry(score, new AbstractMap.SimpleEntry(cacheSentimentLocal1, cacheSentimentLocal2)); + return entry; } - private Double entryCountsRelation(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - int entry1 = cacheSentimentLocal1.size(); - int entry2 = cacheSentimentLocal2.size(); + private Double entryCountsRelation(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int entry1 = cacheSentiment1 == null ? cacheSentimentLocal1.getEntryCounts().values().size() : cacheSentiment1.getEntryCounts().values().size(); + int entry2 = cacheSentiment2 == null ? cacheSentimentLocal2.getEntryCounts().values().size() : cacheSentiment2.getEntryCounts().values().size(); if (entry1 > 0 && entry2 > 0) { if ((entry1 >= entry2 * 5) || (entry2 >= entry1 * 5)) { score -= entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450; @@ -1215,7 +747,7 @@ public class SentimentAnalyzerTest { } else if ((entry1 >= entry2 * 2 || entry2 >= entry1 * 2) && entry1 * 2 >= entry2 && entry2 * 2 >= entry1) { score -= entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450; } else if (entry1 * 3 >= entry2 && entry2 * 3 >= entry1) { - score -= entry1 > entry2 ? (entry1 - entry2) * 550 : (entry2 - entry1) * 550; + score += entry1 > entry2 ? (entry1 - entry2) * 550 : (entry2 - entry1) * 550; } else if (entry1 > 10 && entry2 > 10 && entry1 * 2 > entry2 && entry2 * 2 > entry1) { score += entry1 > entry2 ? 
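/* classifyRawEvaluation above hands back both the adjusted score and the two
   possibly freshly filled caches in one value by nesting Map.Entry. A minimal
   sketch of that pair-of-pairs shape, with the type parameters written out as
   the surrounding code implies them:

       import java.util.AbstractMap;
       import java.util.Map;

       Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> entry =
               new AbstractMap.SimpleEntry<>(score,
                       new AbstractMap.SimpleEntry<>(cacheSentimentLocal1, cacheSentimentLocal2));
       Double newScore = entry.getKey();
       SentimentValueCache left = entry.getValue().getKey();
*/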
entry2 * 600 : entry1 * 600; } @@ -1223,116 +755,104 @@ public class SentimentAnalyzerTest { return score; } - private ArrayList grammaticalStructureAllTypedDependencies( - ArrayList grammaticalStructures) { - ArrayList typedDependenciesArr = new ArrayList<>(); - for (GrammaticalStructure gs : grammaticalStructures) { - Collection typedDependencies = gs.allTypedDependencies(); - typedDependenciesArr.addAll(typedDependencies); - } - return typedDependenciesArr; - } - - private ArrayList grammaticalStructureSetup(ArrayList trees) { - ArrayList grammaticalStructures = new ArrayList(); - for (Tree tree : trees) { - try { - if (!tree.isEmpty()) { - GrammaticalStructure gs = gsf.newGrammaticalStructure(tree); - grammaticalStructures.add(gs); - } - } catch (NoSuchElementException e) { - - } - } - return grammaticalStructures; - } - - private ArrayList retrieveTrees(List sentences) { - ArrayList treeList = new ArrayList(); - for (CoreMap sentence : sentences) { + private SentimentValueCache GrammaticStructureSetup(SentimentValueCache cacheSentimentLocal, Annotation pipelineAnnotation) { + for (CoreMap sentence : pipelineAnnotation.get(CoreAnnotations.SentencesAnnotation.class)) { Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class); - treeList.add(sentenceConstituencyParse); + cacheSentimentLocal.addSentenceConstituencyParse(sentenceConstituencyParse); + GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse); + cacheSentimentLocal.addTypedDependencies(gs.allTypedDependencies()); + cacheSentimentLocal.addGS(gs); } - return treeList; + return cacheSentimentLocal; } - private ArrayList sentimentRNNCorePredicted(List sentences, - Class sentimentAnnotatedTreeClass) { - ArrayList rnnCoreAnnotationsPrediction = new ArrayList<>(); - for (CoreMap sentence : sentences) { - Tree tree = sentence.get(sentimentAnnotatedTreeClass); - if (tree != null) { - SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); - rnnCoreAnnotationsPrediction.add(predictions); - } - } - return rnnCoreAnnotationsPrediction; + private SentimentValueCache initializeCacheSetup(String str, SentimentValueCache cacheSentimentLocal) { + cacheSentimentLocal = new SentimentValueCache(str); + cacheSentimentLocal.setTaggedwords(getTaggedWordList(str)); + cacheSentimentLocal.setCounter(tokenizeCounting(cacheSentimentLocal.getTaggedwordlist())); + return cacheSentimentLocal; } - private ArrayList sentimentRNNCoreNodevectors(List sentences, - Class sentimentAnnotatedTreeClass) { - ArrayList rnnCoreAnnotationsNodevectors = new ArrayList<>(); - for (CoreMap sentence : sentences) { - Tree tree = sentence.get(sentimentAnnotatedTreeClass); - if (tree != null) { - SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree); - rnnCoreAnnotationsNodevectors.add(nodeVector); - } - } - return rnnCoreAnnotationsNodevectors; - } - - private ArrayList sentimentRNNCoreAnnotations(List sentences, - Class sentimentAnnotatedTreeClass) { - ArrayList rnnCoreAnnotationsPredicted = new ArrayList<>(); - for (CoreMap sentence : sentences) { - Tree tree = sentence.get(sentimentAnnotatedTreeClass); + private SentimentValueCache sentimentCoreAnnotationSetup(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { + for (CoreMap sentence : pipelineAnnotationSentiment.get(CoreAnnotations.SentencesAnnotation.class)) { + Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class); if (tree != null) { int predictedClass = 
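/* GrammaticStructureSetup above walks each parsed sentence once and caches the
   constituency tree, its GrammaticalStructure, and the typed dependencies, so
   later scorers never re-parse. The essential CoreNLP calls, as used in this
   class (gsf is the GrammaticalStructureFactory field):

       for (CoreMap sentence : pipelineAnnotation.get(CoreAnnotations.SentencesAnnotation.class)) {
           Tree parse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
           GrammaticalStructure gs = gsf.newGrammaticalStructure(parse);
           Collection<TypedDependency> deps = gs.allTypedDependencies();
       }
*/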
RNNCoreAnnotations.getPredictedClass(tree); - rnnCoreAnnotationsPredicted.add(predictedClass); + SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); + SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree); + cacheSentimentLocal.addRNNPredictClass(predictedClass); + cacheSentimentLocal.addSimpleMatrix(predictions); + cacheSentimentLocal.addSimpleMatrixVector(nodeVector); } } - return rnnCoreAnnotationsPredicted; + return cacheSentimentLocal; } - private int setupMainSentiment(List sentences4, - Class sentimentAnnotatedTreeClass) { - int longest = 0; - int longestSentiment = 0; - for (CoreMap sentence : sentences4) { - Tree tree = sentence.get(sentimentAnnotatedTreeClass); + private SentimentValueCache setupMainSentimentandLongestVal(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { + for (CoreMap sentence : pipelineAnnotationSentiment.get(CoreAnnotations.SentencesAnnotation.class)) { + Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class); int sentiment = RNNCoreAnnotations.getPredictedClass(tree); String partText = sentence.toString(); - if (partText.length() > longest) { - longestSentiment = sentiment; - longest = partText.length(); + //SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); + if (partText.length() > cacheSentimentLocal.getLongest()) { + cacheSentimentLocal.setMainSentiment(sentiment); + cacheSentimentLocal.setLongest(partText.length()); } } - return longestSentiment; + return cacheSentimentLocal; } - private int setupMainLongest(List sentences) { - int longest = 0; + private SentimentValueCache jmweAnnotationSetup(Annotation jmweStrAnnotation, SentimentValueCache cacheSentimentLocal) { + List sentences = jmweStrAnnotation.get(CoreAnnotations.SentencesAnnotation.class); + Collection> tokeninflectionMap = new ArrayList(); + int tokenadder = 0; for (CoreMap sentence : sentences) { - String partText = sentence.toString(); - if (partText.length() > longest) { - longest = partText.length(); + for (IMWE token : sentence.get(JMWEAnnotation.class)) { + if (token.isInflected()) { + cacheSentimentLocal.setInflectedCounterPositive(cacheSentimentLocal.getInflectedCounterPositive() + 1); + } else if (!tokeninflectionMap.contains(token)) { + cacheSentimentLocal.setInflectedCounterNegative(cacheSentimentLocal.getInflectedCounterNegative() + 1); + tokeninflectionMap.add(token); + } + cacheSentimentLocal.addstrTokenForm(token.getForm()); + cacheSentimentLocal.addstrTokenGetEntry(token.getEntry().toString().substring(token.getEntry().toString().length() - 1)); + Collection values = token.getPartMap().values(); + IMWEDesc entry = token.getEntry(); + cacheSentimentLocal.setMarkedContinuousCounter(cacheSentimentLocal.getMarkedContinuousCounter() + entry.getMarkedContinuous()); + cacheSentimentLocal.setUnmarkedPatternCounter(cacheSentimentLocal.getUnmarkedPatternCounter() + entry.getUnmarkedPattern()); + for (IMWEDesc.IPart iPart : values) { + cacheSentimentLocal.addstrTokenGetiPart(iPart.getForm()); + } + for (String strPostPrefix : entry.getPOS().getPrefixes()) { + cacheSentimentLocal.addstrTokenEntryPOS(strPostPrefix); + } + for (int counts : entry.getCounts()) { + cacheSentimentLocal.addEntryCounts(counts); + } + for (IToken tokens : token.getTokens()) { + cacheSentimentLocal.addITokenMapTag(tokens.getTag()); + for (String strtoken : tokens.getStems()) { + cacheSentimentLocal.addstrTokenStems(strtoken); + 
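/* jmweAnnotationSetup below tallies multiword-expression statistics per
   sentence. A trimmed sketch of the jMWE token walk it performs, with the
   element type IMWE<IToken> assumed from typical jMWE usage:

       for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
           if (token.isInflected()) { inflectedPositive++; }
           IMWEDesc entry = token.getEntry();
           markedContinuous += entry.getMarkedContinuous();
           unmarkedPattern += entry.getUnmarkedPattern();
           for (IMWEDesc.IPart part : token.getPartMap().values()) {
               parts.add(part.getForm()); // cached as strTokenGetiPart entries
           }
       }
*/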
cacheSentimentLocal.setMarkedContiniousCounterEntries(cacheSentimentLocal.getMarkedContiniousCounterEntries() + 1); + } + } + tokenadder += 1; } + cacheSentimentLocal.setAnotatorcounter(cacheSentimentLocal.getAnotatorcounter() + 1); } - return longest; + cacheSentimentLocal.setTokensCounter(tokenadder); + return cacheSentimentLocal; } - private Double entryCountsScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - ArrayList countsMap = new ArrayList(); - int totalsize = cacheSentimentLocal1.size() + cacheSentimentLocal2.size(); - for (int counts : cacheSentimentLocal1) { - for (int counts1 : cacheSentimentLocal2) { + private Double entryCountsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + ConcurrentMap countsMap = new MapMaker().concurrencyLevel(2).makeMap(); + int totalsize = cacheSentimentLocal1.getEntryCounts().values().size() + cacheSentimentLocal2.getEntryCounts().values().size(); + for (int counts : cacheSentimentLocal1.getEntryCounts().values()) { + for (int counts1 : cacheSentimentLocal2.getEntryCounts().values()) { if (counts > 0 && counts1 > 0) { - if (counts == counts1 && !countsMap.contains(counts)) { + if (counts == counts1 && !countsMap.values().contains(counts)) { score += (counts * 250) / totalsize; - countsMap.add(counts); + countsMap.put(countsMap.size() + 1, counts); } else if (counts * 3 < counts1 || counts1 * 3 < counts) { score -= 600; } @@ -1342,18 +862,17 @@ public class SentimentAnalyzerTest { return score; } - private Double tokenEntryPosScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - if (cacheSentimentLocal1.size() > 1 && cacheSentimentLocal2.size() > 1) { - for (String strTokenPos1 : cacheSentimentLocal1) { - for (String strTokenPos2 : cacheSentimentLocal2) { + private Double tokenEntryPosScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + if (cacheSentimentLocal1.getstrTokenEntryPOS().values().size() > 1 && cacheSentimentLocal2.getstrTokenEntryPOS().values().size() > 1) { + for (String strTokenPos1 : cacheSentimentLocal1.getstrTokenEntryPOS().values()) { + for (String strTokenPos2 : cacheSentimentLocal2.getstrTokenEntryPOS().values()) { if (strTokenPos1.equals(strTokenPos2)) { score += 500; } } } - int posEntrySize1 = cacheSentimentLocal1.size(); - int posEntrySize2 = cacheSentimentLocal2.size(); + int posEntrySize1 = cacheSentimentLocal1.getstrTokenEntryPOS().values().size(); + int posEntrySize2 = cacheSentimentLocal2.getstrTokenEntryPOS().values().size(); if (posEntrySize1 * 3 > posEntrySize2 && posEntrySize2 * 3 > posEntrySize1) { score += posEntrySize1 > posEntrySize2 ? 
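/* Beware the mixed-type arithmetic in entryCountsScoring below:
   (counts * 250) / totalsize divides two ints, so the quotient is truncated
   before it is added to the Double score. For example:

       int counts = 3, totalsize = 7;
       double truncated = (counts * 250) / totalsize; // 107.0
       double exact = counts * 250.0 / totalsize;     // 107.142857...

   If the truncation is not intended, promoting one operand to double keeps
   the fractional part. */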
(posEntrySize1 - posEntrySize2) * 700 : (posEntrySize2 - posEntrySize1) * 700; } @@ -1361,8 +880,9 @@ public class SentimentAnalyzerTest { return score; } - private Double unmarkedPatternCounterScoring(Double score, int UnmarkedPatternCounter1, - int UnmarkedPatternCounter2) { + private Double unmarkedPatternCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int UnmarkedPatternCounter1 = cacheSentimentLocal1.getUnmarkedPatternCounter(); + int UnmarkedPatternCounter2 = cacheSentimentLocal2.getUnmarkedPatternCounter(); if (UnmarkedPatternCounter1 > 0 && UnmarkedPatternCounter2 > 0) { if (UnmarkedPatternCounter1 < 100 && UnmarkedPatternCounter2 < 100) { if (UnmarkedPatternCounter1 * 2 > UnmarkedPatternCounter2 && UnmarkedPatternCounter2 * 2 > UnmarkedPatternCounter1) { @@ -1377,10 +897,11 @@ public class SentimentAnalyzerTest { return score; } - private Double markedContiniousCounterScoring(Double score, int MarkedContinuousCounter1, - int MarkedContinuousCounter2) { - int MarkedContiniousCounter1Entries = MarkedContinuousCounter1; - int MarkedContiniousCounter2Entries = MarkedContinuousCounter2; + private Double markedContiniousCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int MarkedContinuousCounter1 = cacheSentimentLocal1.getMarkedContinuousCounter(); + int MarkedContinuousCounter2 = cacheSentimentLocal2.getMarkedContinuousCounter(); + int MarkedContiniousCounter1Entries = cacheSentimentLocal1.getMarkedContiniousCounterEntries(); + int MarkedContiniousCounter2Entries = cacheSentimentLocal2.getMarkedContiniousCounterEntries(); if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) { if (MarkedContinuousCounter1 > MarkedContinuousCounter2 * 50 || MarkedContinuousCounter2 > MarkedContinuousCounter1 * 50) { score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? MarkedContinuousCounter1 * 120 : MarkedContinuousCounter2 * 120; @@ -1408,20 +929,19 @@ public class SentimentAnalyzerTest { return score; } - private Double strTokensMapScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - ArrayList strtokensMap = new ArrayList(); - for (String strTokeniPart1 : cacheSentimentLocal1) { - for (String strTokeniPart2 : cacheSentimentLocal2) { - if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.contains(strTokeniPart2)) { - strtokensMap.add(strTokeniPart2); + private Double strTokensMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + ConcurrentMap strtokensMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String strTokeniPart1 : cacheSentimentLocal1.getstrTokenGetiPart().values()) { + for (String strTokeniPart2 : cacheSentimentLocal2.getstrTokenGetiPart().values()) { + if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) { + strtokensMap.put(strtokensMap.size() + 1, strTokeniPart2); score += 800; } } } - int tokenIPartSize1 = cacheSentimentLocal1.size(); - int tokenIPartSize2 = cacheSentimentLocal2.size(); - int strTokenMapSize = strtokensMap.size(); + int tokenIPartSize1 = cacheSentimentLocal1.getstrTokenGetiPart().values().size(); + int tokenIPartSize2 = cacheSentimentLocal2.getstrTokenGetiPart().values().size(); + int strTokenMapSize = strtokensMap.values().size(); if (tokenIPartSize1 * 2 > tokenIPartSize2 && tokenIPartSize2 * 2 > tokenIPartSize1) { score += tokenIPartSize1 > tokenIPartSize2 ? 
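/* Several scorers here repeat the same "within a factor of k" test. A
   hypothetical helper capturing the shape (withinFactor is not part of the
   patch, it is only for illustration):

       static boolean withinFactor(int a, int b, int k) {
           return a * k > b && b * k > a;
       }

   For instance, the unmarked-pattern scorer above rewards counters that are
   both positive, both under 100, and satisfy withinFactor(counter1, counter2, 2). */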
(tokenIPartSize1 - tokenIPartSize2) * 700 : (tokenIPartSize2 - tokenIPartSize1) * 700; score += strTokenMapSize * 600; @@ -1431,19 +951,18 @@ public class SentimentAnalyzerTest { return score; } - private Double strTokenEntryScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - int tokenEntry1 = cacheSentimentLocal1.size(); - int tokenEntry2 = cacheSentimentLocal2.size(); + private Double strTokenEntryScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int tokenEntry1 = cacheSentimentLocal1.getstrTokenGetEntry().values().size(); + int tokenEntry2 = cacheSentimentLocal2.getstrTokenGetEntry().values().size(); boolean boundaryLeaks = false; int remnantCounter = 0; if (tokenEntry1 * 2 != tokenEntry2 && tokenEntry2 * 2 != tokenEntry1) { boundaryLeaks = true; } - ArrayList entryTokenMap = new ArrayList(); - for (String strTokenEntry1 : cacheSentimentLocal1) { - for (String strTokenEntry2 : cacheSentimentLocal2) { - if (!entryTokenMap.contains(strTokenEntry2)) { + ConcurrentMap entryTokenMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String strTokenEntry1 : cacheSentimentLocal1.getstrTokenGetEntry().values()) { + for (String strTokenEntry2 : cacheSentimentLocal2.getstrTokenGetEntry().values()) { + if (!entryTokenMap.values().contains(strTokenEntry2)) { if (strTokenEntry1.equals(strTokenEntry2)) { score += boundaryLeaks ? 2500 : 2500 / 2; } else if (!boundaryLeaks) { @@ -1452,27 +971,26 @@ public class SentimentAnalyzerTest { remnantCounter++; } } - entryTokenMap.add(strTokenEntry2); + entryTokenMap.put(entryTokenMap.size() + 1, strTokenEntry2); } } score += remnantCounter * 250; return score; } - private Double strTokenMapTagsScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - ArrayList iTokenMapTagsMap = new ArrayList(); - for (String strmapTag : cacheSentimentLocal1) { - for (String strmapTag1 : cacheSentimentLocal2) { - if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.contains(strmapTag1)) { + private Double strTokenMapTagsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + ConcurrentMap iTokenMapTagsMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String strmapTag : cacheSentimentLocal1.getITokenMapTag().values()) { + for (String strmapTag1 : cacheSentimentLocal2.getITokenMapTag().values()) { + if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.values().contains(strmapTag1)) { score -= 1450; - iTokenMapTagsMap.add(strmapTag); + iTokenMapTagsMap.put(iTokenMapTagsMap.size() + 1, strmapTag); } } } - int mapTagsize1 = cacheSentimentLocal1.size(); - int mapTagsize2 = cacheSentimentLocal2.size(); - int tokenTagMapSize = iTokenMapTagsMap.size(); + int mapTagsize1 = cacheSentimentLocal1.getITokenMapTag().values().size(); + int mapTagsize2 = cacheSentimentLocal2.getITokenMapTag().values().size(); + int tokenTagMapSize = iTokenMapTagsMap.values().size(); if (mapTagsize1 != 0 && mapTagsize2 != 0) { if (mapTagsize1 * 2 > mapTagsize2 && mapTagsize2 * 2 > mapTagsize1) { score += mapTagsize1 > mapTagsize2 ? 
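/* The nested-loop-plus-dedup-map pattern in strTokensMapScoring and its
   neighbors is essentially a set intersection. A side-effect-free sketch of
   the same overlap count (java.util only; equivalence assumes the values are
   distinct, the usual case here):

       Set<String> overlap = new HashSet<>(cache1Values);
       overlap.retainAll(cache2Values);
       score += overlap.size() * 800; // strTokensMapScoring awards 800 per shared iPart form
*/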
(mapTagsize1 - mapTagsize2) * 700 : (mapTagsize2 - mapTagsize1) * 700; @@ -1484,14 +1002,13 @@ public class SentimentAnalyzerTest { return score; } - private Double tokenformSizeScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - int tokenform1size = cacheSentimentLocal1.size(); - int tokenform2size = cacheSentimentLocal2.size(); + private Double tokenformSizeScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int tokenform1size = cacheSentimentLocal1.getstrTokenForm().values().size(); + int tokenform2size = cacheSentimentLocal2.getstrTokenForm().values().size(); if (tokenform1size > 0 || tokenform2size > 0) { if (tokenform1size < tokenform2size * 5 && tokenform2size < tokenform1size * 5) { - for (String strTokenForm1itr1 : cacheSentimentLocal1) { - for (String strTokenForm1itr2 : cacheSentimentLocal2) { + for (String strTokenForm1itr1 : cacheSentimentLocal1.getstrTokenForm().values()) { + for (String strTokenForm1itr2 : cacheSentimentLocal2.getstrTokenForm().values()) { if (strTokenForm1itr1.equals(strTokenForm1itr2)) { score -= 1600; } else { @@ -1510,26 +1027,25 @@ public class SentimentAnalyzerTest { return score; } - private Double tokenStemmingMapScoring(Double - score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { - ArrayList tokenStemmingMap = new ArrayList(); - for (String strTokenStem : cacheSentimentLocal1) { - for (String strTokenStem1 : cacheSentimentLocal2) { - if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.contains(strTokenStem)) { + private Double tokenStemmingMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + ConcurrentMap tokenStemmingMap = new MapMaker().concurrencyLevel(2).makeMap(); + for (String strTokenStem : cacheSentimentLocal1.getstrTokenStems().values()) { + for (String strTokenStem1 : cacheSentimentLocal2.getstrTokenStems().values()) { + if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.values().contains(strTokenStem)) { score += 500; - tokenStemmingMap.add(strTokenStem); + tokenStemmingMap.put(tokenStemmingMap.size() + 1, strTokenStem); } } } return score; } - private Double inflectedCounterScoring(Double score, int inflectedCounterPositive1, - int inflectedCounterPositive2, - int inflectedCounterNegative1, int inflectedCounterNegative2) { - int inflectedCounterNegative = inflectedCounterNegative1 > inflectedCounterNegative1 - ? inflectedCounterNegative1 - inflectedCounterNegative2 - : inflectedCounterNegative2 - inflectedCounterNegative1; + private Double inflectedCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + int inflectedCounterPositive1 = cacheSentimentLocal1.getInflectedCounterPositive(); + int inflectedCounterPositive2 = cacheSentimentLocal2.getInflectedCounterPositive(); + int inflectedCounterNegative = cacheSentimentLocal1.getInflectedCounterNegative() > cacheSentimentLocal2.getInflectedCounterNegative() + ? 
cacheSentimentLocal1.getInflectedCounterNegative() - cacheSentimentLocal2.getInflectedCounterNegative()
+                : cacheSentimentLocal2.getInflectedCounterNegative() - cacheSentimentLocal1.getInflectedCounterNegative();
        if ((inflectedCounterPositive1 + inflectedCounterPositive2) > inflectedCounterNegative && inflectedCounterNegative > 0) {
            score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 650;
        }
@@ -1548,7 +1064,9 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private Double annotatorCountScoring(Double score, int anotatorcounter1, int anotatorcounter2) {
+    private Double annotatorCountScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
+        int anotatorcounter1 = cacheSentimentLocal1.getAnotatorcounter();
+        int anotatorcounter2 = cacheSentimentLocal2.getAnotatorcounter();
        if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
            if (anotatorcounter1 * 2 > anotatorcounter2 && anotatorcounter2 * 2 > anotatorcounter1) {
                score += anotatorcounter1 > anotatorcounter2 ? (anotatorcounter1 - anotatorcounter2) * 700
@@ -1560,7 +1078,9 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private Double tokensCounterScoring(Double score, int tokensCounter1, int tokensCounter2) {
+    private Double tokensCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
+        int tokensCounter1 = cacheSentimentLocal1.getTokensCounter();
+        int tokensCounter2 = cacheSentimentLocal2.getTokensCounter();
        if ((tokensCounter1 > 1 && tokensCounter2 > 1) && tokensCounter1 < tokensCounter2 * 5 && tokensCounter2 < tokensCounter1 * 5) {
            if (tokensCounter1 > tokensCounter2 / 2 && tokensCounter2 > tokensCounter1 / 2 && tokensCounter1 < 10 && tokensCounter2 < 10) {
                score += (tokensCounter1 + tokensCounter2) * 500;
@@ -1589,12 +1109,36 @@ public class SentimentAnalyzerTest {
        return score;
    }

-    private Double nerEntitiesAndTokenScoring(Double score, ArrayList<String> entityTokenTags1,
-                                              ArrayList<String> entityTokenTags2, ArrayList<String> nerEntities1,
-                                              ArrayList<String> nerEntities2) {
+    private SentimentValueCache setupNEREntitiesAndTokenTags(CoreDocument pipelineCoreDcoument, SentimentValueCache cacheSentimentLocal) {
+        for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
+            Set<Map.Entry<String, Double>> entrySet = em.entityTypeConfidences().entrySet();
+            String entityType = em.entityType();
+            Double EntityConfidences = 0.0;
+            for (Map.Entry<String, Double> entries : entrySet) {
+                if (EntityConfidences < entries.getValue()) {
+                    EntityConfidences = entries.getValue();
+                }
+            }
+            List<CoreLabel> tokens = em.tokens();
+            for (CoreLabel token : tokens) {
+                if (token != null) {
+                    if (!cacheSentimentLocal.getnerEntityTokenTags().values().contains(token.tag())) {
+                        if (entityType.equals("PERSON") && EntityConfidences > 0.80) {
+                            cacheSentimentLocal.addnerEntityTokenTags(token.tag());
+                        }
+                    }
+                }
+            }
+            if (!cacheSentimentLocal.getnerEntities1().values().contains(em.text())) {
+                cacheSentimentLocal.addNEREntities1(em.text());
+                cacheSentimentLocal.addNEREntities2(em.entityType());
+            }
+        }
+        return cacheSentimentLocal;
+    }
+
+    private Double nerEntitiesAndTokenScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
        List entsCollection = new ArrayList();
-        for (String strEnts1 : nerEntities1) {
-            for (String strEnts2 : nerEntities2) {
+        for (String strEnts1 : cacheSentimentLocal1.getnerEntities1().values()) {
+            for (String strEnts2 : cacheSentimentLocal2.getnerEntities1().values()) {
                if (strEnts1.equalsIgnoreCase(strEnts2) &&
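/* setupNEREntitiesAndTokenTags above keeps the highest reported confidence
   for a mention's entity type and only records POS tags for confident PERSON
   mentions. The CoreNLP calls involved, in isolation:

       for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
           double best = 0.0;
           for (Map.Entry<String, Double> e : em.entityTypeConfidences().entrySet()) {
               best = Math.max(best, e.getValue());
           }
           if ("PERSON".equals(em.entityType()) && best > 0.80) {
               for (CoreLabel token : em.tokens()) {
                   String tag = token.tag(); // cached for the tag-overlap scorer
               }
           }
       }
*/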
!entsCollection.contains(strEnts1)) { score += 2500; entsCollection.add(strEnts1); @@ -1602,8 +1146,8 @@ public class SentimentAnalyzerTest { } } entsCollection = new ArrayList(); - for (String strToken : entityTokenTags1) { - for (String strToken1 : entityTokenTags2) { + for (String strToken : cacheSentimentLocal1.getnerEntityTokenTags().values()) { + for (String strToken1 : cacheSentimentLocal2.getnerEntityTokenTags().values()) { if (strToken.equalsIgnoreCase(strToken1) && !entsCollection.contains(strToken)) { score += 2000; entsCollection.add(strToken); @@ -1613,12 +1157,32 @@ public class SentimentAnalyzerTest { return score; } - private Double stopWordTokenLemmaScoring(Double score, ArrayList stopWordToken1, - ArrayList stopWordToken2, ArrayList stopWordLemma1, - ArrayList stopWordLemma2) { + private SentimentValueCache setupStoWordTokensLemma(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { + String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"; + List tokensSentiment = pipelineAnnotationSentiment.get(CoreAnnotations.TokensAnnotation.class); + Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; + Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); + for (CoreLabel token : tokensSentiment) { + Pair stopword = token.get(StopwordAnnotator.class); + String word = token.word().toLowerCase(); + if (stopWords.contains(word) || stopWordsCustom.contains(word)) { + cacheSentimentLocal.addstopwordTokens(word); + } + String lemma = token.lemma().toLowerCase(); + if (stopWords.contains(lemma) || stopWordsCustom.contains(lemma)) { + cacheSentimentLocal.addStopWordLemma(lemma); + } + if (stopword.first() && stopword.second()) { + cacheSentimentLocal.setPairCounter(cacheSentimentLocal.getPairCounter() + 1); + } + } + return cacheSentimentLocal; + } + + private Double stopWordTokenLemmaScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { Collection stopWordCollection = new ArrayList(); - for (String stopwords1 : stopWordToken1) { - for (String stopwords2 : stopWordToken2) { + for (String stopwords1 : cacheSentimentLocal1.getStopwordTokens().values()) { + for (String stopwords2 : cacheSentimentLocal2.getStopwordTokens().values()) { if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { score -= 50; stopWordCollection.add(stopwords1); @@ -1626,8 +1190,8 @@ public class SentimentAnalyzerTest { } } stopWordCollection = new ArrayList(); - for (String stopwords1 : stopWordLemma1) { - for (String stopwords2 : stopWordLemma2) { + for (String stopwords1 : cacheSentimentLocal1.getStopWordLemma().values()) { + for (String stopwords2 : cacheSentimentLocal2.getStopWordLemma().values()) { if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { score -= 50; stopWordCollection.add(stopwords1); @@ -1637,12 +1201,10 @@ public class SentimentAnalyzerTest { return score; } - private Double stopwordTokenPairCounterScoring(Double score, ArrayList stopWordToken1, - ArrayList stopWordToken2, int pairCounter1, - int pairCounter2) { - if (!stopWordToken1.isEmpty() && !stopWordToken1.isEmpty()) { - int stopwordsize1 = stopWordToken1.size(); - int stopwordsize2 = stopWordToken1.size(); + private Double stopwordTokenPairCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache 
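/* setupStoWordTokensLemma above flags stop words twice, once against Lucene's
   built-in English list and once against the custom comma-separated list, for
   both the surface form and the lemma. The core checks, mirroring the calls
   used in this class (the StopwordAnnotator pair is presumably
   (word-is-stopword, lemma-is-stopword)):

       Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
       Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true);
       String word = token.word().toLowerCase();
       if (stopWords.contains(word) || stopWordsCustom.contains(word)) { stopTokens.add(word); }
       Pair<Boolean, Boolean> stopword = token.get(StopwordAnnotator.class);
       if (stopword.first() && stopword.second()) { pairCounter++; }
*/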
cacheSentimentLocal2) { + if (!cacheSentimentLocal1.getStopwordTokens().values().isEmpty() && !cacheSentimentLocal2.getStopwordTokens().values().isEmpty()) { + int stopwordsize1 = cacheSentimentLocal1.getStopwordTokens().values().size(); + int stopwordsize2 = cacheSentimentLocal2.getStopwordTokens().values().size(); if (stopwordsize1 * 5 < stopwordsize2 || stopwordsize2 * 5 < stopwordsize1) { score -= stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850; } else if (stopwordsize1 == stopwordsize2 && stopwordsize1 > 1) { @@ -1653,6 +1215,8 @@ public class SentimentAnalyzerTest { score += stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850; } } + int pairCounter1 = cacheSentimentLocal1.getPairCounter(); + int pairCounter2 = cacheSentimentLocal2.getPairCounter(); if (pairCounter1 > 0 && pairCounter2 > 0) { if (pairCounter1 * 5 <= pairCounter2 || pairCounter2 * 5 <= pairCounter1) { score -= pairCounter1 > pairCounter2 ? (pairCounter1 - pairCounter2) * 1500 : (pairCounter2 - pairCounter1) * 1500; @@ -1667,532 +1231,277 @@ public class SentimentAnalyzerTest { return score; } - private Double tgwListScoreIncrementer(Double score, ArrayList tgwListIndex1, - ArrayList tgwListIndex2) { - int runCount = 0; - for (String taggedWord : tgwListIndex1) { - boolean found = false; - for (String taggedWord1 : tgwListIndex2) { - if (taggedWord.equals(taggedWord1)) { - found = true; - break; - } + private Double tgwListScoreIncrementer(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + AtomicInteger runCount = new AtomicInteger(0); + cacheSentimentLocal1.getTgwlistIndex().values().forEach(TaggedWord -> { + if (!cacheSentimentLocal2.getTgwlistIndex().values().contains(TaggedWord)) { + cacheSentimentLocal2.addTgwlistIndex(TaggedWord); + runCount.getAndIncrement(); } - if (!found) { - runCount++; - } - } - score += runCount * 64; + }); + score += runCount.get() * 64; return score; } - private List> getIMWES(List coreMaps) { - List> tokenList = new ArrayList<>(); - for (CoreMap sentence : coreMaps) { - List> imwes = sentence.get(JMWEAnnotation.class); - tokenList.addAll(imwes); - } - return tokenList; - } - - private int getInflictedCounterPositive(List> imwesFLocal) { - int InflectedCounterPositive = 0; - for (IMWE token : imwesFLocal) { - if (token.isInflected()) { - InflectedCounterPositive++; - } - } - return InflectedCounterPositive; - } - - private int getUnmarkedPatterns(List> imwesFLocal) { - int unmarked = 0; - for (IMWE token : imwesFLocal) { - IMWEDesc entry = token.getEntry(); - unmarked += entry.getUnmarkedPattern(); - } - return unmarked; - } - - private ArrayList gettokenForms(List> imwesFLocal) { - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - String form = token.getForm(); - arrs.add(form); - } - return arrs; - } - - private ArrayList getStrtokenEntryPos(List> imwesFLocal) { - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - IMWEDesc entry = token.getEntry(); - for (String strPostPrefix : entry.getPOS().getPrefixes()) { - arrs.add(strPostPrefix); - } - } - return arrs; - } - - private ArrayList getintTokenEntyCounts(List> imwesFLocal) { - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - IMWEDesc entry = token.getEntry(); - for (int counts : entry.getCounts()) { - arrs.add(counts); - } - } - return arrs; - } - - private ArrayList getITokenTags(List> imwesFLocal) 
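/* tgwListScoreIncrementer above counts tagged words present in the first
   cache but missing from the second, at 64 points each, and as a side effect
   inserts each missing word into the second cache's index, so a repeated call
   scores differently. A side-effect-free equivalent of the count, assuming
   distinct index entries:

       long missing = cache1.getTgwlistIndex().values().stream()
               .filter(w -> !cache2.getTgwlistIndex().values().contains(w))
               .count();
       score += missing * 64;
*/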
{ - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - for (IToken tokens : token.getTokens()) { - arrs.add(tokens.getTag()); - } - } - return arrs; - } - - private ArrayList getstrTokenStems(List> imwesFLocal) { - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - for (IToken tokens : token.getTokens()) { - for (String strtoken : tokens.getStems()) { - arrs.add(strtoken); - } - } - } - return arrs; - } - - private Integer getAnotatorcounter(List> imwesFLocal) { - return imwesFLocal.size(); - } - - private ArrayList getnerEntities(CoreDocument coreDocument) { - if (coreDocument == null || coreDocument.entityMentions() == null) { - return new ArrayList(); - } - ArrayList arrs = new ArrayList<>(); - for (CoreEntityMention em : coreDocument.entityMentions()) { - if (!arrs.contains(em.text())) { - arrs.add(em.text()); - } - } - return arrs; - } - - private ArrayList getnerEntitiesType(CoreDocument coreDocument) { - if (coreDocument == null || coreDocument.entityMentions() == null) { - return new ArrayList(); - } - ArrayList arrs = new ArrayList<>(); - for (CoreEntityMention em : coreDocument.entityMentions()) { - if (!arrs.contains(em.entityType())) { - arrs.add(em.entityType()); - } - } - return arrs; - } - - private Integer getPairCounter(Annotation pipelineAnnotationSentiment) { - int counter = 0; - List tokensSentiment = pipelineAnnotationSentiment. - get(CoreAnnotations.TokensAnnotation.class); - for (CoreLabel token : tokensSentiment) { - Pair stopword = token.get(StopwordAnnotator.class); - if (stopword.first() && stopword.second()) { - counter++; - } - } - return counter; - } - - private ArrayList getstopWordLemma(Annotation pipelineAnnotationSentiment) { - ArrayList arrs = new ArrayList<>(); - List tokensSentiment = pipelineAnnotationSentiment. - get(CoreAnnotations.TokensAnnotation.class); - String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," + - "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," + - "they,this,to,was,will,with"; - for (CoreLabel token : tokensSentiment) { - Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); - String lemma = token.lemma().toLowerCase(); - if (stopWords.contains(lemma) || stopWordsCustom.contains(lemma)) { - arrs.add(lemma); - } - } - return arrs; - } - - private ArrayList getstopWordToken(Annotation pipelineAnnotationSentiment) { - ArrayList arrs = new ArrayList<>(); - List tokensSentiment = pipelineAnnotationSentiment. 
- get(CoreAnnotations.TokensAnnotation.class); - String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," + - "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," + - "they,this,to,was,will,with"; - for (CoreLabel token : tokensSentiment) { - String word = token.word().toLowerCase(); - Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); - if (stopWords.contains(word) || stopWordsCustom.contains(word)) { - arrs.add(word); - } - } - return arrs; - } - - private ArrayList getentityTokenTags(CoreDocument coreDocument) { - if (coreDocument == null || coreDocument.entityMentions() == null) { - return new ArrayList(); - } - ArrayList arrs = new ArrayList<>(); - if (coreDocument != null) { - for (CoreEntityMention em : coreDocument.entityMentions()) { - List tokens = em.tokens(); - String entityType = em.entityType(); - Double EntityConfidences = 0.0; - Set> entrySet = em.entityTypeConfidences().entrySet(); - for (Map.Entry entries : entrySet) { - if (EntityConfidences < entries.getValue()) { - EntityConfidences = entries.getValue(); - } - } - for (CoreLabel token : tokens) { - if (token != null) { - if (!arrs.contains(token.tag())) { - if (entityType.equals("PERSON") && EntityConfidences > 0.80) { - arrs.add(token.tag()); - } - } - } - } - } - } - return arrs; - } - - private ArrayList getstrTokensIpartForm(List> imwesFLocal) { - ArrayList arrs = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - Collection values = token.getPartMap().values(); - for (IMWEDesc.IPart iPart : values) { - String iPartForm = iPart.getForm(); - arrs.add(iPartForm); - } - } - return arrs; - } - - private int getMarkedCounter(List> imwesFLocal) { - int marked = 0; - for (IMWE token : imwesFLocal) { - IMWEDesc entry = token.getEntry(); - marked += entry.getMarkedContinuous(); - for (IToken tokens : token.getTokens()) { - marked += tokens.getStems().size(); - } - } - return marked; - } - - public int getInflictedCounterNegative(List> imwesFLocal) { - int InflectedCounterNegative = 0; - Collection> tokeninflectionMap = new ArrayList(); - for (IMWE token : imwesFLocal) { - if (!token.isInflected() && !tokeninflectionMap.contains(token)) { - InflectedCounterNegative++; - tokeninflectionMap.add(token); - } - } - return InflectedCounterNegative; - } - - public ArrayList getTokenEntries(List> imwesFLocal) { - ArrayList tokenStrList = new ArrayList<>(); - for (IMWE token : imwesFLocal) { - final String substring = token.getEntry().toString().substring(token.getEntry() - .toString().length() - 1); - tokenStrList.add(substring); - } - return tokenStrList; - } - - - public void validateStringCaches() { - Class sentimentAnnotatedTreeClass = - SentimentCoreAnnotations.SentimentAnnotatedTree.class; - - if (this.tokenizeCountingF == null) { - this.tokenizeCountingF = tokenizeCounting(getTaggedWordList(str1)); - } - if (this.tokenizeCounting == null) { - this.tokenizeCounting = tokenizeCounting(getTaggedWordList(str)); - } - if (this.taggedWordListF == null) { - this.taggedWordListF = getTaggedWordList(str); - } - if (this.taggedWordList1 == null) { - this.taggedWordList1 = getTaggedWordList(str1); - } - if (this.retrieveTGWListF == null) { - this.retrieveTGWListF = retrieveTGWListIndex(this.taggedWordListF); - } - if (this.retrieveTGWList1 == null) { - this.retrieveTGWList1 = retrieveTGWListIndex(this.taggedWordList1); - } - if (this.sentencesF == null) { - this.sentencesF = 
pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class); - } - if (this.sentences1 == null) { - this.sentences1 = pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class); - } - if (this.sentencesSentimentF == null) { - this.sentencesSentimentF = pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class); - } - if (this.sentencesSentiment1 == null) { - this.sentencesSentiment1 = pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class); - } - if (this.treesF == null) { - this.treesF = retrieveTrees(this.sentencesF); - } - if (this.trees1 == null) { - this.trees1 = retrieveTrees(this.sentences1); - } - if (this.grammaticalStructuresF == null) { - this.grammaticalStructuresF = grammaticalStructureSetup(this.treesF); - } - if (this.grammaticalStructures1 == null) { - this.grammaticalStructures1 = grammaticalStructureSetup(this.trees1); - } - if (this.typedDependenciesF == null) { - this.typedDependenciesF = grammaticalStructureAllTypedDependencies(this.grammaticalStructuresF); - } - if (this.typedDependencies1 == null) { - this.typedDependencies1 = grammaticalStructureAllTypedDependencies(this.grammaticalStructures1); - } - if (this.rnnCoreAnnotationsPredictedF == null) { - this.rnnCoreAnnotationsPredictedF = sentimentRNNCoreAnnotations(this.sentencesSentimentF, - sentimentAnnotatedTreeClass); - } - if (this.rnnCoreAnnotationsPredicted1 == null) { - this.rnnCoreAnnotationsPredicted1 = sentimentRNNCoreAnnotations(this.sentencesSentiment1, - sentimentAnnotatedTreeClass); - } - if (this.simpleMatricesF == null) { - this.simpleMatricesF = sentimentRNNCorePredicted(this.sentencesSentimentF, sentimentAnnotatedTreeClass); - } - if (this.simpleMatrices1 == null) { - this.simpleMatrices1 = sentimentRNNCorePredicted(this.sentencesSentiment1, sentimentAnnotatedTreeClass); - } - if (this.simpleMatricesNodevectorsF == null) { - this.simpleMatricesNodevectorsF = sentimentRNNCoreNodevectors(this.sentencesSentimentF, sentimentAnnotatedTreeClass); - } - if (this.simpleMatricesNodevectors1 == null) { - this.simpleMatricesNodevectors1 = sentimentRNNCoreNodevectors(this.sentencesSentiment1, sentimentAnnotatedTreeClass); - } - if (this.listF == null) { - DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); - this.listF = classifier.classifyRaw(str, readerAndWriter); - } - if (this.list1 == null) { - DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); - this.list1 = classifier.classifyRaw(str1, readerAndWriter); - } - if (this.longestF == null) { - this.longestF = setupMainLongest(this.sentencesSentimentF); - } - if (this.longest1 == null) { - this.longest1 = setupMainLongest(this.sentencesSentiment1); - } - if (this.sentimentLongestF == null) { - this.sentimentLongestF = setupMainSentiment(this.sentencesSentimentF, sentimentAnnotatedTreeClass); - } - if (this.sentimentLongest1 == null) { - this.sentimentLongest1 = setupMainSentiment(this.sentencesSentiment1, sentimentAnnotatedTreeClass); - } - if (this.imwesF == null) { - this.imwesF = getIMWES(this.coreMaps1); - } - if (this.imwes1 == null) { - this.imwes1 = getIMWES(this.coreMaps2); - } - if (this.InflectedCounterNegativeF == null) { - this.InflectedCounterNegativeF = getInflictedCounterNegative(this.imwesF); - } - if (this.InflectedCounterNegative1 == null) { - this.InflectedCounterNegative1 = getInflictedCounterNegative(this.imwes1); - } - if (this.InflectedCounterPositiveF == null) { - this.InflectedCounterPositiveF = 
getInflictedCounterPositive(this.imwesF); - } - if (this.InflectedCounterPositive1 == null) { - this.InflectedCounterPositive1 = getInflictedCounterPositive(this.imwes1); - } - if (this.tokenEntryF == null) { - this.tokenEntryF = getTokenEntries(this.imwesF); - } - if (this.tokenEntry1 == null) { - this.tokenEntry1 = getTokenEntries(this.imwes1); - } - if (this.MarkedContinuousCounterF == null) { - this.MarkedContinuousCounterF = getMarkedCounter(this.imwesF); - } - if (this.MarkedContinuousCounter1 == null) { - this.MarkedContinuousCounter1 = getMarkedCounter(this.imwes1); - } - if (this.UnmarkedPatternCounterF == null) { - this.UnmarkedPatternCounterF = getUnmarkedPatterns(this.imwesF); - } - if (this.UnmarkedPatternCounter1 == null) { - this.UnmarkedPatternCounter1 = getUnmarkedPatterns(this.imwes1); - } - if (this.strTokensIpartFormF == null) { - this.strTokensIpartFormF = getstrTokensIpartForm(this.imwesF); - } - if (this.strTokensIpartForm1 == null) { - this.strTokensIpartForm1 = getstrTokensIpartForm(this.imwes1); - } - if (this.tokenFormsF == null) { - this.tokenFormsF = gettokenForms(this.imwesF); - } - if (this.tokenForms1 == null) { - this.tokenForms1 = gettokenForms(this.imwes1); - } - if (this.strTokenEntryGetPOSF == null) { - this.strTokenEntryGetPOSF = getStrtokenEntryPos(this.imwesF); - } - if (this.strTokenEntryGetPOS1 == null) { - this.strTokenEntryGetPOS1 = getStrtokenEntryPos(this.imwes1); - } - if (this.intTokenEntyCountsF == null) { - this.intTokenEntyCountsF = getintTokenEntyCounts(this.imwesF); - } - if (this.intTokenEntyCounts1 == null) { - this.intTokenEntyCounts1 = getintTokenEntyCounts(this.imwes1); - } - if (this.ITokenTagsF == null) { - this.ITokenTagsF = getITokenTags(this.imwesF); - } - if (this.ITokenTags1 == null) { - this.ITokenTags1 = getITokenTags(this.imwes1); - } - if (this.strTokenStemsF == null) { - this.strTokenStemsF = getstrTokenStems(this.imwesF); - } - if (this.strTokenStems1 == null) { - this.strTokenStems1 = getstrTokenStems(this.imwes1); - } - if (this.AnotatorcounterF == null) { - this.AnotatorcounterF = getAnotatorcounter(this.imwesF); - } - if (this.Anotatorcounter1 == null) { - this.Anotatorcounter1 = getAnotatorcounter(this.imwes1); - } - if (this.TokensCounterF == null) { - this.TokensCounterF = getAnotatorcounter(this.imwesF); - } - if (this.TokensCounter1 == null) { - this.TokensCounter1 = getAnotatorcounter(this.imwes1); - } - if (this.entityTokenTagsF == null) { - this.entityTokenTagsF = getentityTokenTags(this.pipelineCoreDcoument1); - } - if (this.entityTokenTags1 == null) { - this.entityTokenTags1 = getentityTokenTags(this.pipelineCoreDcoument2); - } - if (this.nerEntitiesF == null) { - this.nerEntitiesF = getnerEntities(this.pipelineCoreDcoument1); - } - if (this.nerEntities1 == null) { - this.nerEntities1 = getnerEntities(this.pipelineCoreDcoument2); - } - if (this.nerEntitiesTypeF == null) { - this.nerEntitiesTypeF = getnerEntitiesType(this.pipelineCoreDcoument1); - } - if (this.nerEntitiesType1 == null) { - this.nerEntitiesType1 = getnerEntitiesType(this.pipelineCoreDcoument2); - } - if (this.stopWordTokenF == null) { - this.stopWordTokenF = getstopWordToken(this.pipelineAnnotation1Sentiment); - } - if (this.stopWordToken1 == null) { - this.stopWordToken1 = getstopWordToken(this.pipelineAnnotation2Sentiment); - } - if (this.stopWordLemmaF == null) { - this.stopWordLemmaF = getstopWordLemma(this.pipelineAnnotation1Sentiment); - } - if (this.stopWordLemma1 == null) { - this.stopWordLemma1 = 
getstopWordLemma(this.pipelineAnnotation2Sentiment); - } - if (this.PairCounterF == null) { - this.PairCounterF = getPairCounter(this.pipelineAnnotation1Sentiment); - } - if (this.PairCounter1 == null) { - this.PairCounter1 = getPairCounter(this.pipelineAnnotation2Sentiment); - } - } - - - public SimilarityMatrix callSMX() { + @Override + public final SimilarityMatrix call() { Double score = -100.0; + SentimentValueCache cacheSentimentLocal1 = null; + SentimentValueCache cacheSentimentLocal2 = null; int counter1; int counter2; - validateStringCaches(); - counter1 = this.tokenizeCountingF; - counter2 = this.tokenizeCounting; - final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32; - score -= overValue; - score = tgwListScoreIncrementer(score, this.retrieveTGWListF, this.retrieveTGWList1); - Class sentimentAnnotatedTreeClass = - SentimentCoreAnnotations.SentimentAnnotatedTree.class; + try { + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = initializeCacheSetup(str, cacheSentimentLocal1); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2); + } + } catch (Exception ex) { - score = iterateTrees(this.treesF, this.trees1, score); - score = typeDependenciesGrammaticalRelation(this.typedDependenciesF, typedDependencies1, score, this.grammaticalStructuresF, this.grammaticalStructures1, - this.treesF, this.trees1); + } + try { + counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter(); + counter2 = cacheSentiment2 == null ? cacheSentimentLocal2.getCounter() : cacheSentiment2.getCounter(); - score = simpleRNNMatrixCalculations(score, this.simpleMatricesF, this.simpleMatrices1); - score = simpleRNNMaxtrixVectors(score, this.simpleMatricesNodevectorsF, this.simpleMatricesNodevectors1); - Integer sentiment1 = this.rnnCoreAnnotationsPredictedF.size(); - Integer sentiment2 = this.rnnCoreAnnotationsPredicted1.size(); - score -= (sentiment1 > sentiment2 ? sentiment2 - sentiment1 : sentiment1 - sentiment2) * 500; - score -= classifyRawEvaluation(); + final int overValue = (counter1 >= counter2 ? 
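/* call() above recomputes a SentimentValueCache only for the side whose
   precomputed cache is absent, then reads every feature through the
   "cached if present, else local" pattern:

       SentimentValueCache c1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1;
       int counter1 = c1.getCounter();

   Each scoring stage runs in its own try block with an empty catch, so a
   failure in one heuristic skips that stage instead of aborting the whole
   comparison; the exceptions themselves are discarded. */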
counter1 - counter2 : counter2 - counter1) * 32; + score -= overValue; + } catch (Exception ex) { - score = sentimentMatrixVariances(score, this.longestF, this.longest1, this.sentimentLongestF, this.sentimentLongest1); + } + try { + if (cacheSentiment1 == null) { + ConcurrentMap retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal1.getTaggedwordlist()); + for (String str : retrieveTGWListIndex.values()) { + cacheSentimentLocal1.addTgwlistIndex(str); + } + } + if (cacheSentiment2 == null) { + ConcurrentMap retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal2.getTaggedwordlist()); + for (String str : retrieveTGWListIndex.values()) { + cacheSentimentLocal2.addTgwlistIndex(str); + } + } + } catch (Exception ex) { - score = entryCountsRelation(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1); - score = entryCountsScoring(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1); - score = tokenEntryPosScoring(score, this.strTokenEntryGetPOSF, this.strTokenEntryGetPOS1); - score = unmarkedPatternCounterScoring(score, this.UnmarkedPatternCounterF, - this.UnmarkedPatternCounter1); - score = markedContiniousCounterScoring(score, this.MarkedContinuousCounterF, - this.MarkedContinuousCounter1); - score = strTokensMapScoring(score, this.strTokensIpartFormF, this.strTokensIpartForm1); - score = strTokenEntryScoring(score, this.tokenEntryF, this.tokenEntry1); - score = strTokenMapTagsScoring(score, this.ITokenTagsF, this.ITokenTags1); - score = tokenformSizeScoring(score, this.tokenFormsF, this.tokenForms1); - score = tokenStemmingMapScoring(score, this.strTokenStemsF, this.strTokenStems1); + } + try { + score = tgwListScoreIncrementer(score, cacheSentiment1 == null + ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2); + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = GrammaticStructureSetup(cacheSentimentLocal1, pipelineAnnotation1); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2); + } + } catch (Exception ex) { - score = inflectedCounterScoring(score, this.InflectedCounterPositiveF, this.InflectedCounterPositive1, - this.InflectedCounterNegativeF, this.InflectedCounterNegative1); - score = annotatorCountScoring(score, this.AnotatorcounterF, this.Anotatorcounter1); - score = tokensCounterScoring(score, this.TokensCounterF, this.TokensCounter1); - LevenshteinDistance leven = new LevenshteinDistance(str, str1); - double SentenceScoreDiff = leven.computeLevenshteinDistance(); - SentenceScoreDiff *= 15; - score -= SentenceScoreDiff; - score = nerEntitiesAndTokenScoring(score, this.entityTokenTagsF, this.entityTokenTags1, - this.nerEntitiesF, this.nerEntities1); + } + ConcurrentMap sentenceConstituencyParseList1 = null; + ConcurrentMap sentenceConstituencyParseList2 = null; + try { + sentenceConstituencyParseList2 = cacheSentiment2 == null + ? cacheSentimentLocal2.getSentenceConstituencyParseList() : cacheSentiment2.getSentenceConstituencyParseList(); + sentenceConstituencyParseList1 = cacheSentiment1 == null + ? 
cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList(); + score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score); + } catch (Exception ex) { - score = stopWordTokenLemmaScoring(score, this.stopWordTokenF, this.stopWordToken1, - this.stopWordLemmaF, this.stopWordLemma1); - score = stopwordTokenPairCounterScoring(score, this.stopWordTokenF, this.stopWordToken1, - this.PairCounterF, this.PairCounter1); + } + try { + Collection allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies() + : cacheSentiment2.getAllTypedDependencies(); + Collection allTypedDependencies1 = cacheSentiment1 == null ? cacheSentimentLocal1.getAllTypedDependencies() + : cacheSentiment1.getAllTypedDependencies(); + + ConcurrentMap grammaticalMap1 = cacheSentiment1 == null ? cacheSentimentLocal1.getGs() : cacheSentiment1.getGs(); + ConcurrentMap grammaticalMap2 = cacheSentiment2 == null ? cacheSentimentLocal2.getGs() : cacheSentiment2.getGs(); + score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2, + sentenceConstituencyParseList1, sentenceConstituencyParseList2); + } catch (Exception ex) { + + } + try { + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = sentimentCoreAnnotationSetup(pipelineAnnotation1Sentiment, cacheSentimentLocal1); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2); + } + } catch (Exception ex) { + + } + try { + final ConcurrentMap simpleSMXlist1 = cacheSentiment1 == null + ? cacheSentimentLocal1.getSimpleSMXlist() : cacheSentiment1.getSimpleSMXlist(); + final ConcurrentMap simpleSMXlist2 = cacheSentiment2 == null + ? cacheSentimentLocal2.getSimpleSMXlist() : cacheSentiment2.getSimpleSMXlist(); + final ConcurrentMap simpleSMXlistVector1 = cacheSentiment1 == null + ? cacheSentimentLocal1.getSimpleSMXlistVector() : cacheSentiment1.getSimpleSMXlistVector(); + final ConcurrentMap simpleSMXlistVector2 = cacheSentiment2 == null + ? cacheSentimentLocal2.getSimpleSMXlistVector() : cacheSentiment2.getSimpleSMXlistVector(); + score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2); + score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2); + } catch (Exception ex) { + + } + try { + int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size(); + int sentiment2 = cacheSentiment2 == null ? cacheSentimentLocal2.getRnnPrediectClassMap().size() : cacheSentiment2.getRnnPrediectClassMap().size(); + score -= (sentiment1 > sentiment2 ? 
sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500; + Map.Entry> classifyRawEvaluationEntry = classifyRawEvaluation(score, cacheSentimentLocal1, + cacheSentimentLocal2); + score = classifyRawEvaluationEntry.getKey(); + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = classifyRawEvaluationEntry.getValue().getKey(); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue(); + } + } catch (Exception ex) { + + } + try { + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = setupMainSentimentandLongestVal(pipelineAnnotation1Sentiment, cacheSentimentLocal1); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = setupMainSentimentandLongestVal(pipelineAnnotation2Sentiment, cacheSentimentLocal2); + } + score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(), + cacheSentiment2 == null ? cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null + ? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null + ? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment()); + } catch (Exception ex) { + + } + try { + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1); + } + if (cacheSentiment2 == null) { + cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2); + } + } catch (Exception ex) { + + } + + SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1; + SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2; + try { + score = entryCountsRelation(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = entryCountsScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = tokenEntryPosScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = strTokensMapScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = strTokenEntryScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = tokenformSizeScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = inflectedCounterScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = annotatorCountScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + score = tokensCounterScoring(score, scoringCache1, scoringCache2); + } catch (Exception ex) { + + } + try { + LevenshteinDistance leven = new LevenshteinDistance(str, str1); + double SentenceScoreDiff = leven.computeLevenshteinDistance(); + SentenceScoreDiff *= 15; + score -= SentenceScoreDiff; + } catch (Exception ex) { + + } + try { + if (cacheSentiment1 == null) { + cacheSentimentLocal1 = 
setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1);
+            }
+            if (cacheSentiment2 == null) {
+                cacheSentimentLocal2 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument2, cacheSentimentLocal2);
+            }
+            score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
+                    ? cacheSentimentLocal2 : cacheSentiment2);
+        } catch (Exception ex) {
+
+        }
+        try {
+            if (cacheSentiment1 == null) {
+                cacheSentimentLocal1 = setupStoWordTokensLemma(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
+            }
+            if (cacheSentiment2 == null) {
+                cacheSentimentLocal2 = setupStoWordTokensLemma(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
+            }
+            score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
+                    ? cacheSentimentLocal2 : cacheSentiment2);
+        } catch (Exception ex) {
+
+        }
+        try {
+            score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
+                    ? cacheSentimentLocal2 : cacheSentiment2);
+        } catch (Exception ex) {
+
+        }
         smxParam.setDistance(score);
+        try {
+            if (cacheSentiment1 == null) {
+                smxParam.setCacheValue1(cacheSentimentLocal1);
+            }
+            if (cacheSentiment2 == null) {
+                smxParam.setCacheValue2(cacheSentimentLocal2);
+            }
+        } catch (Exception ex) {
+
+        }
         return smxParam;
     }
 }
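The call() implementation above repeats the selector expression
"cacheSentimentX == null ? cacheSentimentLocalX : cacheSentimentX" once per scoring
step. A small helper would state that decision once; the following is a hypothetical
sketch against the fields above, not code from the patch:

    // Hypothetical helper: prefer the cache handed in from a previous run,
    // falling back to the cache built during this call().
    private static SentimentValueCache pick(SentimentValueCache stored, SentimentValueCache local) {
        return stored != null ? stored : local;
    }

    // Each scoring step then reads, e.g.:
    // score = entryCountsRelation(score, pick(cacheSentiment1, cacheSentimentLocal1),
    //         pick(cacheSentiment2, cacheSentimentLocal2));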
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java
new file mode 100644
index 00000000..33455779
--- /dev/null
+++ b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java
@@ -0,0 +1,334 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package FunctionLayer.StanfordParser;
+
+import com.google.common.collect.MapMaker;
+import edu.stanford.nlp.ling.TaggedWord;
+import edu.stanford.nlp.trees.GrammaticalStructure;
+import edu.stanford.nlp.trees.Tree;
+import edu.stanford.nlp.trees.TypedDependency;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+import org.ejml.simple.SimpleMatrix;
+
+/**
+ *
+ * @author install1
+ */
+public class SentimentValueCache {
+
+    private String sentence;
+    private int counter;
+    private List<List<TaggedWord>> taggedwordlist = new ArrayList<>();
+    private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
+    private final Collection<TypedDependency> allTypedDependencies = new ArrayList<>();
+    private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
+    private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
+    private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
+    private List classifyRaw;
+    private int mainSentiment = 0;
+    private int longest = 0;
+    private int tokensCounter = 0;
+    private int anotatorcounter = 0;
+    private int inflectedCounterPositive = 0;
+    private int inflectedCounterNegative = 0;
+    private int MarkedContinuousCounter = 0;
+    private int MarkedContiniousCounterEntries = 0;
+    private int UnmarkedPatternCounter = 0;
+    private int pairCounter = 0;
+    private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
+    private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
+    private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();
+
+    public int getPairCounter() {
+        return pairCounter;
+    }
+
+    public void setPairCounter(int pairCounter) {
+        this.pairCounter = pairCounter;
+    }
+
+    public void addStopWordLemma(String str) {
+        stopWordLemma.put(stopWordLemma.size(), str);
+    }
+
+    public void addstopwordTokens(String str) {
+        stopwordTokens.put(stopwordTokens.size(), str);
+    }
+
+    public ConcurrentMap<Integer, String> getStopwordTokens() {
+        return stopwordTokens;
+    }
+
+    public ConcurrentMap<Integer, String> getStopWordLemma() {
+        return stopWordLemma;
+    }
+
+    public void addnerEntityTokenTags(String str) {
+        nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
+    }
+
+    public ConcurrentMap<Integer, String> getnerEntityTokenTags() {
+        return nerEntityTokenTags;
+    }
+
+    public ConcurrentMap<Integer, String> getnerEntities1() {
+        return
nerEntities1; + } + + public ConcurrentMap getnerEntities2() { + return nerEntities2; + } + + public void addNEREntities1(String str) { + nerEntities1.put(nerEntities1.size(), str); + } + + public void addNEREntities2(String str) { + nerEntities2.put(nerEntities2.size(), str); + } + + public void setTaggedwords(List> twlist) { + taggedwordlist = twlist; + } + + public List> getTaggedwordlist() { + return taggedwordlist; + } + + public void addEntryCounts(int counts) { + entryCounts.put(entryCounts.size(), counts); + } + + public ConcurrentMap getEntryCounts() { + return entryCounts; + } + + public void addstrTokenEntryPOS(String str) { + strTokenEntryPOS.put(strTokenEntryPOS.size(), str); + } + + public ConcurrentMap getstrTokenEntryPOS() { + return strTokenEntryPOS; + } + + public void addstrTokenGetiPart(String str) { + strTokenGetiPart.put(strTokenGetiPart.size(), str); + } + + public ConcurrentMap getstrTokenGetiPart() { + return strTokenGetiPart; + } + + public ConcurrentMap getstrTokenGetEntry() { + return strTokenGetEntry; + } + + public void addstrTokenGetEntry(String str) { + strTokenGetEntry.put(strTokenGetEntry.size(), str); + } + + public ConcurrentMap getstrTokenForm() { + return strTokenForm; + } + + public void addstrTokenForm(String str) { + strTokenForm.put(strTokenForm.size(), str); + } + + public ConcurrentMap getstrTokenStems() { + return strTokenStems; + } + + public void addstrTokenStems(String str) { + strTokenStems.put(strTokenStems.size(), str); + } + + public ConcurrentMap getITokenMapTag() { + return ITokenMapTag; + } + + public void addITokenMapTag(String str) { + ITokenMapTag.put(ITokenMapTag.size(), str); + } + + public int getUnmarkedPatternCounter() { + return UnmarkedPatternCounter; + } + + public void setUnmarkedPatternCounter(int UnmarkedPatternCounter) { + this.UnmarkedPatternCounter = UnmarkedPatternCounter; + } + + public int getMarkedContiniousCounterEntries() { + return MarkedContiniousCounterEntries; + } + + public void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) { + this.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries; + } + + public int getMarkedContinuousCounter() { + return MarkedContinuousCounter; + } + + public void setMarkedContinuousCounter(int MarkedContinuousCounter) { + this.MarkedContinuousCounter = MarkedContinuousCounter; + } + + public int getInflectedCounterNegative() { + return inflectedCounterNegative; + } + + public void setInflectedCounterNegative(int inflectedCounterNegative) { + this.inflectedCounterNegative = inflectedCounterNegative; + } + + public int getInflectedCounterPositive() { + return inflectedCounterPositive; + } + + public void setInflectedCounterPositive(int inflectedCounterPositive) { + this.inflectedCounterPositive = inflectedCounterPositive; + } + + public int getAnotatorcounter() { + return anotatorcounter; + } + + public void setAnotatorcounter(int anotatorcounter) { + this.anotatorcounter = anotatorcounter; + } + + public int getTokensCounter() { + return tokensCounter; + } + + public void setTokensCounter(int tokensCounter) { + this.tokensCounter = tokensCounter; + } + + public int getMainSentiment() { + return mainSentiment; + } + + public void setMainSentiment(int mainSentiment) { + this.mainSentiment = mainSentiment; + } + + public int getLongest() { + return longest; + } + + public void setLongest(int longest) { + this.longest = longest; + } + + public List getClassifyRaw() { + return classifyRaw; + } + + public void setClassifyRaw(List classifyRaw) { + 
this.classifyRaw = classifyRaw;
+    }
+
+    public ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
+        return rnnPredictClassMap;
+    }
+
+    public void addRNNPredictClass(int rnnPrediction) {
+        rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
+    }
+
+    public void addSimpleMatrix(SimpleMatrix SMX) {
+        simpleSMXlist.put(simpleSMXlist.size(), SMX);
+    }
+
+    public void addSimpleMatrixVector(SimpleMatrix SMX) {
+        simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
+    }
+
+    public ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
+        return gsMap;
+    }
+
+    public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
+        return simpleSMXlist;
+    }
+
+    public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
+        return simpleSMXlistVector;
+    }
+
+    public ConcurrentMap<Integer, GrammaticalStructure> getGs() {
+        return gsMap;
+    }
+
+    public int getCounter() {
+        return counter;
+    }
+
+    public void addGS(GrammaticalStructure gs) {
+        gsMap.put(gsMap.size(), gs);
+    }
+
+    public Collection<TypedDependency> getAllTypedDependencies() {
+        return allTypedDependencies;
+    }
+
+    public void addTypedDependencies(Collection<TypedDependency> TDPlist) {
+        for (TypedDependency TDP : TDPlist) {
+            allTypedDependencies.add(TDP);
+        }
+    }
+
+    public ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
+        return sentenceConstituencyParseList;
+    }
+
+    public void addSentenceConstituencyParse(Tree tree) {
+        sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
+    }
+
+    public void setCounter(int counter) {
+        this.counter = counter;
+    }
+
+    public String getSentence() {
+        return sentence;
+    }
+
+    public SentimentValueCache(String str, int counter) {
+        this.sentence = str;
+        this.counter = counter;
+    }
+
+    public ConcurrentMap<Integer, String> getTgwlistIndex() {
+        return tgwlistIndex;
+    }
+
+    public void addTgwlistIndex(String str) {
+        tgwlistIndex.put(tgwlistIndex.size(), str);
+    }
+
+    public SentimentValueCache(String str) {
+        this.sentence = str;
+    }
+}
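SentimentValueCache is an append-only value object: every add* method stores its
argument under the map's current size, so entries keep insertion order without any
extra bookkeeping, and the get* accessors expose the maps directly. A minimal usage
sketch (hypothetical values, not part of the patch):

    // Hypothetical illustration of the append-only accessors above.
    SentimentValueCache cache = new SentimentValueCache("some sentence", 3);
    cache.addTgwlistIndex("NN");    // stored under key 0
    cache.addTgwlistIndex("VBZ");   // stored under key 1
    cache.setMainSentiment(2);
    System.out.println(cache.getTgwlistIndex().size()); // prints 2
    System.out.println(cache.getCounter());             // prints 3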
diff --git a/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF b/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF
deleted file mode 100644
index 3de0fd0a..00000000
--- a/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,3 +0,0 @@
-Manifest-Version: 1.0
-Main-Class: PresentationLayer.DiscordHandler
-
diff --git a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
index 26179d8d..23d87a64 100644
--- a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
+++ b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
@@ -1,111 +1,71 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+
+ ps ax | grep EventNotfierDiscordBot-1.0
+ kill $pid (number)
+
+nohup screen -d -m -S nonroot java -Xmx6048M -jar /home/javatests/ArtificialAutism-1.0.jar
+nohup screen -d -m -S nonroot java -Xmx6800M -jar /home/javatests/ArtificialAutism-1.0.jar
+
+screen -ls (number1)
+screen -X -S (number1) quit
+ */
 package PresentationLayer;
 
-import DataLayer.settings;
 import FunctionLayer.Datahandler;
+import FunctionLayer.DoStuff;
 import FunctionLayer.PipelineJMWESingleton;
 import discord4j.core.DiscordClient;
 import discord4j.core.GatewayDiscordClient;
-import discord4j.core.event.domain.message.MessageCreateEvent;
-import edu.stanford.nlp.pipeline.StanfordCoreNLP;
-
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.*;
 import java.sql.SQLException;
-import java.util.ArrayList;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import DataLayer.settings;
+import discord4j.common.util.Snowflake;
+import discord4j.core.event.domain.message.MessageCreateEvent;
+import java.math.BigInteger;
 
 /**
+ *
  * @author install1
  */
 public class DiscordHandler {
-
-    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
-            Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
-        byte[] receiveData = new byte[4096];
-        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
+    public static void main(String[] args) {
+        System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
         try {
-            serverSocket.receive(receivePacket);
-        } catch (IOException e) {
-            e.printStackTrace();
+            Datahandler.instance.initiateMYSQL();
+            //nohup screen -d -m -S nonroot java -Xmx6900M -jar /home/javatests/ArtificialAutism-1.0.jar
+            //uncomment db fetch when ready, just keep the comment for future reference
+            System.out.println("finished initiating MYSQL");
+        } catch (SQLException | IOException ex) {
+            Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
         }
-        String sentence = new String(receivePacket.getData(), 0,
-                receivePacket.getLength());
-        sentence = sentence.replace("clientmessage:", "");
-        String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
-                true);
-        byte[] sendData = ResponseMsg.getBytes("UTF-8");
-        int deliver_port = 0;
-        switch (port) {
-            case 48475:
-                deliver_port = 48470;
-                break;
-            case 48476:
-                deliver_port = 48471;
-                break;
-            case 48477:
-                deliver_port = 48472;
-                break;
-            case 48478:
-                deliver_port = 48473;
-                break;
-        }
-        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
-        serverSocket.send(sendPacket);
-    }
-
-    public static void handleUDPTraffic(int port, Datahandler datahandler,
-            StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
-
-        try (DatagramSocket serverSocket = new DatagramSocket(port)) {
-
-            String hostIP = "195.154.53.196";
-            if (port == 48477 || port == 48478) {
-                hostIP = "51.158.20.245";
-            }
-            InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip'
-            while (true) {
-                receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
-            }
-        } catch (SocketException | UnknownHostException e) {
-            e.printStackTrace();
-        } catch (UnsupportedEncodingException e) {
-            e.printStackTrace();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public static void main(String[] args) throws IOException, SQLException {
-        Datahandler datahandler = new Datahandler();
-        datahandler.initiateMYSQL();
-        PipelineJMWESingleton.getINSTANCE();
-        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
-        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
+        Datahandler.instance.instantiateAnnotationMapJMWE();
+        Datahandler.instance.shiftReduceParserInitiate();
+        Datahandler.instance.instantiateAnnotationMap();
         System.out.println("FINISHED ALL ANNOTATIONS");
-        datahandler.updateStringCache();
-        System.out.println("updatedstring cache");
+        Datahandler.instance.addHLstatsMessages();
+        Datahandler.instance.updateStringCache();
         String token = new settings().getDiscordToken();
         final DiscordClient client = DiscordClient.create(token);
         final GatewayDiscordClient gateway = client.login().block();
         String usernameBot = gateway.getSelf().block().getUsername();
-        int autismbotCount = 4;
-        //make sure not to use ports that are already occupied.
-        for (int i = 0; i < autismbotCount; i++) {
-            final int j = i;
-            new Thread(() -> {
-                ArrayList ports = new ArrayList();
-                ports.add(48475);
-                ports.add(48476);
-                ports.add(48477);
-                ports.add(48478);
-                handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
-            }).start();
-        }
+        new Thread(() -> {
+            Datahandler.instance.update_autismo_socket_msg();
+        }).start();
         gateway.on(MessageCreateEvent.class).subscribe(event -> {
-            FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
+            if (!FunctionLayer.DoStuff.isOccupied()) {
+                FunctionLayer.DoStuff.doStuff(event, usernameBot);
+            }
         });
         gateway.onDisconnect().block();
-    } //3.1.1 discord4j version
+    }
 }
diff --git a/ArtificialAutism/src/test/java/junit.java b/ArtificialAutism/src/test/java/junit.java
new file mode 100644
index 00000000..c20e3fc9
--- /dev/null
+++ b/ArtificialAutism/src/test/java/junit.java
@@ -0,0 +1,497 @@
+import FunctionLayer.Datahandler;
+import FunctionLayer.PipelineJMWESingleton;
+import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
+import edu.mit.jmwe.data.IMWE;
+import edu.mit.jmwe.data.IToken;
+import edu.stanford.nlp.ie.AbstractSequenceClassifier;
+import edu.stanford.nlp.ie.crf.CRFClassifier;
+import edu.stanford.nlp.ling.CoreAnnotations;
+import edu.stanford.nlp.ling.CoreLabel;
+import edu.stanford.nlp.ling.TaggedWord;
+import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
+import edu.stanford.nlp.pipeline.Annotation;
+import edu.stanford.nlp.pipeline.CoreDocument;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
+import edu.stanford.nlp.tagger.maxent.MaxentTagger;
+import edu.stanford.nlp.trees.*;
+import edu.stanford.nlp.util.CoreMap;
+import org.ejml.simple.SimpleMatrix;
+import org.junit.Assert;
+import org.junit.Test;
+import FunctionLayer.SimilarityMatrix;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.FileHandler;
+import java.util.logging.Logger;
+import java.util.logging.SimpleFormatter;
+
+public class junit {
+
+    private String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger";
+    private MaxentTagger tagger = new MaxentTagger(taggerPath);
+    private GrammaticalStructureFactory gsf = initiateGrammaticalStructureFactory();
+
+    String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
+
AbstractSequenceClassifier classifier = CRFClassifier. + getClassifierNoExceptions(nerModel); + + public GrammaticalStructureFactory initiateGrammaticalStructureFactory() { + String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"; + LexicalizedParser lp = LexicalizedParser. + loadModel(lexParserEnglishPCFG, "-maxLength", "100"); + TreebankLanguagePack langpack = lp.getOp().langpack(); + return langpack.grammaticalStructureFactory(); + } + + public Double testCall(String sent1, String sent2, StanfordCoreNLP stanfordCoreNLP, + StanfordCoreNLP stanfordCoreNLPSentiment) { + System.out.println("\n\n\n\nNEW ITERATION"); + System.out.println("sent1: " + sent1); + System.out.println("sent2: " + sent2); + ArrayList concurrentRelations = new ArrayList(); + Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent1); + Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent2); + + Integer tokenizeCountingF = null; + List> taggedWordListF = null; + List> taggedWordList1 = null; + ArrayList retrieveTGWListF = null; + java.util.ArrayList retrieveTGWList1 = null; + List sentencesF = null; + List sentence1 = null; + List sentencesSentimentF = null; + List sentenceSentiment1 = null; + List coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class); + ArrayList treesF = null; + ArrayList trees1 = null; + ArrayList grammaticalStructuresF = null; + ArrayList grammaticalStructures1 = null; + ArrayList typedDependenciesF = null; + ArrayList rnnCoreAnnotationsPredictedF = null; + ArrayList simpleMatricesF = null; + ArrayList simpleMatricesNodevectorsF = null; + ArrayList listF = null; + Integer longestF = null; + Integer sentimentLongestF = null; + List> imwesF = null; + Integer InflectedCounterNegativeF = null; + Integer InflectedCounterPositiveF = null; + ArrayList tokenEntryF = null; + Integer MarkedContinuousCounterF = null; + Integer UnmarkedPatternCounterF = null; + ArrayList strTokensIpartFormF = null; + ArrayList tokenFormsF = null; + ArrayList strTokenEntryGetPOSF = null; + ArrayList intTokenEntyCountsF = null; + ArrayList ITokenTagsF = null; + ArrayList strTokenStemsF = null; + Integer AnotatorcounterF = null; + Integer TokensCounterF = null; + ArrayList entityTokenTagsF = null; + ArrayList nerEntitiesF = null; + ArrayList nerEntitiesTypeF = null; + ArrayList stopWordTokenF = null; + ArrayList stopWordLemmaF = null; + Integer PairCounterF = null; + + java.util.ArrayList typedDependencies1 = null; + ArrayList rnnCoreAnnotationsPredicted1 = null; + ArrayList simpleMatrices1 = null; + ArrayList simpleMatricesNodevectors1 = null; + List list1 = null; + Integer longest1 = null; + Integer sentimentLongest1 = null; + List> imwes1 = null; + Integer InflectedCounterNegative1 = null; + Integer InflectedCounterPositive1 = null; + ArrayList tokenEntry1 = null; + Integer MarkedContinuousCounter1 = null; + Integer UnmarkedPatternCounter1 = null; + ArrayList strTokensIpartForm1 = null; + ArrayList tokenForms1 = null; + ArrayList strTokenEntryGetPOS1 = null; + ArrayList intTokenEntyCounts1 = null; + ArrayList ITokenTags1 = null; + ArrayList strTokenStems1 = null; + Integer Anotatorcounter1 = null; + Integer TokensCounter1 = null; + ArrayList entityTokenTags1 = null; + ArrayList nerEntities1 = null; + ArrayList nerEntitiesType1 = null; + ArrayList stopWordToken1 = null; + ArrayList stopWordLemma1 = null; + Integer PairCounter1 = null; + List coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class); + 
Annotation strAnno = new Annotation(sent1);
+        strAnno.compact();
+        stanfordCoreNLP.annotate(strAnno);
+
+        // sentiment-pipeline annotation of sent1; strAnno22 below is the sent2 counterpart
+        Annotation strAnnoSentiment = new Annotation(sent1);
+        strAnnoSentiment.compact();
+        stanfordCoreNLPSentiment.annotate(strAnnoSentiment);
+
+        Annotation strAnno2 = new Annotation(sent2);
+        strAnno2.compact();
+        stanfordCoreNLP.annotate(strAnno2);
+
+        Annotation strAnno22 = new Annotation(sent2);
+        strAnno22.compact();
+        stanfordCoreNLPSentiment.annotate(strAnno22);
+
+        Annotation annotation = new Annotation(sent1);
+        stanfordCoreNLP.annotate(annotation);
+        CoreDocument coreDocument = new CoreDocument(annotation);
+
+        annotation = new Annotation(sent2);
+        stanfordCoreNLP.annotate(annotation);
+        CoreDocument coreDocument1 = new CoreDocument(annotation);
+
+        Integer tokenizeCounting = null;
+
+        SentimentAnalyzerTest sentimentAnalyzerTest = new SentimentAnalyzerTest(sent1, sent2,
+                new SimilarityMatrix(sent1, sent2), coreMaps1, coreMaps2, strAnno,
+                strAnno2, strAnnoSentiment,
+                strAnno22, coreDocument,
+                coreDocument1,
+                tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
+                taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
+                sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
+                grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
+                typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
+                simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
+                listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
+                imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
+                InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
+                MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
+                strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
+                strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
+                intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
+                AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
+                entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
+                nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
+                PairCounterF, PairCounter1);
+        Double score = sentimentAnalyzerTest.callSMX().getDistance();
+        System.out.println("score: " + score + "\n");
+        return score;
+    }
+
+    //@Test
+    public void testScoring() {
+        Datahandler datahandler = new Datahandler();
+        PipelineJMWESingleton.getINSTANCE();
+        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
+        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
+        String sent1 = "I was thinking to small supplies to avoid waiting in the rain. 
This way, in case of trouble you go home and take in your supply instead of waiting 45 min"; + String sent2 = "*NêkØ* Kroaat_West bG <3"; + double PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 800.0); + sent2 = "no thanks but i know some ladyboys here that would"; + sent1 = "u want head from me :wlenny:"; + double PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + + Assert.assertTrue(PerformTestingFitting > 200.0); + sent1 = "we need a trim for kroaat's teamwin"; + double PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + sent1 = "i am not a stalker"; + sent2 = "but we ban for bhop hack"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "hey stalker"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < PerformTestingFitting); + sent1 = "what do you think of humanzz"; + sent2 = "did we actually go inside rocket -_-"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "crying for beeing tossed for fire"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + Assert.assertTrue(PerformTestingFitting > 3000); + sent1 = "admin ! this map needs a Free Torchlight for all"; + sent2 = "( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 100); + sent1 = "i said that because i indeed have more knowledge about medicines than the average joe"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "Depends on the situation but i will mostly trust my doctor if he says this will help and i actually need it"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + sent1 = "tell me something"; + sent2 = "you learn fast yoshmi"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "when i see europeans dead i laugh"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + sent1 = "crobat im gonna nominate next event for you"; + sent2 = "why did we sploit . <:wlenny:514861023002624001> <:wlenny:514861023002624001> <:wlenny:514861023002624001>"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "lets go for mako"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + sent1 = "how are the calcluations going? 
any issue with the JMWE?"; + sent2 = "anyone know if upgrading damage increases the mines' damage also"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "i have to get back to work"; + PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess); + sent1 = "sprichst du Deutsch?"; + sent2 = "like rpggift unknown !!! 130"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 500); + sent1 = "do you like memes?"; + sent2 = "we need to adapt to the team we have"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 3400); + sent2 = "i have to get back to work"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 4400); + sent1 = "is that a cursed sentence?"; + sent2 = "just didnt want to have heal since i died"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2000); + sent1 = "my name is ? ? ? ? G ? ? ? but this server doesn't read my special ? ? ? ? ? ? characters"; + sent2 = "dont say that sentence again"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 5000); + sent2 = "please dont tell me your gonna repeat that"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2000); + sent2 = "na it was a good try"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2000); + sent2 = "NATSU DIES IN THE END"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2000); + sent1 = "reeee"; + sent2 = "??( ? :wlenny~1: ?? ? :wlenny~1: )?? ( ? :wlenny~1: ?? ? :wlenny~1: )/ [ :wlenny~1: ?~ :wlenny~1: :] ? :wlenny~1: ?? ?? <"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "dw, my mom is a stupid cunt, she deserved it"; + sent2 = "(????????????-)---….. JOINT :wlenny~1: !"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "are you a paste cut or a cut and paste?"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "Did you know that Denmark's short form (DK) is pronounced as \"decay\"? :thonk~1:"; + sent2 = "?( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ?<:wlenny:514861023002624001>)??( ?<:wlenny:514861023002624001>?? ? 
<:w"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "are you a space cat or a cat in space? <:thonk:382012909942734858>"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "something else to tell me now"; + sent2 = "{fullred}(--)? ?(--? )?{mediumblue}?(--)? ?(--)?{magenta}?(--)?{indigo}?(--? )?"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "do you have repeating sentences"; + sent2 = "its pretty cheap with 10 ppl you pay about 60 euro for a week"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 5500); + sent1 = "what is 60 euro a week"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "do you watch news and if yes which one"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTestingFittingLess < 2500); + sent1 = "\"im gonna bad manner you"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "LOOK OUT BIG DOG"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "3 days = 30 cents"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = ":D we had a lot of fun for 2 rounds :D"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = ">FUCK I JUST PRESSED MY ZTELE BIND"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "400 solos on mako <:wlenny:514861023002624001>"; + sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "2 noobs 3 not bad"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "??????? NOW WE RIOT ???????"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? 
<:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "admin turn on bhop pleas"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "paranoid is never event"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "players keep diying LLLLOOOOLLL"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "PRESS THE FUCKING BUTTON IN THE ELEVATOR"; + sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "but instead of minecraft server i got css ze"; + sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 5500.0); + sent1 = "First time there's that many CT at this point"; + sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "go to spec so changemap"; + sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "What's for lunch?"; + sent2 = "what does bm stand for"; + double PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "2 eggs and 1 cup"; + double PerformTesting2 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < PerformTesting2); + sent1 = "do you watch any series or animes or cartoons"; + sent2 = "you guys voted for this"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 5500); + sent1 = "do you know pyrono"; + sent2 = "i have to get accustomed to it"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 2000); + sent1 = "Is William a good admin?"; + sent2 = "but this is helms deep"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + sent2 = "keep an eye on them"; + PerformTesting2 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting2 > PerformTesting1); + sent1 = "scuffed jenz"; + sent2 = "I HAVE WATCHED ONLY ONE CARTOON AND IT'S POKEMON"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + 
Assert.assertTrue(PerformTesting1 < 2500); + sent1 = "So what?"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 5500); + sent1 = "Who is the enemy?"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 2500); + sent1 = "Sounds bad, doesn't work"; + sent2 = "that hold is identical to the other room"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 2500); + sent1 = "oh wait, because I don't have any"; + sent2 = "would be cool if VIPs would nominate other than paranoid. All the maps in the vote all the time suck so people just vote for an"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 4500); + sent1 = "{navy}? :wlenny~1: ?? {violet}? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ?? ? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ??"; + sent2 = "will you still be online tommorow?"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 4500); + sent1 = "stop being such a kid and act more polite towards people "; + sent2 = "i played nemesis on paradise a few days ago and it worked fine"; + PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting1 < 6500); + sent1 = "Enfin. Map noob"; + sent2 = "dagger dagger"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "u have to hit the middle one with ur nade"; + sent2 = "your not going to mcdonalds, you have had your chance with the cheeseburger"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 1400.0); + sent1 = "How is everyone doing"; + sent2 = "wieso ist dein Bein am Arsch. Eigdl hängt das runter"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent2 = "meshlem how does it feel to be russian"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 700.0); + + //new pairs + sent1 = "they dont buy kefvlar"; + sent2 = "you have a database available again"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "because of lag?"; + sent2 = "french tourit"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent2 = "Even when I'm working"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 3500.0); + sent1 = "or need another restart"; + sent2 = "Even when I'm working"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 600.0); + sent2 = "french tourit"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "wow that clock works/"; + sent2 = "didnt the bot like mako? 
what happened to that?"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "haHAA stop that cringe chat haHAA"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "like 1s down now i guess i will die"; + sent2 = "monkaGIGA"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "what do you want"; + sent2 = "admun extend"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting > 100.0); + sent1 = "You are a one large bug"; + sent2 = "omg you are right"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting > 5900.0); + sent1 = "I'm not a mapper, wtf"; + sent2 = "this map was made by wtf"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting > 1400.0); + sent1 = "do you have plants thonk"; + sent2 = "banana trees are plants"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting > 1400.0); + sent2 = "fucking alcolo"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 600.0); + sent2 = "qual arma e 382012909942734858"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < -400.0); + sent2 = "wlenny on gamebanana"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 2500.0); + sent1 = "And how was it? :wlenny~1:"; + sent2 = "at lvl 1 avad is 140 cd"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < 400.0); + sent1 = "wtf? :monkaS~2:"; + sent2 = "think thats it kangaroo next"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < -400.0); + sent1 = "yurope"; + sent2 = "?? ??????? ??? ??"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < -2400.0); + sent1 = "fuck"; + PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment); + Assert.assertTrue(PerformTesting < -2400.0); + } +} +
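Every case in testScoring above follows the same shape: score a sentence pair with
testCall, then bound the resulting distance. A hypothetical helper (same assumptions
as testCall; not part of the patch) that would collapse each case to one line:

    // Hypothetical helper: score two sentences and assert the distance stays
    // below the expected ceiling, reporting the actual score on failure.
    private void assertScoreBelow(double ceiling, String sent1, String sent2,
            StanfordCoreNLP nlp, StanfordCoreNLP sentimentNlp) {
        double score = testCall(sent1, sent2, nlp, sentimentNlp);
        Assert.assertTrue("score " + score + " not below " + ceiling, score < ceiling);
    }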